Skip to content

Commit

Permalink
Merge branch 'main' into feat/orca-websocket-connection-component
Browse files Browse the repository at this point in the history
  • Loading branch information
raylrui committed Nov 25, 2024
2 parents 979bd02 + a0fe733 commit 33877b2
Show file tree
Hide file tree
Showing 43 changed files with 479 additions and 73 deletions.
1 change: 1 addition & 0 deletions cdk.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,6 @@
]
},
"context": {
"@aws-cdk/core:suppressTemplateIndentation": true
}
}
47 changes: 24 additions & 23 deletions lib/pipeline/statefulPipelineStack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,24 +93,7 @@ export class StatefulPipelineStack extends cdk.Stack {
});

/**
* Deployment to Beta (Dev) account
*/
const betaConfig = getEnvironmentConfig(AppStage.BETA);
if (!betaConfig) throw new Error(`No 'Beta' account configuration`);
pipeline.addStage(
new OrcaBusStatefulDeploymentStage(
this,
'OrcaBusBeta',
betaConfig.stackProps.statefulConfig,
{
account: betaConfig.accountId,
region: betaConfig.region,
}
)
);

/**
* Deployment to Gamma (Staging) account
* Deployment to Gamma (Staging) account directly without approval
*/
const gammaConfig = getEnvironmentConfig(AppStage.GAMMA);
if (!gammaConfig) throw new Error(`No 'Gamma' account configuration`);
Expand All @@ -123,13 +106,9 @@ export class StatefulPipelineStack extends cdk.Stack {
account: gammaConfig.accountId,
region: gammaConfig.region,
}
),
{ pre: [new pipelines.ManualApprovalStep('PromoteToGamma')] }
)
);

// Some stacks have dependencies on the 'shared stack', so it needs to be deployed first;
// this should only be a one-off initial deployment

/**
* Deployment to Prod account
*/
Expand All @@ -148,6 +127,28 @@ export class StatefulPipelineStack extends cdk.Stack {
{ pre: [new pipelines.ManualApprovalStep('PromoteToProd')] }
);

/**
* Deployment to Beta (Dev)
 * This shouldn't be deployed automatically. Some dev work may be deployed manually from a local
 * environment for testing, but it could then get overwritten by the pipeline if someone has pushed
 * to the main branch. This stage is placed at the end of the pipeline just to provide a way to
 * deploy with the click of a button.
*/
const betaConfig = getEnvironmentConfig(AppStage.BETA);
if (!betaConfig) throw new Error(`No 'Beta' account configuration`);
pipeline.addStage(
new OrcaBusStatefulDeploymentStage(
this,
'OrcaBusBeta',
betaConfig.stackProps.statefulConfig,
{
account: betaConfig.accountId,
region: betaConfig.region,
}
),
{ pre: [new pipelines.ManualApprovalStep('PromoteToDev')] }
);

    // Need to build the pipeline so we can add notifications on the pipeline construct
pipeline.buildPipeline();

Expand Down
45 changes: 24 additions & 21 deletions lib/pipeline/statelessPipelineStack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -178,25 +178,7 @@ export class StatelessPipelineStack extends cdk.Stack {
});

/**
* Deployment to Beta (Dev) account
*/
const betaConfig = getEnvironmentConfig(AppStage.BETA);
if (!betaConfig) throw new Error(`No 'Beta' account configuration`);
pipeline.addStage(
new OrcaBusStatelessDeploymentStage(
this,
'OrcaBusBeta',
betaConfig.stackProps.statelessConfig,
{
account: betaConfig.accountId,
region: betaConfig.region,
}
),
{ pre: [stripAssetsFromAssembly] } // I think this should only be done once across stages
);

/**
* Deployment to Gamma (Staging) account
* Deployment to Gamma (Staging) account directly without approval
*/
const gammaConfig = getEnvironmentConfig(AppStage.GAMMA);
if (!gammaConfig) throw new Error(`No 'Gamma' account configuration`);
Expand All @@ -210,7 +192,7 @@ export class StatelessPipelineStack extends cdk.Stack {
region: gammaConfig.region,
}
),
{ pre: [new pipelines.ManualApprovalStep('PromoteToGamma')] }
{ pre: [stripAssetsFromAssembly] } // See above for the reason
);

/**
Expand All @@ -231,9 +213,30 @@ export class StatelessPipelineStack extends cdk.Stack {
{ pre: [new pipelines.ManualApprovalStep('PromoteToProd')] }
);

/**
* Deployment to Beta (Dev)
 * This shouldn't be deployed automatically. Some dev work may be deployed manually from a local
 * environment for testing, but it could then get overwritten by the pipeline if someone has pushed
 * to the main branch. This stage is placed at the end of the pipeline just to provide a way to
 * deploy with the click of a button.
*/
const betaConfig = getEnvironmentConfig(AppStage.BETA);
if (!betaConfig) throw new Error(`No 'Beta' account configuration`);
pipeline.addStage(
new OrcaBusStatelessDeploymentStage(
this,
'OrcaBusBeta',
betaConfig.stackProps.statelessConfig,
{
account: betaConfig.accountId,
region: betaConfig.region,
}
),
{ pre: [new pipelines.ManualApprovalStep('PromoteToDev')] }
);

    // Need to build the pipeline so we can add notifications on the pipeline construct
pipeline.buildPipeline();

pipeline.pipeline.artifactBucket.grantReadWrite(stripAssetsFromAssembly.project);

// notification for success/failure
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@ def load_metadata_csv(df: pd.DataFrame, is_emit_eb_events: bool = True, user_id:
try:
subject.individual_set.get(orcabus_id=idv.orcabus_id)
except ObjectDoesNotExist:
subject._change_reason = reason
subject.individual_set.add(idv)

# We update the stats when new idv is linked to sbj, only if this is not recorded as
Expand Down Expand Up @@ -182,6 +183,7 @@ def load_metadata_csv(df: pd.DataFrame, is_emit_eb_events: bool = True, user_id:
try:
project.contact_set.get(orcabus_id=contact.orcabus_id)
except ObjectDoesNotExist:
project._change_reason = reason
project.contact_set.add(contact)

# We update the stats when new ctc is linked to prj, only if this is not recorded as
Expand Down Expand Up @@ -236,8 +238,8 @@ def load_metadata_csv(df: pd.DataFrame, is_emit_eb_events: bool = True, user_id:
try:
library.project_set.get(orcabus_id=project.orcabus_id)
except ObjectDoesNotExist:
library._change_reason = reason
library.project_set.add(project)

# We update the stats when new project is linked to library, only if this is not recorded as
# update/create in previous upsert method
if not is_lib_created and not is_lib_updated:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,7 @@ def persist_lab_metadata(df: pd.DataFrame, sheet_year: str, is_emit_eb_events: b
try:
subject.individual_set.get(orcabus_id=idv.orcabus_id)
except ObjectDoesNotExist:
subject._change_reason = reason
subject.individual_set.add(idv)

# We update the stats when new idv is linked to sbj, only if this is not recorded as
Expand Down Expand Up @@ -192,6 +193,7 @@ def persist_lab_metadata(df: pd.DataFrame, sheet_year: str, is_emit_eb_events: b
try:
project.contact_set.get(orcabus_id=contact.orcabus_id)
except ObjectDoesNotExist:
project._change_reason = reason
project.contact_set.add(contact)

# We update the stats when new ctc is linked to prj, only if this is not recorded as
Expand Down Expand Up @@ -247,6 +249,7 @@ def persist_lab_metadata(df: pd.DataFrame, sheet_year: str, is_emit_eb_events: b
try:
library.project_set.get(orcabus_id=project.orcabus_id)
except ObjectDoesNotExist:
library._change_reason = reason
library.project_set.add(project)

# We update the stats when new project is linked to library, only if this is not recorded as
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,8 @@ export class OraCompressionIcav2PipelineManagerStack extends cdk.Stack {
environment: {
ICAV2_ACCESS_TOKEN_SECRET_ID: icav2AccessTokenSecretObj.secretName,
},
timeout: Duration.seconds(60),
memorySize: 1024,
timeout: Duration.seconds(300),
}
);
const findAllFastqPairsInInstrumentRunLambdaObj = new PythonFunction(
Expand All @@ -151,7 +152,8 @@ export class OraCompressionIcav2PipelineManagerStack extends cdk.Stack {
environment: {
ICAV2_ACCESS_TOKEN_SECRET_ID: icav2AccessTokenSecretObj.secretName,
},
timeout: Duration.seconds(60),
memorySize: 1024,
timeout: Duration.seconds(300),
}
);

Expand Down Expand Up @@ -257,7 +259,8 @@ export class OraCompressionIcav2PipelineManagerStack extends cdk.Stack {
environment: {
ICAV2_ACCESS_TOKEN_SECRET_ID: icav2AccessTokenSecretObj.secretName,
},
timeout: Duration.seconds(60),
memorySize: 1024,
timeout: Duration.seconds(300),
}
);

Expand Down Expand Up @@ -401,7 +404,8 @@ export class OraCompressionIcav2PipelineManagerStack extends cdk.Stack {
environment: {
ICAV2_ACCESS_TOKEN_SECRET_ID: icav2AccessTokenSecretObj.secretName,
},
timeout: Duration.seconds(60),
memorySize: 1024,
timeout: Duration.seconds(300),
});

// Give the lambda function access to the secret
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import boto3
from os import environ
import re
import pandas as pd

from wrapica.project_data import (
find_project_data_bulk,
Expand Down Expand Up @@ -142,6 +143,11 @@ def handler(event, context):
"read_2_file_uri": convert_project_data_obj_to_uri(r2_file)
})

    # Assert that all rgid_partial values are unique
assert \
len(pd.DataFrame(fastq_pair_list)['rgid_partial'].unique().tolist()) == len(fastq_pair_list), \
"rgid_partial are not unique"

return fastq_pair_list


Expand All @@ -166,13 +172,13 @@ def handler(event, context):
# # [
# # {
# # "rgid_partial": "1.L2401526",
# # "read1_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_1/L2401526/L2401526_S1_L001_R1_001.fastq.gz",
# # "read2_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_1/L2401526/L2401526_S1_L001_R2_001.fastq.gz"
# # "read_1_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_1/L2401526/L2401526_S1_L001_R1_001.fastq.gz",
# # "read_2_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_1/L2401526/L2401526_S1_L001_R2_001.fastq.gz"
# # },
# # ...
# # {
# # "rgid_partial": "4.L2401553",
# # "read1_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_4/L2401553/L2401553_S27_L004_R1_001.fastq.gz",
# # "read2_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_4/L2401553/L2401553_S27_L004_R2_001.fastq.gz"
# # "read_1_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_4/L2401553/L2401553_S27_L004_R1_001.fastq.gz",
# # "read_2_file_uri": "icav2://ea19a3f5-ec7c-4940-a474-c31cd91dbad4/primary/241024_A00130_0336_BHW7MVDSXC/20241030c613872c/Samples/Lane_4/L2401553/L2401553_S27_L004_R2_001.fastq.gz"
# # }
# # ]
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
wrapica==2.27.1.post20240830140737
pandas>=2
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import boto3
from typing import List, Dict
from os import environ
import pandas as pd

from wrapica.project_data import (
find_project_data_bulk,
Expand Down Expand Up @@ -122,12 +123,13 @@ def handler(event, context):
for bclconvert_iter_ in samplesheet_data_dict["bclconvert_data"]:
rgids_list.append(
{
"rgid": f"{bclconvert_iter_['index']}.{bclconvert_iter_['index2']}.{bclconvert_iter_['lane']}.{bclconvert_iter_['sample_id']}.{instrument_run_id}",
"rgid_partial": f"{bclconvert_iter_['lane']}.{bclconvert_iter_['sample_id']}",
"rgid": f"{bclconvert_iter_['index']}.{bclconvert_iter_['index2']}.{bclconvert_iter_.get('lane', 1)}.{bclconvert_iter_['sample_id']}.{instrument_run_id}",
"rgid_partial": f"{bclconvert_iter_.get('lane', 1)}.{bclconvert_iter_['sample_id']}",
}
)

return rgids_list
# Convert rgids_list to pandas dataframe and drop duplicates
return pd.DataFrame(rgids_list).drop_duplicates().to_dict(orient='records')


# if __name__ == "__main__":
Expand Down Expand Up @@ -159,3 +161,38 @@ def handler(event, context):
# # "rgid_partial": "4.L2401553"
# # }
# # ]


# if __name__ == "__main__":
# # Test the handler function
# import json
# environ["AWS_PROFILE"] = "umccr-production"
# environ["ICAV2_ACCESS_TOKEN_SECRET_ID"] = "ICAv2JWTKey-umccr-prod-service-production"
# print(
# json.dumps(
# handler(
# {
# "instrument_run_folder_uri": "icav2://data-migration/primary_data/210701_A01052_0055_AH7KWGDSX2/202201052f795bab/",
# "instrument_run_id": "210701_A01052_0055_AH7KWGDSX2"
# },
# None,
# ),
# indent=4
# )
# )
#
# # [
# # {
# # "rgid": "TACCGAGG.AGTTCAGG.1.PRJ210449_L2100607.210701_A01052_0055_AH7KWGDSX2",
# # "rgid_partial": "1.PRJ210449_L2100607"
# # },
# # {
# # "rgid": "CGTTAGAA.GACCTGAA.1.PRJ210450_L2100608.210701_A01052_0055_AH7KWGDSX2",
# # "rgid_partial": "1.PRJ210450_L2100608"
# # },
# # ...
# # {
# # "rgid": "TTACAGGA.GCTTGTCA.1.MDX210166_L2100720.210701_A01052_0055_AH7KWGDSX2",
# # "rgid_partial": "1.MDX210166_L2100720"
# # }
# # ]
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
wrapica==2.27.1.post20240830140737
v2-samplesheet-maker==4.2.4.post20241110133537
v2-samplesheet-maker==4.2.4.post20241110133537
pandas>=2
14 changes: 14 additions & 0 deletions lib/workload/stateless/stacks/workflow-manager/deploy/stack.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ export class WorkflowManagerStack extends Stack {
vpcSubnets: { subnets: this.vpc.privateSubnets },
role: this.lambdaRole,
architecture: Architecture.ARM_64,
memorySize: 1024,
...props,
});
}
Expand All @@ -111,6 +112,7 @@ export class WorkflowManagerStack extends Stack {
}

private createApiHandlerAndIntegration(props: WorkflowManagerStackProps) {
const API_VERSION = 'v1';
const apiFn: PythonFunction = this.createPythonFunction('Api', {
index: 'api.py',
handler: 'handler',
Expand Down Expand Up @@ -145,6 +147,18 @@ export class WorkflowManagerStack extends Stack {
integration: apiIntegration,
routeKey: HttpRouteKey.with('/{proxy+}', HttpMethod.DELETE),
});

// Route and permission for rerun cases where it needs to put event to mainBus
this.mainBus.grantPutEventsTo(apiFn);
new HttpRoute(this, 'PostRerunHttpRoute', {
httpApi: httpApi,
integration: apiIntegration,
authorizer: wfmApi.authStackHttpLambdaAuthorizer,
routeKey: HttpRouteKey.with(
`/api/${API_VERSION}/workflowrun/{orcabusId}/rerun/{proxy+}`,
HttpMethod.POST
),
});
}

private createHandleServiceWrscEventHandler() {
Expand Down
Loading

0 comments on commit 33877b2

Please sign in to comment.