1313from seqr .utils .middleware import ErrorsWarningsException
1414from seqr .views .utils .airtable_utils import AirtableSession , ANVIL_REQUEST_TRACKING_TABLE
1515from seqr .views .utils .export_utils import write_multiple_files
16+ from seqr .views .utils .json_utils import _to_title_case
1617from seqr .views .utils .pedigree_info_utils import JsonConstants
1718from settings import SEQR_SLACK_DATA_ALERTS_NOTIFICATION_CHANNEL , BASE_URL , ANVIL_UI_URL , PIPELINE_RUNNER_SERVER , \
1819 SEQR_SLACK_ANVIL_DATA_LOADING_CHANNEL , SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL , LOADING_DATASETS_DIR
@@ -65,6 +66,22 @@ def update_airtable_loading_tracking_status(project, status, additional_update=N
6566 update = {'Status' : status , ** (additional_update or {})},
6667 )
6768
def trigger_delete_families_search(project, family_guids, user=None):
    """Disable search for all active samples in the given families and enqueue a delete job.

    Args:
        project: Project whose families are being deleted.
        family_guids: iterable of family GUIDs to remove from search.
        user: optional User performing the action, passed through for audit logging.

    Returns:
        list[str]: human-readable summary messages describing what was done.
    """
    search_samples = Sample.objects.filter(is_active=True, individual__family__guid__in=family_guids)
    info = []
    if search_samples:
        # Materialize the family IDs BEFORE the update() below so the summary and
        # count reflect the samples actually deactivated, instead of depending on
        # QuerySet result caching surviving the mutation.
        updated_families = sorted(search_samples.values_list('individual__family__family_id', flat=True).distinct())
        family_summary = ', '.join(updated_families)
        num_updated = search_samples.update(is_active=False)
        message = f'Disabled search for {num_updated} samples in the following {len(updated_families)} families: {family_summary}'
        info.append(message)
        logger.info(message, user)

    variables = {'project_guid': project.guid, 'family_guids': family_guids}
    _enqueue_pipeline_request('delete_families', variables, user)
    info.append('Triggered delete family data')
    return info
84+
6885def trigger_data_loading (projects : list [Project ], individual_ids : list [int ], sample_type : str , dataset_type : str ,
6986 genome_version : str , data_path : str , user : User , raise_error : bool = False , skip_expect_tdr_metrics : bool = True ,
7087 skip_check_sex_and_relatedness : bool = True , vcf_sample_id_map = None ,
@@ -85,34 +102,40 @@ def trigger_data_loading(projects: list[Project], individual_ids: list[int], sam
85102 _upload_data_loading_files (individual_ids , vcf_sample_id_map or {}, user , file_path , raise_error )
86103 _write_gene_id_file (user )
87104
88- response = requests .post (f'{ PIPELINE_RUNNER_SERVER } /loading_pipeline_enqueue' , json = variables , timeout = 60 )
89- success = True
105+ error = _enqueue_pipeline_request ('loading_pipeline' , variables , user , raise_error )
106+ if error :
107+ safe_post_to_slack (
108+ SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL ,
109+ f'{ error_message } : { error } \n Loading pipeline should be triggered with:\n ```{ json .dumps (variables , indent = 4 )} ```' ,
110+ )
111+
112+ success = not error
113+ if success_message and (success or success_slack_channel != SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL ):
114+ safe_post_to_slack (success_slack_channel , '\n \n ' .join ([
115+ success_message ,
116+ f'Pedigree files have been uploaded to { file_path } ' ,
117+ f'Loading pipeline is triggered with:\n ```{ json .dumps (variables , indent = 4 )} ```' ,
118+ ]))
119+
120+ return success
121+
122+
def _enqueue_pipeline_request(name: str, variables: dict, user: User, raise_error: bool = True):
    """POST a job to the pipeline runner's ``<name>_enqueue`` endpoint.

    Args:
        name: pipeline job name; used to build the endpoint URL and log messages.
        variables: JSON-serializable payload for the pipeline runner.
        user: User triggering the request, passed through for audit logging.
        raise_error: if True, request failures are re-raised; otherwise they are
            logged and returned as a message.

    Returns:
        None on success, or the error message string when the request failed and
        raise_error is False.

    Raises:
        requests.HTTPError: on a failed request when raise_error is True.
        ErrorsWarningsException: on HTTP 409 (pipeline already running) when
            raise_error is True.
    """
    response = requests.post(f'{PIPELINE_RUNNER_SERVER}/{name}_enqueue', json=variables, timeout=60)
    error = None
    try:
        response.raise_for_status()
        logger.info(f'Triggered {_to_title_case(name)}', user, detail=variables)
    except requests.HTTPError as e:
        error = str(e)
        if response.status_code == 409:
            # NOTE(review): this message says "Loading pipeline" regardless of `name`
            # (e.g. for delete_families) — confirm whether it should interpolate the
            # job name. Left unchanged since callers/tests may match this exact text.
            error = 'Loading pipeline is already running. Wait for it to complete and resubmit'
            e = ErrorsWarningsException([error])
        if raise_error:
            raise e
        # "triggering" lowercased for consistency with the success log message and
        # the pre-refactor 'Error triggering loading pipeline' wording.
        logger.warning(f'Error triggering {_to_title_case(name)}: {error}', user, detail=variables)
    return error
116139
117140
118141def _loading_dataset_type (sample_type : str , dataset_type : str ):
0 commit comments