from seqr.utils.middleware import ErrorsWarningsException
from seqr.views.utils.airtable_utils import AirtableSession, ANVIL_REQUEST_TRACKING_TABLE
from seqr.views.utils.export_utils import write_multiple_files
+from seqr.views.utils.json_utils import _to_title_case
from seqr.views.utils.pedigree_info_utils import JsonConstants
from settings import SEQR_SLACK_DATA_ALERTS_NOTIFICATION_CHANNEL, BASE_URL, ANVIL_UI_URL, PIPELINE_RUNNER_SERVER, \
    SEQR_SLACK_ANVIL_DATA_LOADING_CHANNEL, SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL, LOADING_DATASETS_DIR
@@ -64,6 +65,22 @@ def update_airtable_loading_tracking_status(project, status, additional_update=None):
        update={'Status': status, **(additional_update or {})},
    )

+def trigger_delete_families_search(project, family_guids, user=None):
+    search_samples = Sample.objects.filter(is_active=True, individual__family__guid__in=family_guids)
+    info = []
+    if search_samples:
+        updated_families = search_samples.values_list("individual__family__family_id", flat=True).distinct()
+        family_summary = ", ".join(sorted(updated_families))
+        num_updated = search_samples.update(is_active=False)
+        message = f'Disabled search for {num_updated} samples in the following {len(updated_families)} families: {family_summary}'
+        info.append(message)
+        logger.info(message, user)
+
+    variables = {'project_guid': project.guid, 'family_guids': family_guids}
+    _enqueue_pipeline_request('delete_families', variables, user)
+    info.append('Triggered delete family data')
+    return info
+
def trigger_data_loading(projects: list[Project], individual_ids: list[int], sample_type: str, dataset_type: str,
                         genome_version: str, data_path: str, user: User, raise_error: bool = False, skip_expect_tdr_metrics: bool = True,
                         skip_check_sex_and_relatedness: bool = True, vcf_sample_id_map=None,
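For context, a minimal sketch of how the new `trigger_delete_families_search` helper might be called. The view function, request payload shape, and response helper here are illustrative assumptions, not part of this diff:

```python
import json

# Hypothetical caller (not in this diff): a view that deletes data for a set of families.
def delete_families_handler(request, project_guid):
    project = Project.objects.get(guid=project_guid)  # assumed lookup
    family_guids = json.loads(request.body)['familyGuids']  # assumed payload shape
    # Deactivates matching search samples, then enqueues the delete_families pipeline job
    info = trigger_delete_families_search(project, family_guids, user=request.user)
    return create_json_response({'info': info})  # assumed response helper
```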
@@ -84,34 +101,40 @@ def trigger_data_loading(projects: list[Project], individual_ids: list[int], sample_type: str, dataset_type: str,
    _upload_data_loading_files(individual_ids, vcf_sample_id_map or {}, user, file_path, raise_error)
    _write_gene_id_file(user)

-    response = requests.post(f'{PIPELINE_RUNNER_SERVER}/loading_pipeline_enqueue', json=variables, timeout=60)
-    success = True
+    error = _enqueue_pipeline_request('loading_pipeline', variables, user, raise_error)
+    if error:
+        safe_post_to_slack(
+            SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL,
+            f'{error_message}: {error}\nLoading pipeline should be triggered with:\n```{json.dumps(variables, indent=4)}```',
+        )
+
+    success = not error
+    if success_message and (success or success_slack_channel != SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL):
+        safe_post_to_slack(success_slack_channel, '\n\n'.join([
+            success_message,
+            f'Pedigree files have been uploaded to {file_path}',
+            f'Loading pipeline is triggered with:\n```{json.dumps(variables, indent=4)}```',
+        ]))
+
+    return success
+
+
+def _enqueue_pipeline_request(name: str, variables: dict, user: User, raise_error: bool = True):
+    response = requests.post(f'{PIPELINE_RUNNER_SERVER}/{name}_enqueue', json=variables, timeout=60)
+    error = None
    try:
        response.raise_for_status()
-        logger.info('Triggered loading pipeline', user, detail=variables)
+        logger.info(f'Triggered {_to_title_case(name)}', user, detail=variables)
    except requests.HTTPError as e:
-        success = False
        error = str(e)
        if response.status_code == 409:
            error = 'Loading pipeline is already running. Wait for it to complete and resubmit'
            e = ErrorsWarningsException([error])
        if raise_error:
            raise e
        else:
-            logger.warning(f'Error triggering loading pipeline: {error}', user, detail=variables)
-            safe_post_to_slack(
-                SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL,
-                f'{error_message}: {error}\nLoading pipeline should be triggered with:\n```{json.dumps(variables, indent=4)}```',
-            )
-
-    if success_message and (success or success_slack_channel != SEQR_SLACK_LOADING_NOTIFICATION_CHANNEL):
-        safe_post_to_slack(success_slack_channel, '\n\n'.join([
-            success_message,
-            f'Pedigree files have been uploaded to {file_path}',
-            f'Loading pipeline is triggered with:\n```{json.dumps(variables, indent=4)}```',
-        ]))
-
-    return success
+            logger.warning(f'Error triggering {_to_title_case(name)}: {error}', user, detail=variables)
+    return error


def _loading_dataset_type(sample_type: str, dataset_type: str):
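As refactored, `_enqueue_pipeline_request` POSTs to `{PIPELINE_RUNNER_SERVER}/{name}_enqueue` and either raises on failure or returns the error string for the caller to handle. A sketch of the two modes, assuming `variables` and `user` are already in scope:

```python
# Default mode (raise_error=True): a failed enqueue raises; a 409 from the
# pipeline runner is wrapped as ErrorsWarningsException with a user-facing message.
try:
    _enqueue_pipeline_request('delete_families', variables, user)
except ErrorsWarningsException:
    pass  # pipeline already running: ask the user to wait and resubmit

# Non-raising mode (raise_error=False): the error string (or None on success) is
# returned, which trigger_data_loading uses to post a Slack alert and report failure.
error = _enqueue_pipeline_request('loading_pipeline', variables, user, raise_error=False)
success = not error
```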