@@ -1358,8 +1358,7 @@ def process(
 
         # Spawn the workers to run the sub-pipeline
         run_config = RunConfig(
-            config={'log_level': logging.getLevelName(self.logger.level),
-                    'spawn': 1})
+            log_level=logging.getLevelName(self.logger.level), spawn=1)
         tmp_names = []
         with NamedTemporaryFile(delete=False) as fp:
             fp_name = fp.name
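For context on the change above: the old call packed everything into a single `config` dict, while the new call passes fields as keywords, the natural constructor form for a pydantic-style model. A minimal sketch, assuming `RunConfig` is (or behaves like) a pydantic `BaseModel` with `log_level` and `spawn` fields; the class body and defaults here are illustrative, not the actual CHAP definition:

```python
import logging

from pydantic import BaseModel


class RunConfig(BaseModel):
    """Illustrative stand-in for CHAP's RunConfig (assumed fields)."""
    log_level: str = 'INFO'
    spawn: int = 0


logger = logging.getLogger(__name__)
# Keyword construction, mirroring the updated call site in the diff
run_config = RunConfig(
    log_level=logging.getLevelName(logger.level), spawn=1)
print(run_config.model_dump())  # e.g. {'log_level': 'NOTSET', 'spawn': 1}
```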
@@ -1371,14 +1370,14 @@ def process(
             tmp_names.append(f_name)
             with open(f_name, 'w') as f:
                 yaml.dump(
-                    {'config': run_config.__dict__,
+                    {'config': run_config.model_dump(),
                      'pipeline': pipeline_config[n_proc-1]},
                     f, sort_keys=False)
         sub_comm = MPI.COMM_SELF.Spawn(
             'CHAP', args=[fp_name], maxprocs=num_proc-1)
         common_comm = sub_comm.Merge(False)
         # Align with the barrier in RunConfig() on common_comm
-        # called from the spawned main()
+        # called from the spawned main() in common_comm
         common_comm.barrier()
         # Align with the barrier in run() on common_comm
         # called from the spawned main()
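The spawn/merge/barrier sequence in this hunk is the standard mpi4py handshake between a manager and dynamically spawned workers: `Spawn` returns an intercommunicator, `Merge` turns it into a shared intracommunicator, and the paired barriers keep both sides in lockstep. A minimal two-file sketch of the pattern (file names and the worker entry point are illustrative; the real workers run the `CHAP` executable):

```python
# manager.py -- illustrative manager side
from mpi4py import MPI

# Spawn two workers; Spawn returns an intercommunicator to them
sub_comm = MPI.COMM_SELF.Spawn('python', args=['worker.py'], maxprocs=2)
# Merge into one intracommunicator; False ranks the manager first
common_comm = sub_comm.Merge(False)
common_comm.barrier()   # pairs with the workers' first barrier
# ... distribute the sub-pipeline work over common_comm ...
common_comm.barrier()   # pairs with the workers' second barrier
sub_comm.Disconnect()
```

```python
# worker.py -- illustrative spawned side
from mpi4py import MPI

sub_comm = MPI.Comm.Get_parent()    # intercommunicator back to the manager
common_comm = sub_comm.Merge(True)  # True ranks workers after the manager
common_comm.barrier()   # pairs with the manager's first barrier
# ... perform the assigned work ...
common_comm.barrier()   # pairs with the manager's second barrier
sub_comm.Disconnect()
```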
@@ -1421,13 +1420,15 @@ def process(
         if num_proc > 1:
             # Reset the scan_numbers to the original full set
             spec_scans.scan_numbers = scan_numbers
-            # Disconnect spawned workers and cleanup temporary files
+            # Align with the barrier in main() on common_comm
+            # when disconnecting the spawned worker
             common_comm.barrier()
+            # Disconnect spawned workers and cleanup temporary files
             sub_comm.Disconnect()
             for tmp_name in tmp_names:
                 os.remove(tmp_name)
 
-        # Construct the NeXus NXroot object
+        # Construct and return the NeXus NXroot object
         return self._get_nxroot(
             map_config, detector_config, data, independent_dimensions,
             all_scalar_data, placeholder_data)
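On the cleanup order this hunk documents: the YAML pipeline files are created with `delete=False` so they outlive the `with` block and can be read by the spawned workers, which is why they must be removed by hand, and only after the final barrier guarantees the workers are done with them. A minimal sketch of that lifecycle (the payload shown is illustrative):

```python
import os
from tempfile import NamedTemporaryFile

import yaml

tmp_names = []
with NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as fp:
    tmp_names.append(fp.name)
    # Illustrative payload; the diff writes {'config': ..., 'pipeline': ...}
    yaml.dump({'config': {'spawn': 1}, 'pipeline': []}, fp, sort_keys=False)

# ... workers read the files by name while they still exist ...

# Manual cleanup, mirroring the loop in the diff
for tmp_name in tmp_names:
    os.remove(tmp_name)
```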
@@ -1984,7 +1985,7 @@ def process(self, data, sub_pipeline=None, inputdir='.', outputdir='.',
         run_config = {'inputdir': inputdir, 'outputdir': outputdir,
                       'interactive': interactive, 'log_level': log_level}
         run_config.update(sub_pipeline.get('config'))
-        run_config = RunConfig(run_config, comm)
+        run_config = RunConfig(**run_config, comm=comm, logger=self.logger)
         pipeline_config = []
         for item in sub_pipeline['pipeline']:
             if isinstance(item, dict):
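The change here unpacks the merged settings dict into keyword arguments and also hands the model live objects (`comm`, `logger`). In pydantic v2 terms that requires the model to allow arbitrary (non-pydantic) field types. A sketch under that assumption; the field list and `ConfigDict` usage are illustrative, not CHAP's actual model:

```python
import logging
from typing import Optional

from mpi4py import MPI
from pydantic import BaseModel, ConfigDict


class RunConfig(BaseModel):
    # Assumption: arbitrary_types_allowed is needed for MPI.Comm and Logger
    model_config = ConfigDict(arbitrary_types_allowed=True)
    inputdir: str = '.'
    outputdir: str = '.'
    interactive: bool = False
    log_level: str = 'INFO'
    comm: Optional[MPI.Comm] = None
    logger: Optional[logging.Logger] = None


run_config = {'inputdir': '.', 'outputdir': '.',
              'interactive': False, 'log_level': 'INFO'}
run_config.update({'log_level': 'DEBUG'})  # per-sub-pipeline overrides win
run_config = RunConfig(**run_config, comm=MPI.COMM_WORLD,
                       logger=logging.getLogger(__name__))
```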
@@ -2067,7 +2068,7 @@ def process(
         run_config = {'inputdir': inputdir, 'outputdir': outputdir,
                       'interactive': interactive, 'log_level': log_level}
         run_config.update(sub_pipeline.get('config'))
-        run_config = RunConfig(run_config)
+        run_config = RunConfig(**run_config, logger=self.logger)
 
         # Create the sub-pipeline configuration for each processor
         spec_scans = map_config.spec_scans[0]
@@ -2128,7 +2129,7 @@ def process(
             tmp_names.append(f_name)
             with open(f_name, 'w') as f:
                 yaml.dump(
-                    {'config': run_config.__dict__,
+                    {'config': run_config.model_dump(),
                      'pipeline': pipeline_config[n_proc]},
                     f, sort_keys=False)
             sub_comm = MPI.COMM_SELF.Spawn(
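Same `__dict__` to `model_dump()` swap as in the earlier hunk. The practical difference when the result is fed to `yaml.dump()`: `model_dump()` recursively converts nested models to built-in types, whereas `__dict__` leaves them as Python objects, which default PyYAML serializes with `!!python/object` tags that a safe YAML loader on the worker side could not read. A small illustration (model fields assumed):

```python
import yaml
from pydantic import BaseModel


class Detector(BaseModel):
    prefix: str = 'det'


class RunConfig(BaseModel):
    log_level: str = 'INFO'
    detector: Detector = Detector()


rc = RunConfig(log_level='DEBUG')
# model_dump() yields plain dicts/strings, safe for YAML round-trips
print(yaml.dump({'config': rc.model_dump()}, sort_keys=False))
# yaml.dump({'config': rc.__dict__}) would embed a !!python/object tag
# for the nested Detector, unreadable by yaml.safe_load
```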