@@ -1358,8 +1358,7 @@ def process(
 
         # Spawn the workers to run the sub-pipeline
         run_config = RunConfig(
-            config={'log_level': logging.getLevelName(self.logger.level),
-                    'spawn': 1})
+            log_level=logging.getLevelName(self.logger.level), spawn=1)
         tmp_names = []
         with NamedTemporaryFile(delete=False) as fp:
             fp_name = fp.name
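A note on the `log_level` handoff above: the parent forwards its logger level to the spawned run as a level *name*, and `logging.getLevelName()` performs the int-to-string mapping. A minimal standard-library sketch of that round-trip:

```python
import logging

logger = logging.getLogger('demo')
logger.setLevel(logging.DEBUG)

# logger.level is the integer 10; getLevelName() recovers the string
# 'DEBUG', which is what gets forwarded as log_level to the workers.
assert logging.getLevelName(logger.level) == 'DEBUG'
```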
@@ -1371,14 +1370,14 @@ def process(
             tmp_names.append(f_name)
             with open(f_name, 'w') as f:
                 yaml.dump(
-                    {'config': run_config.__dict__,
+                    {'config': run_config.model_dump(),
                      'pipeline': pipeline_config[n_proc - 1]},
                     f, sort_keys=False)
         sub_comm = MPI.COMM_SELF.Spawn(
             'CHAP', args=[fp_name], maxprocs=num_proc - 1)
         common_comm = sub_comm.Merge(False)
         # Align with the barrier in RunConfig() on common_comm
-        # called from the spawned main()
+        # called from the spawned main() in common_comm
         common_comm.barrier()
         # Align with the barrier in run() on common_comm
         # called from the spawned main()
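Swapping `run_config.__dict__` for `run_config.model_dump()` suggests `RunConfig` is now a Pydantic model. A minimal sketch of why `model_dump()` is the right serialization hook for the YAML dump; the class and fields below are illustrative stand-ins, with only `log_level` and `spawn` taken from the diff:

```python
import yaml
from pydantic import BaseModel

class RunConfig(BaseModel):
    log_level: str = 'INFO'
    spawn: int = 0

rc = RunConfig(log_level='DEBUG', spawn=1)
# model_dump() returns a plain dict of built-in types (recursing into
# any nested models), so yaml.dump needs no custom representers;
# __dict__ can carry private or non-serializable state.
print(yaml.dump({'config': rc.model_dump()}, sort_keys=False))
```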
@@ -1421,13 +1420,15 @@ def process(
         if num_proc > 1:
             # Reset the scan_numbers to the original full set
             spec_scans.scan_numbers = scan_numbers
-            # Disconnect spawned workers and cleanup temporary files
+            # Align with the barrier in main() on common_comm
+            # when disconnecting the spawned worker
             common_comm.barrier()
+            # Disconnect spawned workers and cleanup temporary files
             sub_comm.Disconnect()
             for tmp_name in tmp_names:
                 os.remove(tmp_name)
 
-        # Construct the NeXus NXroot object
+        # Construct and return the NeXus NXroot object
         return self._get_nxroot(
             map_config, detector_config, data, independent_dimensions,
             all_scalar_data, placeholder_data)
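For orientation, the parent-side handshake these hunks annotate follows the usual mpi4py spawn/merge/barrier/disconnect pattern; a sketch with placeholder worker command and process count:

```python
from mpi4py import MPI

# Spawn workers, then merge the intercommunicator so parent and workers
# share one communicator ('worker.py' and maxprocs=2 are placeholders).
sub_comm = MPI.COMM_SELF.Spawn('python3', args=['worker.py'], maxprocs=2)
common_comm = sub_comm.Merge(False)  # False: parent keeps the low ranks

common_comm.barrier()   # pairs with a barrier in the workers' startup
# ... both sides run their share of the sub-pipeline here ...
common_comm.barrier()   # pairs with the workers' barrier before shutdown

sub_comm.Disconnect()   # collective: the workers must Disconnect() too
```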
@@ -1920,24 +1921,28 @@ class MPIMapProcessor(Processor):
     """A Processor that applies a parallel generic sub-pipeline to
     a map configuration.
     """
-    def process(self, data, sub_pipeline=None, inputdir='.', outputdir='.',
-                interactive=False, log_level='INFO'):
+    def process(self, data, config=None, sub_pipeline=None, inputdir=None,
+                outputdir=None, interactive=None, log_level=None):
         """Run a parallel generic sub-pipeline.
 
         :param data: Input data.
         :type data: list[PipelineData]
+        :param config: Initialization parameters for an instance of
+            common.models.map.MapConfig.
+        :type config: dict, optional
         :param sub_pipeline: The sub-pipeline.
         :type sub_pipeline: Pipeline, optional
         :param inputdir: Input directory, used only if files in the
-            input configuration are not absolute paths,
-            defaults to `'.'`.
+            input configuration are not absolute paths.
         :type inputdir: str, optional
         :param outputdir: Directory to which any output figures will
-            be saved, defaults to `'.'`.
+            be saved.
         :type outputdir: str, optional
-        :param interactive: Allows for user interactions, defaults to
-            `False`.
+        :param interactive: Allows for user interactions.
         :type interactive: bool, optional
+        :param log_level: Logger level (not case sensitive).
+        :type log_level: Literal[
+            'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], optional
         :return: The `data` field of the first item in the returned
             list of sub-pipeline items.
         """
@@ -1955,11 +1960,24 @@ def process(self, data, sub_pipeline=None, inputdir='.', outputdir='.',
         num_proc = comm.Get_size()
         rank = comm.Get_rank()
 
-        # Get the map configuration from data
-        map_config = self.get_config(
-            data, 'common.models.map.MapConfig', inputdir=inputdir)
+        # Get the validated map configuration
+        try:
+            map_config = self.get_config(
+                data, 'common.models.map.MapConfig', inputdir=inputdir)
+        except Exception:
+            self.logger.info('No valid Map configuration in input pipeline '
+                             'data, using config parameter instead.')
+            try:
+                # Local modules
+                from CHAP.common.models.map import MapConfig
+
+                map_config = MapConfig(**config, inputdir=inputdir)
+            except Exception as exc:
+                raise RuntimeError from exc
 
         # Create the spec reader configuration for each processor
+        # FIX: catered to EDD with one spec scan
+        assert len(map_config.spec_scans) == 1
         spec_scans = map_config.spec_scans[0]
         scan_numbers = spec_scans.scan_numbers
         num_scan = len(scan_numbers)
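Stripped of CHAP specifics, the new fallback reads as below: prefer a validated `MapConfig` found in the pipeline data, fall back to building one from the `config` keyword, and chain the original failure if neither works. A standalone restatement of the hunk's logic (the helper name and the RuntimeError message are illustrative, and the local import requires CHAP to be installed):

```python
def resolve_map_config(self, data, config, inputdir):
    """Prefer a validated MapConfig from the pipeline data; fall back
    to building one from the `config` keyword argument."""
    try:
        return self.get_config(
            data, 'common.models.map.MapConfig', inputdir=inputdir)
    except Exception:
        self.logger.info('No valid Map configuration in input pipeline '
                         'data, using config parameter instead.')
        # Local import, mirroring the diff
        from CHAP.common.models.map import MapConfig
        try:
            return MapConfig(**config, inputdir=inputdir)
        except Exception as exc:
            raise RuntimeError('No valid map configuration') from exc
```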
@@ -1984,7 +2002,7 @@ def process(self, data, sub_pipeline=None, inputdir='.', outputdir='.',
         run_config = {'inputdir': inputdir, 'outputdir': outputdir,
                       'interactive': interactive, 'log_level': log_level}
         run_config.update(sub_pipeline.get('config'))
-        run_config = RunConfig(run_config, comm)
+        run_config = RunConfig(**run_config, comm=comm)
         pipeline_config = []
         for item in sub_pipeline['pipeline']:
             if isinstance(item, dict):
@@ -1999,20 +2017,17 @@ def process(self, data, sub_pipeline=None, inputdir='.', outputdir='.',
                 pipeline_config.append(item)
 
         # Run the sub-pipeline on each processor
-        return run(
-            pipeline_config, inputdir=run_config.inputdir,
-            outputdir=run_config.outputdir, interactive=run_config.interactive,
-            logger=self.logger, comm=comm)
+        return run(run_config, pipeline_config, logger=self.logger, comm=comm)
 
 
 class MPISpawnMapProcessor(Processor):
     """A Processor that applies a parallel generic sub-pipeline to
     a map configuration by spawning worker processes.
     """
     def process(
-            self, data, num_proc=1, root_as_worker=True, collect_on_root=True,
-            sub_pipeline=None, inputdir='.', outputdir='.', interactive=False,
-            log_level='INFO'):
+            self, data, num_proc=1, root_as_worker=True, collect_on_root=False,
+            sub_pipeline=None, inputdir=None, outputdir=None, interactive=None,
+            log_level=None):
         """Spawn workers running a parallel generic sub-pipeline.
 
         :param data: Input data.
@@ -2023,7 +2038,7 @@ def process(
             defaults to `True`.
         :type root_as_worker: bool, optional
         :param collect_on_root: Collect the result of the spawned
-            workers on the root node, defaults to `True`.
+            workers on the root node, defaults to `False`.
         :type collect_on_root: bool, optional
         :param sub_pipeline: The sub-pipeline.
         :type sub_pipeline: Pipeline, optional
@@ -2037,6 +2052,9 @@ def process(
         :param interactive: Allows for user interactions, defaults to
             `False`.
         :type interactive: bool, optional
+        :param log_level: Logger level (not case sensitive).
+        :type log_level: Literal[
+            'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], optional
         :return: The `data` field of the first item in the returned
             list of sub-pipeline items.
         """
@@ -2067,7 +2085,7 @@ def process(
         run_config = {'inputdir': inputdir, 'outputdir': outputdir,
                       'interactive': interactive, 'log_level': log_level}
         run_config.update(sub_pipeline.get('config'))
-        run_config = RunConfig(run_config)
+        run_config = RunConfig(**run_config, logger=self.logger)
 
         # Create the sub-pipeline configuration for each processor
         spec_scans = map_config.spec_scans[0]
@@ -2128,7 +2146,7 @@ def process(
             tmp_names.append(f_name)
             with open(f_name, 'w') as f:
                 yaml.dump(
-                    {'config': run_config.__dict__,
+                    {'config': run_config.model_dump(),
                      'pipeline': pipeline_config[n_proc]},
                     f, sort_keys=False)
         sub_comm = MPI.COMM_SELF.Spawn(
@@ -2143,7 +2161,7 @@ def process(
 
         # Run the sub-pipeline on the root node
         if root_as_worker:
-            data = runner(run_config, pipeline_config[0], common_comm)
+            data = runner(run_config, pipeline_config[0], comm=common_comm)
         elif collect_on_root:
             run_config.spawn = 0
             pipeline_config = [{'common.MPICollectProcessor': {
@@ -2157,7 +2175,10 @@ def process(
 
         # Disconnect spawned workers and cleanup temporary files
         if num_proc > first_proc:
+            # Align with the barrier in main() on common_comm
+            # when disconnecting the spawned worker
             common_comm.barrier()
+            # Disconnect spawned workers and cleanup temporary files
             sub_comm.Disconnect()
             for tmp_name in tmp_names:
                 os.remove(tmp_name)
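The barriers added here (and in the earlier MapProcessor hunk) only work if the spawned side mirrors them; a worker-side sketch of what the spawned `main()` is expected to do with mpi4py:

```python
from mpi4py import MPI

# Worker-side sketch (illustrative): the spawned process retrieves its
# parent intercommunicator and must hit the same barriers as the parent.
parent = MPI.Comm.Get_parent()
common_comm = parent.Merge(True)  # True: workers take the high ranks

common_comm.barrier()   # pairs with the parent's startup barrier
# ... run the sub-pipeline assigned via the temporary YAML file ...
common_comm.barrier()   # pairs with the parent's pre-Disconnect barrier

parent.Disconnect()
```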