diff --git a/.copier-answers.yml b/.copier-answers.yml index a9d9e77b..baa40268 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -5,7 +5,7 @@ description: Diffraction data reduction for the European Spallation Source max_python: '3.13' min_python: '3.10' namespace_package: ess -nightly_deps: scipp,scippnexus,sciline,plopp,scippneutron,essreduce +nightly_deps: scipp,scippnexus,sciline,plopp,scippneutron,essreduce,tof orgname: scipp prettyname: ESSdiffraction projectname: essdiffraction diff --git a/docs/user-guide/dream/dream-data-reduction.ipynb b/docs/user-guide/dream/dream-data-reduction.ipynb index 2ba3bf91..8ef0c028 100644 --- a/docs/user-guide/dream/dream-data-reduction.ipynb +++ b/docs/user-guide/dream/dream-data-reduction.ipynb @@ -20,7 +20,6 @@ "outputs": [], "source": [ "import scipp as sc\n", - "\n", "from ess import dream, powder\n", "import ess.dream.data # noqa: F401\n", "from ess.powder.types import *" @@ -71,14 +70,14 @@ "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n", "# Edges for binning in d-spacing\n", "workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.0, 2.3434, 201, unit=\"angstrom\")\n", - "# Mask in time-of-flight to crop to valid range\n", - "workflow[TofMask] = lambda x: (x < sc.scalar(0.0, unit=\"ns\")) | (\n", - " x > sc.scalar(86e6, unit=\"ns\")\n", - ")\n", + "# Empty masks by default\n", + "workflow[TofMask] = None\n", "workflow[TwoThetaMask] = None\n", "workflow[WavelengthMask] = None\n", "# No pixel masks\n", - "workflow = powder.with_pixel_mask_filenames(workflow, [])" + "workflow = powder.with_pixel_mask_filenames(workflow, [])\n", + "# Time-of-flight lookup table\n", + "workflow[TimeOfFlightLookupTableFilename] = dream.data.tof_lookup_table_high_flux()" ] }, { @@ -86,7 +85,7 @@ "id": "6", "metadata": {}, "source": [ - "## Use the workflow\n", + "## Use the reduction workflow\n", "\n", "We can visualize the graph for computing the final normalized result for intensity as a function of 
time-of-flight:" ] @@ -152,7 +151,7 @@ "in the documentation of ESSdiffraction.\n", "See https://scipp.github.io/essdiffraction/\n", "\"\"\"\n", - "cif_data.save('reduced.cif')" + "cif_data.save(\"reduced.cif\")" ] }, { @@ -192,9 +191,9 @@ "outputs": [], "source": [ "two_theta = sc.linspace(\"two_theta\", 0.8, 2.4, 301, unit=\"rad\")\n", - "intermediates[MaskedData[SampleRun]].hist(two_theta=two_theta, wavelength=300).plot(\n", - " norm=\"log\"\n", - ")" + "intermediates[MaskedData[SampleRun]].hist(\n", + " two_theta=two_theta, wavelength=300\n", + ").plot(norm=\"log\")" ] }, { @@ -216,7 +215,7 @@ "outputs": [], "source": [ "workflow[TwoThetaBins] = sc.linspace(\n", - " dim=\"two_theta\", unit=\"rad\", start=0.8, stop=2.4, num=17\n", + " dim=\"two_theta\", unit=\"rad\", start=0.8, stop=2.4, num=201\n", ")" ] }, @@ -237,31 +236,13 @@ "id": "19", "metadata": {}, "outputs": [], - "source": [ - "angle = sc.midpoints(grouped_dspacing.coords[\"two_theta\"])\n", - "sc.plot(\n", - " {\n", - " f\"{angle[group].value:.3f} {angle[group].unit}\": grouped_dspacing[\n", - " \"two_theta\", group\n", - " ].hist()\n", - " for group in range(2, 6)\n", - " }\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], "source": [ "grouped_dspacing.hist().plot(norm=\"log\")" ] }, { "cell_type": "markdown", - "id": "21", + "id": "20", "metadata": {}, "source": [ "## Normalizing by monitor\n", @@ -283,7 +264,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -292,7 +273,7 @@ }, { "cell_type": "markdown", - "id": "23", + "id": "22", "metadata": {}, "source": [ "In addition to the parameters used before, we also need to provide filenames for monitor data and a position of the monitor as that is not saved in the simulation files:" @@ -301,14 +282,14 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "23", "metadata": {}, "outputs": 
[], "source": [ "workflow[MonitorFilename[SampleRun]] = dream.data.simulated_monitor_diamond_sample()\n", "workflow[MonitorFilename[VanadiumRun]] = dream.data.simulated_monitor_vanadium_sample()\n", "workflow[MonitorFilename[BackgroundRun]] = dream.data.simulated_monitor_empty_can()\n", - "workflow[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit='mm')\n", + "workflow[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit=\"mm\")\n", "\n", "# These are the same as at the top of the notebook:\n", "workflow[Filename[SampleRun]] = dream.data.simulated_diamond_sample()\n", @@ -318,18 +299,17 @@ "workflow[NeXusDetectorName] = \"mantle\"\n", "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n", "workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.0, 2.3434, 201, unit=\"angstrom\")\n", - "workflow[TofMask] = lambda x: (x < sc.scalar(0.0, unit=\"ns\")) | (\n", - " x > sc.scalar(86e6, unit=\"ns\")\n", - ")\n", + "workflow[TofMask] = None\n", "workflow[TwoThetaMask] = None\n", "workflow[WavelengthMask] = None\n", - "workflow = powder.with_pixel_mask_filenames(workflow, [])" + "workflow = powder.with_pixel_mask_filenames(workflow, [])\n", + "workflow[TimeOfFlightLookupTableFilename] = dream.data.tof_lookup_table_high_flux()" ] }, { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -339,7 +319,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -352,7 +332,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -361,7 +341,7 @@ }, { "cell_type": "markdown", - "id": "28", + "id": "27", "metadata": {}, "source": [ "Comparing the final, normalized result shows that it agrees with the data that was normalized by proton-charge:" @@ -370,14 +350,11 @@ { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "28", "metadata": 
{}, "outputs": [], "source": [ - "sc.plot({\n", - " 'By proton charge': histogram,\n", - " 'By monitor': normalized_by_monitor.hist()\n", - "})" + "sc.plot({\"By proton charge\": histogram, \"By monitor\": normalized_by_monitor.hist()})" ] } ], @@ -396,8 +373,7 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/docs/user-guide/dream/workflow-widget-dream.ipynb b/docs/user-guide/dream/workflow-widget-dream.ipynb index 5ef1cf83..d190b167 100644 --- a/docs/user-guide/dream/workflow-widget-dream.ipynb +++ b/docs/user-guide/dream/workflow-widget-dream.ipynb @@ -47,7 +47,7 @@ }, "outputs": [], "source": [ - "from ess.powder.types import DspacingBins, Filename, SampleRun, VanadiumRun\n", + "from ess.powder.types import DspacingBins, Filename, SampleRun, TimeOfFlightLookupTableFilename, VanadiumRun\n", "import ess.dream.data # noqa: F401\n", "\n", "select = widget.children[0].children[0]\n", @@ -66,6 +66,7 @@ "# Set parameters\n", "pbox._input_widgets[Filename[SampleRun]].children[0].value = dream.data.simulated_diamond_sample()\n", "pbox._input_widgets[Filename[VanadiumRun]].children[0].value = dream.data.simulated_vanadium_sample()\n", + "pbox._input_widgets[TimeOfFlightLookupTableFilename].children[0].value = dream.data.tof_lookup_table_high_flux()\n", "pbox._input_widgets[DspacingBins].fields[\"stop\"].value = 2.3434\n", "# Run the workflow\n", "rbox = wfw.result_box\n", @@ -133,8 +134,7 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb index 24614ba5..822d2e12 100644 --- a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb +++ 
b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb @@ -239,7 +239,7 @@ "source": [ "results = workflow.compute(\n", " (\n", - " DetectorData[SampleRun],\n", + " TofData[SampleRun],\n", " MaskedData[SampleRun],\n", " FilteredData[SampleRun],\n", " FilteredData[VanadiumRun],\n", @@ -254,7 +254,7 @@ "metadata": {}, "outputs": [], "source": [ - "results[DetectorData[SampleRun]]" + "results[TofData[SampleRun]]" ] }, { @@ -393,8 +393,7 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml index 497e9793..29c12ebf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ dependencies = [ "scipp>=24.09.1", "scippneutron>=25.02.0", "scippnexus>=23.12.0", + "tof>=25.01.2", ] dynamic = ["version"] diff --git a/requirements/base.in b/requirements/base.in index 98fc645c..9232804e 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -12,3 +12,4 @@ sciline>=24.06.0 scipp>=24.09.1 scippneutron>=25.02.0 scippnexus>=23.12.0 +tof>=25.01.2 diff --git a/requirements/base.txt b/requirements/base.txt index d332b7b5..c630b71a 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:4b779f60b811ecde0982bc9dc5df5ed5b296b876 +# SHA1:bd57e2addebc398f9de7064eb2a5c993a359ba46 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -45,6 +45,8 @@ h5py==3.12.1 # scippnexus idna==3.10 # via email-validator +importlib-resources==6.5.2 + # via tof ipydatawidgets==4.3.5 # via pythreejs ipython==8.32.0 @@ -102,6 +104,7 @@ plopp==24.10.0 # via # -r base.in # scippneutron + # tof prompt-toolkit==3.0.50 # via ipython ptyprocess==0.7.0 @@ -135,6 +138,7 @@ scipp==25.2.0 # essreduce # scippneutron # scippnexus + # tof scippneutron==25.2.0 # via # -r base.in @@ -148,10 +152,13 @@ scipy==1.15.2 # via # scippneutron # scippnexus + # tof six==1.17.0 # via 
python-dateutil stack-data==0.6.3 # via ipython +tof==25.2.0 + # via -r base.in toolz==1.0.0 # via # dask diff --git a/requirements/nightly.in b/requirements/nightly.in index b71e3631..4219857b 100644 --- a/requirements/nightly.in +++ b/requirements/nightly.in @@ -17,3 +17,4 @@ sciline @ git+https://github.com/scipp/sciline@main plopp @ git+https://github.com/scipp/plopp@main scippneutron @ git+https://github.com/scipp/scippneutron@main essreduce @ git+https://github.com/scipp/essreduce@main +tof @ git+https://github.com/scipp/tof@main diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 474900a4..fead986a 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -1,4 +1,4 @@ -# SHA1:b20670ae7cbdca0a2dae7facc845a0a636ee537c +# SHA1:366263fa92dc0eb9f81d46f056e0c80a931f671e # # This file is autogenerated by pip-compile-multi # To update, run: @@ -54,6 +54,8 @@ idna==3.10 # via # email-validator # requests +importlib-resources==6.5.2 + # via tof iniconfig==2.0.0 # via pytest ipydatawidgets==4.3.5 @@ -122,6 +124,7 @@ plopp @ git+https://github.com/scipp/plopp@main # via # -r nightly.in # scippneutron + # tof pluggy==1.5.0 # via pytest pooch==1.8.2 @@ -166,6 +169,7 @@ scipp==100.0.0.dev0 # essreduce # scippneutron # scippnexus + # tof scippneutron @ git+https://github.com/scipp/scippneutron@main # via # -r nightly.in @@ -179,10 +183,13 @@ scipy==1.15.2 # via # scippneutron # scippnexus + # tof six==1.17.0 # via python-dateutil stack-data==0.6.3 # via ipython +tof @ git+https://github.com/scipp/tof@main + # via -r nightly.in toolz==1.0.0 # via # dask diff --git a/src/ess/dream/__init__.py b/src/ess/dream/__init__.py index b080c6f2..f2350845 100644 --- a/src/ess/dream/__init__.py +++ b/src/ess/dream/__init__.py @@ -7,6 +7,7 @@ import importlib.metadata +from . 
import beamline from .instrument_view import instrument_view from .io import load_geant4_csv, nexus from .workflow import DreamGeant4Workflow, default_parameters @@ -21,6 +22,7 @@ __all__ = [ 'DreamGeant4Workflow', '__version__', + 'beamline', 'default_parameters', 'instrument_view', 'load_geant4_csv', diff --git a/src/ess/dream/beamline.py b/src/ess/dream/beamline.py new file mode 100644 index 00000000..d11f9092 --- /dev/null +++ b/src/ess/dream/beamline.py @@ -0,0 +1,256 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) + +import enum + +import scipp as sc +from scippneutron.chopper import DiskChopper + + +class InstrumentConfiguration(enum.Enum): + """Choose between high-flux and high-resolution configurations.""" + + high_flux = enum.auto() + high_resolution = enum.auto() + + +def choppers(configuration: InstrumentConfiguration) -> dict[str, DiskChopper]: + """Return the chopper configuration for the given instrument configuration.""" + + match configuration: + case InstrumentConfiguration.high_flux: + return { + "psc1": DiskChopper( + frequency=sc.scalar(14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(286 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.145], unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[ + -1.23, + 70.49, + 84.765, + 113.565, + 170.29, + 271.635, + 286.035, + 301.17, + ], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[ + 1.23, + 73.51, + 88.035, + 116.835, + 175.31, + 275.565, + 289.965, + 303.63, + ], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "psc2": DiskChopper( + frequency=sc.scalar(-14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(-236, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.155], unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[ + -1.23, + 27.0, + 55.8, + 142.385, + 156.765, 
+ 214.115, + 257.23, + 315.49, + ], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[ + 1.23, + 30.6, + 59.4, + 145.615, + 160.035, + 217.885, + 261.17, + 318.11, + ], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "oc": DiskChopper( + frequency=sc.scalar(14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(297 - 180 - 90, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.174], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-27.6 * 0.5], unit="deg" + ), + slit_end=sc.array(dims=["cutout"], values=[27.6 * 0.5], unit="deg"), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "bcc": DiskChopper( + frequency=sc.scalar(112.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(215 - 180, unit="deg"), + # Use 240 to reduce overlap between frames + # phase=sc.scalar(240 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 9.78], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-36.875, 143.125], unit="deg" + ), + slit_end=sc.array( + dims=["cutout"], values=[36.875, 216.875], unit="deg" + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "t0": DiskChopper( + frequency=sc.scalar(28.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(280 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 13.05], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-314.9 * 0.5], unit="deg" + ), + slit_end=sc.array( + dims=["cutout"], values=[314.9 * 0.5], unit="deg" + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + } + case InstrumentConfiguration.high_resolution: + return ( + { + "psc1": DiskChopper( + frequency=sc.scalar(15 * 14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(25 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.145], 
unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[ + -1.23, + 70.49, + 84.765, + 113.565, + 170.29, + 271.635, + 286.035, + 301.17, + ], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[ + 1.23, + 73.51, + 88.035, + 116.835, + 175.31, + 275.565, + 289.965, + 303.63, + ], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "psc2": DiskChopper( + frequency=sc.scalar(-14.0 * 14, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(-100.5, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.155], unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[ + -1.23, + 27.0, + 55.8, + 142.385, + 156.765, + 214.115, + 257.23, + 315.49, + ], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[ + 1.23, + 30.6, + 59.4, + 145.615, + 160.035, + 217.885, + 261.17, + 318.11, + ], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "oc": DiskChopper( + frequency=sc.scalar(14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(297 - 180 - 90, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.174], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-27.6 * 0.5], unit="deg" + ), + slit_end=sc.array( + dims=["cutout"], values=[27.6 * 0.5], unit="deg" + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "bcc": DiskChopper( + frequency=sc.scalar(112.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(200 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 9.78], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-36.875, 143.125], unit="deg" + ), + slit_end=sc.array( + dims=["cutout"], values=[36.875, 216.875], unit="deg" + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + "t0": DiskChopper( + frequency=sc.scalar(28.0, unit="Hz"), + 
+ beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(270 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 13.05], unit="m"), + slit_begin=sc.array( + dims=["cutout"], values=[-314.9 * 0.5], unit="deg" + ), + slit_end=sc.array( + dims=["cutout"], values=[314.9 * 0.5], unit="deg" + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ), + }, + ) diff --git a/src/ess/dream/data.py b/src/ess/dream/data.py index 57b4a9c4..502ba3b3 100644 --- a/src/ess/dream/data.py +++ b/src/ess/dream/data.py @@ -27,6 +27,7 @@ def _make_pooch(): "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_diam_in_can.dat": "md5:ef24f4a4186c628574046e6629e31611", # noqa: E501 "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat": "md5:e63456c347fb36a362a0b5ae2556b3cf", # noqa: E501 "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_vana_inc_coh.dat": "md5:701d66792f20eb283a4ce76bae0c8f8f", # noqa: E501 + "DREAM-high-flux-tof-lookup-table.h5": "md5:404145a970ed1188e524cba10194610e", # noqa: E501 }, ) @@ -181,3 +182,18 @@ def simulated_monitor_empty_can() -> str: This is the DREAM cave monitor for ``simulated_empty_can``. """ return get_path("DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat") + + +def tof_lookup_table_high_flux() -> str: + """Path to a HDF5 file containing a lookup table for high-flux ToF. + + The table was created using the ``tof`` package and the chopper settings for the + DREAM instrument in high-flux mode. + Note that the phase of the band-control chopper (BCC) was set to 240 degrees to + match that of the simulated data (this has since been found to be non-optimal as it + leads to time overlap between the two frames). 
+ + The notebook that was used to create the table can be found at + https://github.com/scipp/essdiffraction/blob/main/tools/dream-make-tof-lookup-table.ipynb + """ + return get_path("DREAM-high-flux-tof-lookup-table.h5") diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py index f68e93c0..3c8ea6b6 100644 --- a/src/ess/dream/io/geant4.py +++ b/src/ess/dream/io/geant4.py @@ -4,6 +4,7 @@ import numpy as np import sciline import scipp as sc +import scippneutron as scn import scippnexus as snx from scippneutron.metadata import ESS_SOURCE @@ -16,9 +17,11 @@ CaveMonitor, CaveMonitorPosition, DetectorData, + DetectorLtotal, Filename, MonitorData, MonitorFilename, + MonitorLtotal, MonitorType, NeXusComponent, NeXusDetectorName, @@ -88,14 +91,14 @@ def get_calibrated_geant4_detector( Since the Geant4 detectors already have computed positions as well as logical shape, this just extracts the relevant event data. """ - return detector['events'].copy(deep=False) + return detector["events"].copy(deep=False) def _load_raw_events(file_path: str) -> sc.DataArray: table = sc.io.load_csv( file_path, sep="\t", header_parser="bracket", data_columns=[] ) - table.coords['sumo'] = table.coords['det ID'] + table.coords["sumo"] = table.coords["det ID"] table.coords.pop("lambda", None) table = table.rename_dims(row="event") return sc.DataArray( @@ -122,7 +125,7 @@ def group(key: str, da: sc.DataArray) -> sc.DataArray: res = da.group("sumo", *elements) else: res = da.group(*elements) - res.coords['position'] = res.bins.coords.pop('position').bins.mean() + res.coords["position"] = res.bins.coords.pop("position").bins.mean() res.bins.coords.pop("sector", None) res.bins.coords.pop("sumo", None) return res @@ -246,18 +249,68 @@ def geant4_load_calibration(filename: CalibrationFilename) -> CalibrationData: return CalibrationData(None) -def dummy_assemble_detector_data( +def assemble_detector_data( detector: CalibratedBeamline[RunType], ) -> DetectorData[RunType]: - """Dummy 
assembly of detector data, detector already contains neutron data.""" - return DetectorData[RunType](detector) + """ + In the raw data, the tofs extend beyond 71ms, this is thus not an event_time_offset. + We convert the detector data to data which resembles NeXus data, with + event_time_zero and event_time_offset coordinates. + Parameters + ---------- + detector: + The calibrated detector data. + """ -def dummy_assemble_monitor_data( + da = detector.copy(deep=False) + da.bins.coords["tof"] = da.bins.coords["tof"].to(unit="us") + + period = (1.0 / sc.scalar(14.0, unit="Hz")).to(unit="us") + # Bin the data into bins with a 71ms period. + npulses = int((da.bins.coords["tof"].max() / period).ceil().value) + da = da.bin(tof=sc.arange("tof", npulses + 1) * period) + # Add a event_time_zero coord for each bin, but not as bin edges, + # as all events in the same pulse have the same event_time_zero, hence the `[:2]` + # We need to pick a start time. The actual value does not matter. We chose the + # random date of Friday, November 1, 2024 8:40:34.078 + da.coords["event_time_zero"] = ( + sc.scalar(1730450434078980000, unit="ns").to(unit="us") + da.coords["tof"] + )[:npulses] + # Remove the meaningless tof coord at the top level + del da.coords["tof"] + da = da.rename_dims(tof="event_time_zero") + # Compute a event_time_offset as tof % period + da.bins.coords["event_time_offset"] = (da.bins.coords.pop("tof") % period).to( + unit="us" + ) + # Add a event_time_zero coord for each event + da.bins.coords["event_time_zero"] = sc.bins_like( + da.bins.coords["event_time_offset"], da.coords["event_time_zero"] + ) + da = da.bins.concat('event_time_zero') + # Add a useful Ltotal coordinate + graph = scn.conversion.graph.beamline.beamline(scatter=True) + da = da.transform_coords("Ltotal", graph=graph) + return DetectorData[RunType](da) + + +def assemble_monitor_data( monitor: CalibratedMonitor[RunType, MonitorType], ) -> MonitorData[RunType, MonitorType]: - """Dummy assembly of monitor 
data, monitor already contains neutron data.""" - return MonitorData[RunType, MonitorType](monitor) + """ + Dummy assembly of monitor data, monitor already contains neutron data. + We simply add a Ltotal coordinate necessary to calculate the time-of-flight. + + Parameters + ---------- + monitor: + The calibrated monitor data. + """ + graph = scn.conversion.graph.beamline.beamline(scatter=False) + return MonitorData[RunType, MonitorType]( + monitor.transform_coords("Ltotal", graph=graph) + ) def dummy_source_position() -> Position[snx.NXsource, RunType]: @@ -272,6 +325,28 @@ def dummy_sample_position() -> Position[snx.NXsample, RunType]: ) +def extract_detector_ltotal(detector: DetectorData[RunType]) -> DetectorLtotal[RunType]: + """ + Extract Ltotal from the detector data. + TODO: This is a temporary implementation. We should instead read the positions + separately from the event data, so we don't need to re-load the positions every time + new events come in while streaming live data. + """ + return DetectorLtotal[RunType](detector.coords["Ltotal"]) + + +def extract_monitor_ltotal( + monitor: MonitorData[RunType, MonitorType], +) -> MonitorLtotal[RunType, MonitorType]: + """ + Extract Ltotal from the monitor data. + TODO: This is a temporary implementation. We should instead read the positions + separately from the event data, so we don't need to re-load the positions every time + new events come in while streaming live data. 
+ """ + return MonitorLtotal[RunType, MonitorType](monitor.coords["Ltotal"]) + + def dream_beamline() -> Beamline: return Beamline( name="DREAM", @@ -296,10 +371,12 @@ def LoadGeant4Workflow() -> sciline.Pipeline: wf.insert(load_mcstas_monitor) wf.insert(geant4_load_calibration) wf.insert(get_calibrated_geant4_detector) - wf.insert(dummy_assemble_detector_data) - wf.insert(dummy_assemble_monitor_data) + wf.insert(assemble_detector_data) + wf.insert(assemble_monitor_data) wf.insert(dummy_source_position) wf.insert(dummy_sample_position) + wf.insert(extract_detector_ltotal) + wf.insert(extract_monitor_ltotal) wf.insert(dream_beamline) wf.insert(ess_source) return wf diff --git a/src/ess/dream/parameters.py b/src/ess/dream/parameters.py index 5dba17c3..70e2a434 100644 --- a/src/ess/dream/parameters.py +++ b/src/ess/dream/parameters.py @@ -16,6 +16,7 @@ PixelMaskFilename, ReducedTofCIF, SampleRun, + TimeOfFlightLookupTableFilename, UncertaintyBroadcastMode, VanadiumRun, ) @@ -40,6 +41,9 @@ parameter_registry[CalibrationFilename] = FilenameParameter.from_type( CalibrationFilename ) +parameter_registry[TimeOfFlightLookupTableFilename] = FilenameParameter.from_type( + TimeOfFlightLookupTableFilename +) parameter_registry[MonitorFilename[SampleRun]] = FilenameParameter.from_type( MonitorFilename[SampleRun] ) diff --git a/src/ess/dream/workflow.py b/src/ess/dream/workflow.py index c9720976..bd9e63bf 100644 --- a/src/ess/dream/workflow.py +++ b/src/ess/dream/workflow.py @@ -28,6 +28,7 @@ VanadiumRun, WavelengthMask, ) +from ess.reduce import time_of_flight from ess.reduce.parameter import parameter_mappers from ess.reduce.workflow import register_workflow @@ -81,7 +82,9 @@ def DreamGeant4Workflow(*, run_norm: RunNormalization) -> sciline.Pipeline: for provider in itertools.chain(powder_providers, _dream_providers): wf.insert(provider) insert_run_normalization(wf, run_norm) - for key, value in default_parameters().items(): + for key, value in itertools.chain( + 
default_parameters().items(), time_of_flight.default_parameters().items() + ): wf[key] = value wf.typical_outputs = typical_outputs return wf diff --git a/src/ess/powder/conversion.py b/src/ess/powder/conversion.py index ed16f8e0..0fa62e8b 100644 --- a/src/ess/powder/conversion.py +++ b/src/ess/powder/conversion.py @@ -4,24 +4,43 @@ Coordinate transformations for powder diffraction. """ +import numpy as np +import sciline as sl import scipp as sc import scippneutron as scn +from ess.reduce import time_of_flight + from .calibration import OutputCalibrationData from .correction import merge_calibration from .logging import get_logger from .types import ( CalibrationData, DataWithScatteringCoordinates, + DetectorData, + DetectorLtotal, + DistanceResolution, DspacingData, ElasticCoordTransformGraph, FilteredData, IofDspacing, IofTof, + LookupTableRelativeErrorThreshold, + LtotalRange, MaskedData, MonitorData, + MonitorLtotal, MonitorType, + PulsePeriod, + PulseStride, + PulseStrideOffset, RunType, + SimulationResults, + TimeOfFlightLookupTable, + TimeOfFlightLookupTableFilename, + TimeResolution, + TofData, + TofMonitorData, WavelengthMonitor, ) @@ -213,11 +232,11 @@ def convert_to_dspacing( out = data.transform_coords(["dspacing"], graph=graph, keep_intermediate=False) else: out = to_dspacing_with_calibration(data, calibration=calibration) - for key in ('wavelength', 'two_theta'): + for key in ("wavelength", "two_theta"): if key in out.coords.keys(): out.coords.set_aligned(key, False) - out.bins.coords.pop('tof', None) - out.bins.coords.pop('wavelength', None) + out.bins.coords.pop("tof", None) + out.bins.coords.pop("wavelength", None) return DspacingData[RunType](out) @@ -229,8 +248,82 @@ def convert_reduced_to_tof( ) -def convert_monitor_do_wavelength( +def build_tof_lookup_table( + simulation: SimulationResults, + ltotal_range: LtotalRange, + pulse_period: PulsePeriod, + pulse_stride: PulseStride, + pulse_stride_offset: PulseStrideOffset, + distance_resolution: 
DistanceResolution, + time_resolution: TimeResolution, + error_threshold: LookupTableRelativeErrorThreshold, +) -> TimeOfFlightLookupTable: + wf = sl.Pipeline( + time_of_flight.providers(), params=time_of_flight.default_parameters() + ) + wf[time_of_flight.SimulationResults] = simulation + wf[time_of_flight.LtotalRange] = ltotal_range + wf[time_of_flight.PulsePeriod] = pulse_period + wf[time_of_flight.PulseStride] = pulse_stride + wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset + wf[time_of_flight.DistanceResolution] = distance_resolution + wf[time_of_flight.TimeResolution] = time_resolution + wf[time_of_flight.LookupTableRelativeErrorThreshold] = error_threshold + return wf.compute(time_of_flight.TimeOfFlightLookupTable) + + +def load_tof_lookup_table( + filename: TimeOfFlightLookupTableFilename, +) -> TimeOfFlightLookupTable: + return TimeOfFlightLookupTable(sc.io.load_hdf5(filename)) + + +def compute_detector_time_of_flight( + detector_data: DetectorData[RunType], + lookup: TimeOfFlightLookupTable, + ltotal: DetectorLtotal[RunType], + pulse_period: PulsePeriod, + pulse_stride: PulseStride, + pulse_stride_offset: PulseStrideOffset, +) -> TofData[RunType]: + wf = sl.Pipeline( + time_of_flight.providers(), params=time_of_flight.default_parameters() + ) + wf[time_of_flight.RawData] = detector_data + wf[time_of_flight.TimeOfFlightLookupTable] = lookup + wf[time_of_flight.Ltotal] = ltotal + wf[time_of_flight.PulsePeriod] = pulse_period + wf[time_of_flight.PulseStride] = pulse_stride + wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset + return TofData[RunType](wf.compute(time_of_flight.TofData)) + + +def compute_monitor_time_of_flight( monitor: MonitorData[RunType, MonitorType], + lookup: TimeOfFlightLookupTable, + ltotal: MonitorLtotal[RunType, MonitorType], + pulse_period: PulsePeriod, + pulse_stride: PulseStride, + pulse_stride_offset: PulseStrideOffset, +) -> TofMonitorData[RunType, MonitorType]: + wf = sl.Pipeline( + time_of_flight.providers(), 
params=time_of_flight.default_parameters() + ) + wf.insert(time_of_flight.resample_tof_data) + wf[time_of_flight.RawData] = monitor + wf[time_of_flight.TimeOfFlightLookupTable] = lookup + wf[time_of_flight.Ltotal] = ltotal + wf[time_of_flight.PulsePeriod] = pulse_period + wf[time_of_flight.PulseStride] = pulse_stride + wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset + out = wf.compute(time_of_flight.ResampledTofData) + inds = out.values == 0.0 + out.values[inds] = np.nan + return TofMonitorData[RunType, MonitorType](out) + + +def convert_monitor_to_wavelength( + monitor: TofMonitorData[RunType, MonitorType], ) -> WavelengthMonitor[RunType, MonitorType]: graph = { **scn.conversion.graph.beamline.beamline(scatter=False), @@ -246,5 +339,8 @@ def convert_monitor_do_wavelength( add_scattering_coordinates_from_positions, convert_to_dspacing, convert_reduced_to_tof, - convert_monitor_do_wavelength, + convert_monitor_to_wavelength, + compute_detector_time_of_flight, + compute_monitor_time_of_flight, + load_tof_lookup_table, ) diff --git a/src/ess/powder/correction.py b/src/ess/powder/correction.py index 7f46e8c1..0a7c37c1 100644 --- a/src/ess/powder/correction.py +++ b/src/ess/powder/correction.py @@ -55,7 +55,7 @@ def normalize_by_monitor_histogram( norm = broadcast_uncertainties( monitor, prototype=detector, mode=uncertainty_broadcast_mode ) - return detector.bins / sc.lookup(norm, dim="wavelength") + return NormalizedRunData[RunType](detector.bins / sc.lookup(norm, dim="wavelength")) def normalize_by_monitor_integrated( @@ -104,8 +104,8 @@ def normalize_by_monitor_integrated( det_coord = ( detector.coords[dim] if dim in detector.coords else detector.bins.coords[dim] ) - lo = det_coord.min() - hi = det_coord.max() + lo = det_coord.nanmin() + hi = det_coord.nanmax() monitor = monitor[dim, lo:hi] # Strictly limit `monitor` to the range of `detector`. 
edges = sc.concat([lo, monitor.coords[dim][1:-1], hi], dim=dim) diff --git a/src/ess/powder/filtering.py b/src/ess/powder/filtering.py index 4c7b0719..7ab27e8a 100644 --- a/src/ess/powder/filtering.py +++ b/src/ess/powder/filtering.py @@ -12,7 +12,7 @@ import scipp as sc -from .types import DetectorData, FilteredData, RunType +from .types import FilteredData, RunType, TofData def _equivalent_bin_indices(a, b) -> bool: @@ -72,7 +72,7 @@ def remove_bad_pulses( return filtered -def filter_events(data: DetectorData[RunType]) -> FilteredData[RunType]: +def filter_events(data: TofData[RunType]) -> FilteredData[RunType]: """Remove bad events. Attention diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py index 6c853b9f..fd2cfe77 100644 --- a/src/ess/powder/types.py +++ b/src/ess/powder/types.py @@ -17,6 +17,7 @@ from scippneutron.metadata import Person, Software from ess.reduce.nexus import types as reduce_t +from ess.reduce.time_of_flight import types as tof_t from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode # 1 TypeVars used to parametrize the generic parts of the workflow @@ -41,6 +42,16 @@ DetectorBankSizes = reduce_t.DetectorBankSizes +PulsePeriod = tof_t.PulsePeriod +PulseStride = tof_t.PulseStride +PulseStrideOffset = tof_t.PulseStrideOffset +DistanceResolution = tof_t.DistanceResolution +TimeResolution = tof_t.TimeResolution +LtotalRange = tof_t.LtotalRange +LookupTableRelativeErrorThreshold = tof_t.LookupTableRelativeErrorThreshold +TimeOfFlightLookupTable = tof_t.TimeOfFlightLookupTable +SimulationResults = tof_t.SimulationResults + RunType = TypeVar("RunType", SampleRun, VanadiumRun) MonitorType = TypeVar("MonitorType", CaveMonitor, BunkerMonitor) @@ -90,6 +101,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: """Detector calibration data.""" +class TofData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Data with time-of-flight coordinate.""" + + class 
DataWithScatteringCoordinates(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Data with scattering coordinates computed for all events: wavelength, 2theta, d-spacing.""" @@ -193,7 +208,28 @@ class RawDataAndMetadata(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup): CIFAuthors = NewType('CIFAuthors', list[Person]) """List of authors to save to output CIF files.""" -ReducedTofCIF = NewType('ReducedTofCIF', cif.CIF) +ReducedTofCIF = NewType("ReducedTofCIF", cif.CIF) """Reduced data in time-of-flight, ready to be saved to a CIF file.""" + +class DetectorLtotal(sciline.Scope[RunType, sc.Variable], sc.Variable): + """Total path length of neutrons from source to detector (L1 + L2).""" + + +class MonitorLtotal( + sciline.ScopeTwoParams[RunType, MonitorType, sc.Variable], sc.Variable +): + """Total path length of neutrons from source to monitor.""" + + +class TofMonitorData( + sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray +): + """Monitor data with time-of-flight coordinate.""" + + +TimeOfFlightLookupTableFilename = NewType("TimeOfFlightLookupTableFilename", str) +"""Filename of the time-of-flight lookup table.""" + + del sc, sciline, NewType, TypeVar diff --git a/src/ess/snspowder/powgen/data.py b/src/ess/snspowder/powgen/data.py index 6ba18112..4d433fdf 100644 --- a/src/ess/snspowder/powgen/data.py +++ b/src/ess/snspowder/powgen/data.py @@ -10,11 +10,11 @@ CalibrationData, CalibrationFilename, DetectorBankSizes, - DetectorData, Filename, ProtonCharge, RawDataAndMetadata, RunType, + TofData, ) _version = "1" @@ -120,14 +120,14 @@ def pooch_load_calibration( def extract_raw_data( dg: RawDataAndMetadata[RunType], sizes: DetectorBankSizes -) -> DetectorData[RunType]: +) -> TofData[RunType]: """Return the events from a loaded data group.""" # Remove the tof binning and dimension, as it is not needed and it gets in the way # of masking. 
out = dg["data"].squeeze() out.coords.pop("tof", None) out = out.fold(dim="spectrum", sizes=sizes) - return DetectorData[RunType](out) + return TofData[RunType](out) def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunType]: @@ -136,7 +136,7 @@ def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunTy def extract_accumulated_proton_charge( - data: DetectorData[RunType], + data: TofData[RunType], ) -> AccumulatedProtonCharge[RunType]: """Return the stored accumulated proton charge from a loaded data group.""" return AccumulatedProtonCharge[RunType](data.coords["gd_prtn_chrg"]) diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py index fe880b0f..2ac9fc94 100644 --- a/tests/dream/geant4_reduction_test.py +++ b/tests/dream/geant4_reduction_test.py @@ -23,12 +23,15 @@ CalibrationFilename, CaveMonitorPosition, CIFAuthors, + DistanceResolution, DspacingBins, DspacingData, Filename, IofDspacing, IofDspacingTwoTheta, IofTof, + LookupTableRelativeErrorThreshold, + LtotalRange, MaskedData, MonitorFilename, NeXusDetectorName, @@ -36,6 +39,9 @@ Position, ReducedTofCIF, SampleRun, + SimulationResults, + TimeOfFlightLookupTableFilename, + TimeResolution, TofMask, TwoThetaBins, TwoThetaMask, @@ -43,6 +49,7 @@ VanadiumRun, WavelengthMask, ) +from ess.reduce import time_of_flight from ess.reduce import workflow as reduce_workflow sample = sc.vector([0.0, 0.0, 0.0], unit='mm') @@ -56,11 +63,12 @@ MonitorFilename[SampleRun]: dream.data.simulated_monitor_diamond_sample(), MonitorFilename[VanadiumRun]: dream.data.simulated_monitor_vanadium_sample(), MonitorFilename[BackgroundRun]: dream.data.simulated_monitor_empty_can(), + TimeOfFlightLookupTableFilename: dream.data.tof_lookup_table_high_flux(), CalibrationFilename: None, UncertaintyBroadcastMode: UncertaintyBroadcastMode.drop, DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'), - TofMask: lambda x: (x < sc.scalar(0.0, unit='ns')) 
- | (x > sc.scalar(86e6, unit='ns')), + TofMask: lambda x: (x < sc.scalar(0.0, unit='us')) + | (x > sc.scalar(86e3, unit='us')), Position[snx.NXsample, SampleRun]: sample, Position[snx.NXsample, VanadiumRun]: sample, Position[snx.NXsource, SampleRun]: source, @@ -108,6 +116,24 @@ def test_pipeline_can_compute_dspacing_result(workflow): assert sc.identical(result.coords['dspacing'], params[DspacingBins]) +def test_pipeline_can_compute_dspacing_result_using_custom_built_tof_lookup(workflow): + workflow.insert(powder.conversion.build_tof_lookup_table) + workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[SimulationResults] = time_of_flight.simulate_beamline( + choppers=dream.beamline.choppers( + dream.beamline.InstrumentConfiguration.high_flux + ), + neutrons=2_000_000, + ) + workflow[LtotalRange] = sc.scalar(60.0, unit="m"), sc.scalar(80.0, unit="m") + workflow[DistanceResolution] = sc.scalar(0.1, unit="m") + workflow[TimeResolution] = sc.scalar(250.0, unit='us') + workflow[LookupTableRelativeErrorThreshold] = 0.02 + result = workflow.compute(IofDspacing) + assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} + assert sc.identical(result.coords['dspacing'], params[DspacingBins]) + + def test_pipeline_can_compute_dspacing_result_with_hist_monitor_norm(params_for_det): workflow = make_workflow( params_for_det, run_norm=powder.RunNormalization.monitor_histogram @@ -150,7 +176,7 @@ def test_pipeline_can_compute_intermediate_results(workflow): if detector_name in ('endcap_backward', 'endcap_forward'): expected_dims.add('sumo') - assert set(result.dims) == expected_dims + assert expected_dims.issubset(set(result.dims)) def test_pipeline_group_by_two_theta(workflow): @@ -160,10 +186,8 @@ def test_pipeline_group_by_two_theta(workflow): workflow[TwoThetaBins] = two_theta_bins workflow = powder.with_pixel_mask_filenames(workflow, []) result = workflow.compute(IofDspacingTwoTheta) - assert result.sizes == { - 'two_theta': 16, - 'dspacing': 
len(params[DspacingBins]) - 1, - } + assert result.sizes['two_theta'] == 16 + assert result.sizes['dspacing'] == len(params[DspacingBins]) - 1 assert sc.identical(result.coords['dspacing'], params[DspacingBins]) assert sc.allclose(result.coords['two_theta'], two_theta_bins) @@ -202,13 +226,6 @@ def test_pipeline_two_theta_masking(workflow): ) -def test_use_workflow_helper(workflow): - workflow = powder.with_pixel_mask_filenames(workflow, []) - result = workflow.compute(IofDspacing) - assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} - assert sc.identical(result.coords['dspacing'], params[DspacingBins]) - - def test_pipeline_can_save_data(workflow): workflow = powder.with_pixel_mask_filenames(workflow, []) result = workflow.compute(ReducedTofCIF) diff --git a/tools/dream-make-tof-lookup-table.ipynb b/tools/dream-make-tof-lookup-table.ipynb new file mode 100644 index 00000000..03f07082 --- /dev/null +++ b/tools/dream-make-tof-lookup-table.ipynb @@ -0,0 +1,147 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Create a time-of-flight lookup table for DREAM" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "import scipp as sc\n", + "import sciline as sl\n", + "from ess.reduce import time_of_flight\n", + "from ess.dream.beamline import InstrumentConfiguration, choppers" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## Select the choppers" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "disk_choppers = choppers(InstrumentConfiguration.high_flux)" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "## Setting up the workflow" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "wf = sl.Pipeline(\n", + 
" time_of_flight.providers(), params=time_of_flight.default_parameters()\n", + ")\n", + "\n", + "wf[time_of_flight.LtotalRange] = sc.scalar(60.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")\n", + "wf[time_of_flight.SimulationResults] = time_of_flight.simulate_beamline(\n", + " choppers=disk_choppers, neutrons=5_000_000\n", + ")\n", + "\n", + "wf[time_of_flight.DistanceResolution] = sc.scalar(0.1, unit=\"m\")\n", + "wf[time_of_flight.TimeResolution] = sc.scalar(250.0, unit='us')\n", + "wf[time_of_flight.LookupTableRelativeErrorThreshold] = 0.02\n", + "wf[time_of_flight.PulsePeriod] = 1.0 / sc.scalar(14.0, unit=\"Hz\")\n", + "wf[time_of_flight.PulseStride] = 1\n", + "wf[time_of_flight.PulseStrideOffset] = None" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "## Compute the table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "table = wf.compute(time_of_flight.TimeOfFlightLookupTable)\n", + "table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "table.squeeze().plot()" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "## Save to file" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "# Save chopper metadata\n", + "# TODO: storing the choppers as a PyObject is skipped when saving to disk\n", + "table.coords['choppers'] = sc.scalar(disk_choppers)\n", + "# Write to file\n", + "table.save_hdf5('DREAM-high-flux-tof-lookup-table.h5')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + 
"pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/tox.ini b/tox.ini index d9003072..d43e6145 100644 --- a/tox.ini +++ b/tox.ini @@ -70,5 +70,5 @@ deps = tomli skip_install = true changedir = requirements -commands = python ./make_base.py --nightly scipp,scippnexus,sciline,plopp,scippneutron,essreduce +commands = python ./make_base.py --nightly scipp,scippnexus,sciline,plopp,scippneutron,essreduce,tof pip-compile-multi -d . --backtracking --annotate-index