Merge pull request #214 from scipp/update-for-next-scipp
Update for next scipp
jl-wynen authored Nov 9, 2023
2 parents bfbfa94 + f9e7082 commit b034d63
Showing 21 changed files with 182 additions and 155 deletions.
6 changes: 2 additions & 4 deletions docs/instruments/amor/amor_reduction.ipynb
@@ -24,9 +24,7 @@
"import scipp as sc\n",
"import plopp as pp\n",
"from ess import amor, reflectometry\n",
- "import ess\n",
- "\n",
- "pp.patch_scipp()"
+ "import ess"
]
},
{
@@ -858,7 +856,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.17"
+ "version": "3.10.12"
}
},
"nbformat": 4,
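Note on the first hunk of this notebook: dropping pp.patch_scipp() suggests that with the scipp and plopp versions targeted by this update, scipp's built-in .plot() already dispatches to plopp, so no patching step is needed. A minimal sketch under that assumption (the data array here is made up, not from the notebook):

import plopp as pp
import scipp as sc

da = sc.DataArray(sc.arange('x', 5.0), coords={'x': sc.arange('x', 5.0)})

pp.plot(da)  # plopp's own entry point
da.plot()    # works without pp.patch_scipp() when plopp is the default plotting backend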
3 changes: 2 additions & 1 deletion docs/instruments/loki/sans2d_reduction.ipynb
@@ -547,7 +547,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
}
},
"nbformat": 4,
5 changes: 3 additions & 2 deletions docs/instruments/loki/sans2d_to_I_of_Q.ipynb
@@ -687,7 +687,7 @@
"# Insert a copy of coords needed for conversion to Q.\n",
"# TODO: can this be avoided by copying the Q coords from the converted numerator?\n",
"for coord in ['position', 'sample_position', 'source_position']:\n",
- " denominator.coords[coord] = sample.meta[coord]\n",
+ " denominator.coords[coord] = sample.coords[coord]\n",
"\n",
"denominator"
]
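The change from sample.meta[coord] to sample.coords[coord] above reflects the wider migration in this PR: scipp is deprecating the separate attrs/meta mappings on data arrays in favour of a single coords mapping (see src/ess/_migration.py further down). A minimal sketch of the pattern, with made-up arrays standing in for sample and denominator:

import scipp as sc

sample = sc.DataArray(
    sc.ones(dims=['spectrum'], shape=[3]),
    coords={
        'position': sc.vectors(dims=['spectrum'], values=[[0.0, 0.0, 1.0]] * 3, unit='m'),
    },
)
denominator = sc.DataArray(sc.ones(dims=['spectrum'], shape=[3]))

# Everything now lives in .coords; there is no separate .meta to read from.
for coord in ['position']:
    denominator.coords[coord] = sample.coords[coord]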
@@ -901,7 +901,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
}
},
"nbformat": 4,
3 changes: 2 additions & 1 deletion docs/techniques/sans/sans-beam-center-finder.ipynb
@@ -598,7 +598,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
}
},
"nbformat": 4,
25 changes: 12 additions & 13 deletions docs/techniques/wfm/introduction-to-wfm.ipynb
@@ -225,7 +225,7 @@
" ax.text(t_B, -height * z_det, r\"$t_{B}$\", ha=\"center\", va=\"top\", fontsize=8)\n",
"\n",
" z_wfm = sc.norm(\n",
- " 0.5 * (chopper_wfm1[\"position\"].data + chopper_wfm2[\"position\"].data)\n",
+ " 0.5 * (chopper_wfm1[\"position\"] + chopper_wfm2[\"position\"])\n",
" ).value\n",
" xmin = ch.time_open(chopper_wfm1).values[0]\n",
" xmax = ch.time_closed(chopper_wfm1).values[0]\n",
@@ -369,7 +369,7 @@
" chopper_wfm1 = coords[\"chopper_wfm_1\"].value\n",
" chopper_wfm2 = coords[\"chopper_wfm_2\"].value\n",
" z_wfm = sc.norm(\n",
- " 0.5 * (chopper_wfm1[\"position\"].data + chopper_wfm2[\"position\"].data)\n",
+ " 0.5 * (chopper_wfm1[\"position\"] + chopper_wfm2[\"position\"])\n",
" ).value\n",
" xmax = ch.time_closed(chopper_wfm1).values[0]\n",
" z_foc = 12.0\n",
@@ -403,7 +403,7 @@
"\n",
" xmid = (\n",
" 0.5\n",
- " * ((frames[\"time_min\"] + frames[\"time_min\"] + frames[\"delta_time_min\"]).data)\n",
+ " * ((frames[\"time_min\"] + frames[\"time_min\"] + frames[\"delta_time_min\"]))\n",
" ).values[0]\n",
" ax.plot([xmid] * 2, [z_det, z_det + 0.5], lw=1, color='k')\n",
"\n",
@@ -432,9 +432,7 @@
" (\n",
" 0.5\n",
" * (\n",
- " (\n",
- " frames[\"time_max\"] + frames[\"time_max\"] - frames[\"delta_time_max\"]\n",
- " ).data\n",
+ " frames[\"time_max\"] + frames[\"time_max\"] - frames[\"delta_time_max\"]\n",
" )\n",
" ).values,\n",
" z_det + 1.0,\n",
@@ -488,7 +486,7 @@
" chopper_wfm1 = coords[\"chopper_wfm_1\"].value\n",
" chopper_wfm2 = coords[\"chopper_wfm_2\"].value\n",
" z_wfm = sc.norm(\n",
- " 0.5 * (chopper_wfm1[\"position\"].data + chopper_wfm2[\"position\"].data)\n",
+ " 0.5 * (chopper_wfm1[\"position\"] + chopper_wfm2[\"position\"])\n",
" ).value\n",
" z_det = sc.norm(ds.coords[\"position\"]).value\n",
"\n",
@@ -516,11 +514,11 @@
"\n",
" xmid_min = (\n",
" 0.5\n",
- " * ((frames[\"time_min\"] + frames[\"time_min\"] + frames[\"delta_time_min\"]).data)\n",
+ " * ((frames[\"time_min\"] + frames[\"time_min\"] + frames[\"delta_time_min\"]))\n",
" ).values\n",
" xmid_max = (\n",
" 0.5\n",
- " * ((frames[\"time_max\"] + frames[\"time_max\"] - frames[\"delta_time_max\"]).data)\n",
+ " * ((frames[\"time_max\"] + frames[\"time_max\"] - frames[\"delta_time_max\"]))\n",
" ).values\n",
"\n",
" ax.text(\n",
@@ -1046,12 +1044,12 @@
"source": [
"# Distance between WFM choppers\n",
"dz_wfm = sc.norm(\n",
- " ds.coords[\"chopper_wfm_2\"].value[\"position\"].data\n",
- " - ds.coords[\"chopper_wfm_1\"].value[\"position\"].data\n",
+ " ds.coords[\"chopper_wfm_2\"].value[\"position\"]\n",
+ " - ds.coords[\"chopper_wfm_1\"].value[\"position\"]\n",
")\n",
"# Delta_lambda / lambda\n",
"dlambda_over_lambda = dz_wfm / sc.norm(\n",
- " coords['position'] - frames['wfm_chopper_mid_point'].data\n",
+ " coords['position'] - frames['wfm_chopper_mid_point']\n",
")\n",
"(1.5 * sc.units.angstrom) * dlambda_over_lambda"
]
@@ -1291,7 +1289,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
}
},
"nbformat": 4,
25 changes: 15 additions & 10 deletions docs/techniques/wfm/reducing-wfm-data.ipynb
@@ -335,16 +335,16 @@
" # Compute event arrival times at wfm choppers 1 and 2\n",
" slopes = 1.0 / (alpha * item[\"wavelengths\"])\n",
" intercepts = -slopes * item[\"birth_times\"]\n",
- " times_at_wfm1 = (sc.norm(near_wfm_chopper[\"position\"].data) - intercepts) / slopes\n",
- " times_at_wfm2 = (sc.norm(far_wfm_chopper[\"position\"].data) - intercepts) / slopes\n",
+ " times_at_wfm1 = (sc.norm(near_wfm_chopper[\"position\"]) - intercepts) / slopes\n",
+ " times_at_wfm2 = (sc.norm(far_wfm_chopper[\"position\"]) - intercepts) / slopes\n",
" # Create a mask to see if neutrons go through one of the openings\n",
" mask = sc.zeros(dims=times_at_wfm1.dims, shape=times_at_wfm1.shape, dtype=bool)\n",
" for i in range(len(frames[\"time_min\"])):\n",
" mask |= (\n",
" (times_at_wfm1 >= near_time_open[\"frame\", i])\n",
" & (times_at_wfm1 <= near_time_close[\"frame\", i])\n",
- " & (item[\"wavelengths\"] >= frames[\"wavelength_min\"][\"frame\", i]).data\n",
- " & (item[\"wavelengths\"] <= frames[\"wavelength_max\"][\"frame\", i]).data\n",
+ " & (item[\"wavelengths\"] >= frames[\"wavelength_min\"][\"frame\", i])\n",
+ " & (item[\"wavelengths\"] <= frames[\"wavelength_max\"][\"frame\", i])\n",
" )\n",
" item[\"valid_indices\"] = np.ravel(np.where(mask.values))"
]
@@ -387,6 +387,7 @@
")\n",
"\n",
"# Histogram the data\n",
+ "histogrammed_data = {}\n",
"for key, item in events.items():\n",
" da = sc.DataArray(\n",
" data=sc.ones(\n",
@@ -401,8 +402,9 @@
" )\n",
" },\n",
" )\n",
- " ds[key] = da.hist(time=time_coord)\n",
+ " histogrammed_data[key] = da.hist(time=time_coord)\n",
"\n",
+ "ds = sc.Dataset(histogrammed_data, coords=ds.coords)\n",
"ds"
]
},
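The pattern in the hunk above, collecting histogrammed items in a plain dict and then building the Dataset in a single call, recurs throughout this notebook, presumably because the next scipp release is stricter about inserting items one by one into an existing Dataset. A self-contained sketch of the pattern with illustrative names (not the notebook's actual data):

import scipp as sc

events = {
    'sample': sc.DataArray(
        sc.ones(dims=['event'], shape=[1000], unit='counts'),
        coords={'time': sc.linspace('event', 0.0, 7.0e4, 1000, unit='us')},
    ),
}
time_coord = sc.linspace('time', 0.0, 7.0e4, 257, unit='us')

# Histogram each item into a dict, then construct the Dataset once.
histogrammed = {key: da.hist(time=time_coord) for key, da in events.items()}
ds = sc.Dataset(histogrammed)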
@@ -700,7 +702,7 @@
" unit=events_no_wfm[\"sample\"][\"arrival_times\"].unit,\n",
")\n",
"\n",
- "ds_no_wfm = sc.Dataset(coords=coords)\n",
+ "data_no_wfm = {}\n",
"\n",
"# Histogram the data\n",
"for key, item in events_no_wfm.items():\n",
@@ -717,8 +719,9 @@
" )\n",
" },\n",
" )\n",
- " ds_no_wfm[key] = da.hist(tof=time_coord_no_wfm)\n",
+ " data_no_wfm[key] = da.hist(tof=time_coord_no_wfm)\n",
"\n",
+ "ds_no_wfm = sc.Dataset(data_no_wfm, coords=coords)\n",
"ds_no_wfm"
]
},
@@ -875,7 +878,7 @@
" unit=events[\"sample\"][\"arrival_times\"].unit,\n",
")\n",
"\n",
- "ds_event = sc.Dataset(coords=coords)\n",
+ "data_event = {}\n",
"\n",
"# Bin the data\n",
"for key, item in events.items():\n",
@@ -892,8 +895,9 @@
" )\n",
" },\n",
" )\n",
- " ds_event[key] = da.bin(time=time_coord)\n",
+ " data_event[key] = da.bin(time=time_coord)\n",
"\n",
+ "ds_event = sc.Dataset(data_event, coords=coords)\n",
"ds_event"
]
},
@@ -1114,7 +1118,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.8.17"
}
},
"nbformat": 4,
32 changes: 32 additions & 0 deletions src/ess/_migration.py
@@ -0,0 +1,32 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)

from typing import MutableMapping

import scipp as sc


def get_attrs(da: sc.DataArray) -> MutableMapping[str, sc.Variable]:
try:
# During deprecation phase
return da.deprecated_attrs
except AttributeError:
try:
# Before deprecation phase
return da.attrs
except AttributeError:
# After deprecation phase / removal of attrs
return da.coords


def get_meta(da: sc.DataArray) -> MutableMapping[str, sc.Variable]:
try:
# During deprecation phase
return da.deprecated_meta
except AttributeError:
try:
# Before deprecation phase
return da.meta
except AttributeError:
# After deprecation phase / removal of attrs
return da.coords
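A usage sketch for these helpers (the data array is a made-up example): they return whichever mapping the installed scipp version still provides, so calling code does not have to branch on the scipp version itself.

import scipp as sc

from ess._migration import get_attrs, get_meta

da = sc.DataArray(sc.arange('x', 4.0), coords={'x': sc.arange('x', 4.0)})

meta = get_meta(da)    # coords plus attrs on older scipp, just coords after their removal
attrs = get_attrs(da)  # likewise for the attrs-only mapping
print(list(meta.keys()), list(attrs.keys()))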
3 changes: 1 addition & 2 deletions src/ess/amor/conversions.py
@@ -16,8 +16,7 @@ def incident_beam(
instead of the source_position vector.
"""
chopper_midpoint = (
- source_chopper_1.value['position'].data
- + source_chopper_2.value['position'].data
+ source_chopper_1.value['position'] + source_chopper_2.value['position']
) * sc.scalar(0.5)
return sample_position - chopper_midpoint

4 changes: 2 additions & 2 deletions src/ess/amor/load.py
@@ -33,10 +33,10 @@ def _tof_correction(data: sc.DataArray, dim: str = 'tof') -> sc.DataArray:
data.attrs['orso'].value.reduction.corrections += ['chopper ToF correction']
tof_unit = data.bins.coords[dim].bins.unit
tau = sc.to_unit(
- 1 / (2 * data.coords['source_chopper_2'].value['frequency'].data),
+ 1 / (2 * data.coords['source_chopper_2'].value['frequency']),
tof_unit,
)
- chopper_phase = data.coords['source_chopper_2'].value['phase'].data
+ chopper_phase = data.coords['source_chopper_2'].value['phase']
tof_offset = tau * chopper_phase / (180.0 * sc.units.deg)
# Make 2 bins, one for each pulse
edges = sc.concat([-tof_offset, tau - tof_offset, 2 * tau - tof_offset], dim)
6 changes: 2 additions & 4 deletions src/ess/amor/resolution.py
@@ -29,11 +29,9 @@ def wavelength_resolution(
The angular resolution variable, as standard deviation.
"""
distance_between_choppers = (
- chopper_2_position.data.fields.z - chopper_1_position.data.fields.z
- )
- chopper_midpoint = (chopper_1_position.data + chopper_2_position.data) * sc.scalar(
- 0.5
+ chopper_2_position.fields.z - chopper_1_position.fields.z
)
+ chopper_midpoint = (chopper_1_position + chopper_2_position) * sc.scalar(0.5)
chopper_detector_distance = pixel_position.fields.z - chopper_midpoint.fields.z
return fwhm_to_std(distance_between_choppers / chopper_detector_distance)

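The removal of .data here suggests the chopper positions are now passed to wavelength_resolution as plain vector-valued Variables rather than wrapped in data arrays, so field access works on them directly. A minimal sketch of the arithmetic above, using made-up positions:

import scipp as sc

chopper_1_position = sc.vector(value=[0.0, 0.0, 10.0], unit='m')
chopper_2_position = sc.vector(value=[0.0, 0.0, 12.0], unit='m')

distance_between_choppers = chopper_2_position.fields.z - chopper_1_position.fields.z
chopper_midpoint = (chopper_1_position + chopper_2_position) * sc.scalar(0.5)
print(distance_between_choppers, chopper_midpoint.fields.z)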
21 changes: 10 additions & 11 deletions src/ess/choppers/make_chopper.py
@@ -14,10 +14,10 @@ def make_chopper(
cutout_angles_begin: sc.Variable = None,
cutout_angles_end: sc.Variable = None,
kind: str = None,
- ) -> sc.Dataset:
+ ) -> sc.DataGroup:
"""
- Create a Dataset that holds chopper parameters.
- This ensures the Dataset is compatible with the other functions in the choppers
+ Create a DataGroup that holds chopper parameters.
+ This ensures the DataGroup is compatible with the other functions in the choppers
module.
Defining a chopper's cutout angles can either constructed from an array of cutout
centers and an array of angular widths, or two arrays containing the begin (leading)
@@ -33,20 +33,19 @@
:param kind: The chopper kind. Any string can be supplied, but WFM choppers should
be given 'wfm' as their kind.
"""
- data = {"frequency": frequency, "position": position}
+ chopper = sc.DataGroup({"frequency": frequency, "position": position})
if phase is not None:
- data["phase"] = phase
+ chopper["phase"] = phase
if cutout_angles_center is not None:
- data["cutout_angles_center"] = cutout_angles_center
+ chopper["cutout_angles_center"] = cutout_angles_center
if cutout_angles_width is not None:
- data["cutout_angles_width"] = cutout_angles_width
+ chopper["cutout_angles_width"] = cutout_angles_width
if cutout_angles_begin is not None:
- data["cutout_angles_begin"] = cutout_angles_begin
+ chopper["cutout_angles_begin"] = cutout_angles_begin
if cutout_angles_end is not None:
- data["cutout_angles_end"] = cutout_angles_end
+ chopper["cutout_angles_end"] = cutout_angles_end
if kind is not None:
- data["kind"] = kind
- chopper = sc.Dataset(data=data)
+ chopper["kind"] = kind

# Sanitize input parameters
if (None not in [cutout_angles_begin, cutout_angles_end]) or (
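The switch from sc.Dataset to sc.DataGroup for choppers also explains the many removed .data accesses in the notebook diffs above: indexing an item of a Dataset returns a DataArray (hence the old trailing .data), whereas a DataGroup hands back the stored object itself. A minimal sketch with made-up parameter values:

import scipp as sc

chopper = sc.DataGroup({
    'frequency': sc.scalar(56.0, unit='Hz'),
    'position': sc.vector(value=[0.0, 0.0, 6.8], unit='m'),
    'kind': 'wfm',  # plain Python objects are allowed, which a Dataset does not accept
})

# Items come back as the stored Variables/objects, so no .data is needed:
z_wfm = sc.norm(chopper['position'])
tau = 1.0 / (2 * chopper['frequency'])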
(Diffs for the remaining 10 changed files in this commit are not shown.)
