Skip to content

Commit

Permalink
account for difference in initial stacked array dimensionality when parsing single-channel images; #422
Browse files Browse the repository at this point in the history
  • Loading branch information
vreuter committed Jan 29, 2025
1 parent 3707dcf commit d10c3f2
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 12 deletions.
26 changes: 19 additions & 7 deletions looptrace/conversion_to_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
EMBL Heidelberg
"""

from collections import OrderedDict
import os
from pathlib import *
from typing import *
Expand All @@ -17,25 +18,36 @@
from looptrace import image_io, nd2io, ArrayDimensionalityError
from looptrace.ImageHandler import ImageHandler
from looptrace.integer_naming import get_fov_name_short
from looptrace.nd2io import CHANNEL_COUNT_KEY
from looptrace.voxel_stack import VoxelSize


def workflow(n_pos: int, input_folders: Iterable[Path], output_folder: Path) -> None:
for fov_index in tqdm.tqdm(range(int(n_pos))):
imgs = []
for f in input_folders:
folder_imgs, _, folder_metadata = nd2io.stack_nd2_to_dask(f, fov_index=fov_index)
folder_imgs, _, sample_file_metadata = nd2io.stack_nd2_to_dask(f, fov_index=fov_index)
imgs.append(folder_imgs[0])
imgs = da.concatenate(imgs, axis=0)
if len(imgs.shape) == 5:
chunks = (1, 1, 1, imgs.shape[-2], imgs.shape[-1]), # 1 chunk per xy-plane (z-slice)

num_channels: int = sample_file_metadata[CHANNEL_COUNT_KEY]
exp_num_dim: int
if num_channels == 1:
exp_num_dim = 4
imgs = imgs.reshape((imgs.shape[0], 1) + imgs.shape[1:]) # Create channel axis.
elif num_channels > 1:
exp_num_dim = 5
else:
raise RuntimeError(f"Channel count isn't positive: {num_channels}")
if len(imgs.shape) != exp_num_dim:
raise ArrayDimensionalityError(
f"Expected a 5D array to write to ZARR, but got {len(imgs.shape)}D; shape: {imgs.shape}"
f"Expected a {exp_num_dim}-D array to write to ZARR, but got {len(imgs.shape)}-D; shape: {imgs.shape}"
)
voxel_size: VoxelSize = folder_metadata["voxel_size"]

voxel_size: VoxelSize = sample_file_metadata["voxel_size"]
chunks = (1, 1, 1, imgs.shape[-2], imgs.shape[-1]), # 1 chunk per xy-plane (z-slice)

# TODO: why is it justified to use just the last folder_metadata value (associated with a
# TODO: why is it justified to use just the last sample_file_metadata value (associated with a
# single f in input_folders) in a function call where the concatenation of values from
# all input_folders is being passed to .zarr creation?
# See: https://github.com/gerlichlab/looptrace/issues/118
Expand All @@ -46,7 +58,7 @@ def workflow(n_pos: int, input_folders: Iterable[Path], output_folder: Path) ->
shape = imgs.shape,
dtype = np.uint16,
chunks = chunks,
metadata = folder_metadata,
metadata = sample_file_metadata,
voxel_size = voxel_size,
)
n_t = imgs.shape[0]
Expand Down
8 changes: 5 additions & 3 deletions looptrace/nd2io.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@
]


_AXIS_SIZES_KEY = "axis_sizes"
AXIS_SIZES_KEY = "axis_sizes"
CHANNEL_COUNT_KEY = "channelCount"


class EmptyImagesError(Exception):
Expand Down Expand Up @@ -77,7 +78,8 @@ def parse_nd2_metadata(image_file: str) -> Mapping[str, Any]:
metadata = {}
with nd2.ND2File(image_file) as sample:
metadata["voxel_size"] = parse_voxel_size(sample)
metadata[_AXIS_SIZES_KEY] = sample.sizes
metadata[AXIS_SIZES_KEY] = sample.sizes
metadata[CHANNEL_COUNT_KEY] = getattr(sample.attributes, CHANNEL_COUNT_KEY)
microscope = sample.metadata.channels[0].microscope
metadata['microscope'] = {
'objectiveMagnification': microscope.objectiveMagnification,
Expand Down Expand Up @@ -169,7 +171,7 @@ def _shift_axes_of_stacked_array_from_nd2(
arr: da.Array,
metadata: Mapping[str, Any],
) -> Result[da.Array, str]:
match list(metadata[_AXIS_SIZES_KEY].keys()):
match list(metadata[AXIS_SIZES_KEY].keys()):
case ["Z", "C", "Y", "X"]:
return Result.Ok(da.moveaxis(arr, -4, -3))
case ["Z", "Y", "X"]:
Expand Down
4 changes: 2 additions & 2 deletions tests/test__stack_nd2_to_dask.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
__email__ = "[email protected]"

from looptrace.nd2io import *
from looptrace.nd2io import _AXIS_SIZES_KEY
from looptrace.nd2io import AXIS_SIZES_KEY
from looptrace.integer_naming import get_fov_names_N

POSITION_PREFIX = "Point000"
Expand Down Expand Up @@ -179,7 +179,7 @@ def test_underscore_prefixed_and_or_non_nd2_files_are_skipped_and_good_ones_have
unique_fields_of_view.add(p)
# Patch the metadata parser to be a no-op, the ND2 reader to be context manager-like,
# and dask call to be identity.
with mock.patch("looptrace.nd2io.parse_nd2_metadata", return_value={_AXIS_SIZES_KEY: OrderedDict((dim, 0) for dim in ["Z", "C", "Y", "X"])}), \
with mock.patch("looptrace.nd2io.parse_nd2_metadata", return_value={AXIS_SIZES_KEY: OrderedDict((dim, 0) for dim in ["Z", "C", "Y", "X"])}), \
mock.patch("looptrace.nd2io.nd2.ND2File", side_effect=lambda *_, **__: mocked_nd2_handle()) as mock_nd2_read, \
mock.patch("looptrace.nd2io.da.stack", side_effect=lambda arrs: arrs), \
mock.patch("looptrace.nd2io.da.moveaxis", side_effect=lambda _1, _2, _3: mock.Mock(shape=None)):
Expand Down

0 comments on commit d10c3f2

Please sign in to comment.