ENH(psoct.single_volume): more robust default key
balbasty committed Nov 22, 2024
1 parent 4f71ddd commit 2e3d12c
Showing 1 changed file with 24 additions and 16 deletions.
40 changes: 24 additions & 16 deletions linc_convert/modalities/psoct/single_volume.py
@@ -12,7 +12,7 @@
 from contextlib import contextmanager
 from functools import wraps
 from itertools import product
-from typing import Any, Callable, Optional
+from typing import Callable, Optional
 from warnings import warn
 
 import cyclopts
@@ -38,10 +38,10 @@
 
 
 def _automap(func: Callable) -> Callable:
-    """Decorator to automatically map the array in the mat file."""  # noqa: D401
+    """Automatically map the array in the mat file."""
 
     @wraps(func)
-    def wrapper(inp: str, out: str = None, **kwargs: dict) -> Any:  # noqa: ANN401
+    def wrapper(inp: str, out: str = None, **kwargs: dict) -> None:
         if out is None:
             out = os.path.splitext(inp[0])[0]
             out += ".nii.zarr" if kwargs.get("nii", False) else ".ome.zarr"
@@ -65,9 +65,12 @@ def _mapmat(fname: str, key: str = None) -> None:
     if key is None:
         if not len(f.keys()):
             raise Exception(f"{fname} is empty")
-        key = list(f.keys())[0]
+        for key in f.keys():
+            if key[:1] != '_':
+                break
         if len(f.keys()) > 1:
-            warn(f'More than one key in .mat file {fname}, arbitrarily loading "{key}"')
+            warn(f'More than one key in .mat file {fname}, '
+                 f'arbitrarily loading "{key}"')
 
     if key not in f.keys():
         raise Exception(f"Key {key} not found in file {fname}")
@@ -153,9 +156,9 @@ def convert(
     omz = zarr.group(store=omz, overwrite=True)
 
     if not hasattr(inp, "dtype"):
-        raise Exception("Input is not a numpy array. This is likely unexpected")
+        raise Exception("Input is not a numpy array. This is unexpected.")
     if len(inp.shape) < 3:
-        raise Exception("Input array is not 3d")
+        raise Exception("Input array is not 3d:", inp.shape)
     # Prepare chunking options
     opt = {
         "dimension_separator": r"/",
@@ -171,7 +174,9 @@
     ni = ceildiv(inp.shape[2], inp_chunk[2])
 
     nblevels = min(
-        [int(math.ceil(math.log2(x))) for i, x in enumerate(inp.shape) if i != no_pool]
+        [int(math.ceil(math.log2(x)))
+         for i, x in enumerate(inp.shape)
+         if i != no_pool]
     )
     nblevels = min(nblevels, int(math.ceil(math.log2(max_load))))
     nblevels = min(nblevels, max_levels)
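
A rough worked example of the level count above, with invented sizes: the pyramid depth is driven by the largest power of two needed along each pooled axis, then capped by `max_load` and `max_levels`.

    import math

    shape, no_pool = (1024, 512, 64), 2   # hypothetical volume, axis 2 not pooled
    max_load, max_levels = 512, 5         # hypothetical caps

    nblevels = min(
        [int(math.ceil(math.log2(x)))
         for i, x in enumerate(shape)
         if i != no_pool]
    )                                                                # -> 9
    nblevels = min(nblevels, int(math.ceil(math.log2(max_load))))    # -> 9
    nblevels = min(nblevels, max_levels)                             # -> 5
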
@@ -183,9 +188,9 @@
     # iterate across input chunks
     for i, j, k in product(range(ni), range(nj), range(nk)):
         loaded_chunk = inp[
-            k * inp_chunk[0] : (k + 1) * inp_chunk[0],
-            j * inp_chunk[1] : (j + 1) * inp_chunk[1],
-            i * inp_chunk[2] : (i + 1) * inp_chunk[2],
+            k * inp_chunk[0]: (k + 1) * inp_chunk[0],
+            j * inp_chunk[1]: (j + 1) * inp_chunk[1],
+            i * inp_chunk[2]: (i + 1) * inp_chunk[2],
         ]
 
         print(
@@ -198,9 +203,9 @@
 
         # save current chunk
         omz["0"][
-            k * inp_chunk[0] : k * inp_chunk[0] + loaded_chunk.shape[0],
-            j * inp_chunk[1] : j * inp_chunk[1] + loaded_chunk.shape[1],
-            i * inp_chunk[2] : i * inp_chunk[2] + loaded_chunk.shape[2],
+            k * inp_chunk[0]: k * inp_chunk[0] + loaded_chunk.shape[0],
+            j * inp_chunk[1]: j * inp_chunk[1] + loaded_chunk.shape[1],
+            i * inp_chunk[2]: i * inp_chunk[2] + loaded_chunk.shape[2],
         ] = loaded_chunk
 
     generate_pyramid(omz, nblevels - 1, mode="mean")
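
A self-contained sketch of the copy pattern above, on a throwaway NumPy array: because the destination slice ends at `k * inp_chunk[0] + loaded_chunk.shape[0]` rather than `(k + 1) * inp_chunk[0]`, the ragged final chunk along each axis is written without padding or out-of-bounds indexing. The array shape, chunk size and local `ceildiv` helper below are assumptions for the example only.

    from itertools import product
    import numpy as np

    def ceildiv(a, b):
        return -(-a // b)

    src = np.random.rand(10, 7, 5)   # hypothetical volume
    dst = np.zeros_like(src)
    chunk = (4, 4, 4)                # hypothetical chunk shape

    nk, nj, ni = (ceildiv(s, c) for s, c in zip(src.shape, chunk))
    for i, j, k in product(range(ni), range(nj), range(nk)):
        loaded = src[
            k * chunk[0]: (k + 1) * chunk[0],
            j * chunk[1]: (j + 1) * chunk[1],
            i * chunk[2]: (i + 1) * chunk[2],
        ]
        dst[
            k * chunk[0]: k * chunk[0] + loaded.shape[0],
            j * chunk[1]: j * chunk[1] + loaded.shape[1],
            i * chunk[2]: i * chunk[2] + loaded.shape[2],
        ] = loaded

    assert np.array_equal(src, dst)
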
@@ -217,7 +222,9 @@ def convert(
         no_pool=no_pool,
         space_unit=ome_unit,
         space_scale=vx,
-        multiscales_type=("2x2x2" if no_pool is None else "2x2") + "mean window",
+        multiscales_type=(
+            ("2x2x2" if no_pool is None else "2x2") + "mean window"
+        ),
     )
 
     if not nii:
@@ -233,5 +240,6 @@
     if center:
         affine = center_affine(affine, shape[:3])
     niftizarr_write_header(
-        omz, shape, affine, omz["0"].dtype, to_nifti_unit(unit), nifti_version=2
+        omz, shape, affine, omz["0"].dtype, to_nifti_unit(unit),
+        nifti_version=2
     )
