diff --git a/CHANGELOG.md b/CHANGELOG.md index b9f64da86..d06dfaf5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,6 @@ This release includes work on Pydantic models for NGFF specs and on ROI tables. * Extract `num_levels` and `coarsening_xy` parameters from NGFF objects, rather than from `metadata` task input (\#528). * Transform several `lib_zattrs_utils.py` functions (`get_axes_names`, `extract_zyx_pixel_sizes` and `get_acquisition_paths`) into `lib_ngff.py` methods (\#528). * Load Zarr attributes from groups, rather than from `.zattrs` files (\#528). - * Regions of interest: * Set `FOV_ROI_table` and `well_ROI_table` ZYX origin to zero (\#524). * Remove heuristics to determine whether to reset origin, in `cellpose_segmentation` task (\#524). @@ -20,6 +19,8 @@ This release includes work on Pydantic models for NGFF specs and on ROI tables. * Fix bug in creation of bounding-box ROIs when `cellpose_segmentation` loops of FOVs (\#524). * Update type of `metadata` parameter of `prepare_FOV_ROI_table` and `prepare_well_ROI_table` functions (\#524). * Fix `reset_origin` so that it returns an updated copy of its input (\#524). +* Dependency: + * Relax `fsspec<=2023.6` constraint into `fsspec!=2023.9.0` (\#536). # 0.11.0 diff --git a/fractal_tasks_core/__FRACTAL_MANIFEST__.json b/fractal_tasks_core/__FRACTAL_MANIFEST__.json index 63190fb8e..d3f96380b 100644 --- a/fractal_tasks_core/__FRACTAL_MANIFEST__.json +++ b/fractal_tasks_core/__FRACTAL_MANIFEST__.json @@ -25,7 +25,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "This parameter is not used by this task (standard argument for Fractal tasks, managed by Fractal server)." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." 
}, "allowed_channels": { "title": "Allowed Channels", @@ -331,7 +331,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. `num_levels (int)`: number of pyramid levels in the image (this determines how many pyramid levels are built for the segmentation); `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid `plate`: List of plates (e.g. `[\"MyPlate.zarr\"]`); `well`: List of wells in the OME-Zarr plate (e.g. `[\"MyPlate.zarr/B/03\", \"MyPlate.zarr/B/05\"]`); `image: List of images in the OME-Zarr plate (e.g. `[\"MyPlate.zarr/B/03/0\", \"MyPlate.zarr/B/05/0\"]`). (standard argument for Fractal tasks, managed by Fractal server)." + "description": "Dictionary containing metadata about the OME-Zarr. This task requires the key `copy_ome_zarr` to be present in the metadata (as defined in `copy_ome_zarr` task). (standard argument for Fractal tasks, managed by Fractal server)." }, "overwrite": { "title": "Overwrite", @@ -386,7 +386,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. `num_levels (int)`: number of pyramid levels in the image; this determines how many pyramid levels are built for the segmentation. `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid. (standard argument for Fractal tasks, managed by Fractal server)." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." }, "level": { "title": "Level", @@ -564,7 +564,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. 
`num_levels (int)`: number of pyramid levels in the image (this determines how many pyramid levels are built for the segmentation), `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid. (standard argument for Fractal tasks, managed by Fractal server)." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." }, "illumination_profiles_folder": { "title": "Illumination Profiles Folder", @@ -645,7 +645,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. `num_levels (int)`: number of pyramid levels in the image (this determines how many pyramid levels are built for the segmentation); `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid. (standard argument for Fractal tasks, managed by Fractal server)." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." }, "workflow_file": { "title": "Workflow File", @@ -1014,7 +1014,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid. (standard argument for Fractal tasks, managed by Fractal server)." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." }, "wavelength_id": { "title": "Wavelength Id", @@ -1087,7 +1087,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task does not use the metadata." + "description": "This parameter is not used by this task. 
(standard argument for Fractal tasks, managed by Fractal server)." }, "roi_table": { "title": "Roi Table", @@ -1153,7 +1153,7 @@ "metadata": { "title": "Metadata", "type": "object", - "description": "Dictionary containing metadata about the OME-Zarr. This task requires the following elements to be present in the metadata. `coarsening_xy (int)`: coarsening factor in XY of the downsampling when building the pyramid. (standard argument for Fractal tasks, managed by Fractal server). `num_levels (int)`: number of pyramid levels in the image; this determines how many pyramid levels are built for the segmentation." + "description": "This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server)." }, "registered_roi_table": { "title": "Registered Roi Table", diff --git a/fractal_tasks_core/lib_ngff.py b/fractal_tasks_core/lib_ngff.py new file mode 100644 index 000000000..d5308b703 --- /dev/null +++ b/fractal_tasks_core/lib_ngff.py @@ -0,0 +1,412 @@ +""" +Pydantic models related to OME-NGFF 0.4 specs. +""" +import logging +from typing import Literal +from typing import Optional +from typing import Union + +import zarr +from pydantic import BaseModel +from pydantic import Field +from pydantic import validator + + +class Window(BaseModel): + """ + Model for `Channel.window`. + + Note that we deviate from NGFF specs by making `start` and `end` optional. + See https://ngff.openmicroscopy.org/0.4/#omero-md. + """ + + max: float + min: float + start: Optional[float] = None + end: Optional[float] = None + + +class Channel(BaseModel): + """ + Model for an element of `Omero.channels`. + + See https://ngff.openmicroscopy.org/0.4/#omero-md. + """ + + window: Window + label: Optional[str] = None + family: Optional[str] = None + color: str + active: Optional[bool] = None + + +class Omero(BaseModel): + """ + Model for `NgffImageMeta.omero`. + + See https://ngff.openmicroscopy.org/0.4/#omero-md. 
+ """ + + channels: list[Channel] + + +class Axis(BaseModel): + """ + Model for an element of `Multiscale.axes`. + + See https://ngff.openmicroscopy.org/0.4/#axes-md. + """ + + name: str + type: Optional[str] = None + + +class ScaleCoordinateTransformation(BaseModel): + """ + Model for a scale transformation. + + This corresponds to scale-type elements of + `Dataset.coordinateTransformations` or + `Multiscale.coordinateTransformations`. + See https://ngff.openmicroscopy.org/0.4/#trafo-md + """ + + type: Literal["scale"] + scale: list[float] = Field(..., min_items=2) + + +class TranslationCoordinateTransformation(BaseModel): + """ + Model for a translation transformation. + + This corresponds to translation-type elements of + `Dataset.coordinateTransformations` or + `Multiscale.coordinateTransformations`. + See https://ngff.openmicroscopy.org/0.4/#trafo-md + """ + + type: Literal["translation"] + translation: list[float] = Field(..., min_items=2) + + +class Dataset(BaseModel): + """ + Model for an element of `Multiscale.datasets`. + + See https://ngff.openmicroscopy.org/0.4/#multiscale-md + """ + + path: str + coordinateTransformations: list[ + Union[ + ScaleCoordinateTransformation, TranslationCoordinateTransformation + ] + ] = Field(..., min_items=1) + + @property + def scale_transformation(self) -> ScaleCoordinateTransformation: + """ + Extract the unique scale transformation, or fail otherwise. 
+ """ + _transformations = [ + t for t in self.coordinateTransformations if t.type == "scale" + ] + if len(_transformations) == 0: + raise ValueError( + "Missing scale transformation in dataset.\n" + "Current coordinateTransformations:\n" + f"{self.coordinateTransformations}" + ) + elif len(_transformations) > 1: + raise ValueError( + "More than one scale transformation in dataset.\n" + "Current coordinateTransformations:\n" + f"{self.coordinateTransformations}" + ) + else: + return _transformations[0] + + +class Multiscale(BaseModel): + """ + Model for an element of `NgffImageMeta.multiscales`. + + See https://ngff.openmicroscopy.org/0.4/#multiscale-md. + """ + + name: Optional[str] = None + datasets: list[Dataset] = Field(..., min_items=1) + version: Optional[str] = None + axes: list[Axis] = Field(..., max_items=5, min_items=2, unique_items=True) + coordinateTransformations: Optional[ + list[ + Union[ + ScaleCoordinateTransformation, + TranslationCoordinateTransformation, + ] + ] + ] = None + + @validator("coordinateTransformations", always=True) + def _no_global_coordinateTransformations(cls, v): + """ + Fail if Multiscale has a (global) coordinateTransformations attribute. + """ + if v is not None: + raise NotImplementedError( + "Global coordinateTransformations at the multiscales " + "level are not currently supported." + ) + + +class NgffImageMeta(BaseModel): + """ + Model for the metadata of a NGFF image. + + See https://ngff.openmicroscopy.org/0.4/#image-layout. + """ + + multiscales: list[Multiscale] = Field( + ..., + description="The multiscale datasets for this image", + min_items=1, + unique_items=True, + ) + omero: Optional[Omero] = None + + @property + def multiscale(self) -> Multiscale: + """ + The single element of `self.multiscales`. + + Raises: + NotImplementedError: + If there are no multiscales or more than one. 
+ """ + if len(self.multiscales) > 1: + raise NotImplementedError( + "Only images with one multiscale are supported " + f"(given: {len(self.multiscales)})" + ) + return self.multiscales[0] + + @property + def datasets(self) -> list[Dataset]: + """ + The `datasets` attribute of `self.multiscale`. + """ + return self.multiscale.datasets + + @property + def num_levels(self) -> int: + return len(self.datasets) + + @property + def axes_names(self) -> list[str]: + """ + List of axes names. + """ + return [ax.name for ax in self.multiscale.axes] + + @property + def pixel_sizes_zyx(self) -> list[list[float]]: + """ + Pixel sizes extracted from scale transformations of datasets. + + Raises: + ValueError: + If pixel sizes are below a given threshold (1e-9). + """ + x_index = self.axes_names.index("x") + y_index = self.axes_names.index("y") + try: + z_index = self.axes_names.index("z") + except ValueError: + z_index = None + logging.warning( + f"Z axis is not present (axes: {self.axes_names}), and Z pixel" + " size is set to 1. This may work, by accident, but it is " + "not fully supported." + ) + _pixel_sizes_zyx = [] + for level in range(self.num_levels): + scale = self.datasets[level].scale_transformation.scale + pixel_size_x = scale[x_index] + pixel_size_y = scale[y_index] + if z_index is not None: + pixel_size_z = scale[z_index] + else: + pixel_size_z = 1.0 + _pixel_sizes_zyx.append([pixel_size_z, pixel_size_y, pixel_size_x]) + if min(_pixel_sizes_zyx[-1]) < 1e-9: + raise ValueError( + f"Pixel sizes at level {level} are too small: " + f"{_pixel_sizes_zyx[-1]}" + ) + + return _pixel_sizes_zyx + + def get_pixel_sizes_zyx(self, *, level: int = 0) -> list[float]: + return self.pixel_sizes_zyx[level] + + @property + def coarsening_xy(self) -> int: + """ + Linear coarsening factor in the YX plane. + + We only support coarsening factors that are homogeneous (both in the + X/Y directions and across pyramid levels). 
+ + Raises: + NotImplementedError: + If coarsening ratios are not homogeneous. + """ + current_ratio = None + for ind in range(1, self.num_levels): + ratio_x = round( + self.pixel_sizes_zyx[ind][2] / self.pixel_sizes_zyx[ind - 1][2] + ) + ratio_y = round( + self.pixel_sizes_zyx[ind][1] / self.pixel_sizes_zyx[ind - 1][1] + ) + if ratio_x != ratio_y: + raise NotImplementedError( + "Inhomogeneous coarsening in X/Y directions " + "is not supported.\n" + f"ZYX pixel sizes:\n {self.pixel_sizes_zyx}" + ) + if current_ratio is None: + current_ratio = ratio_x + else: + if current_ratio != ratio_x: + raise NotImplementedError( + "Inhomogeneous coarsening across levels " + "is not supported.\n" + f"ZYX pixel sizes:\n {self.pixel_sizes_zyx}" + ) + + return current_ratio + + +class Image(BaseModel): + """ + Model for an element of `Well.images`. + + See https://ngff.openmicroscopy.org/0.4/#well-md. + + NOTE: we deviate from NGFF specs, since we allow `path` to be an arbitrary + string. + TODO: include a check like `constr(regex=r'^[A-Za-z0-9]+$')`, through a + Pydantic validator. + """ + + acquisition: Optional[int] = Field( + None, description="A unique identifier within the context of the plate" + ) + path: str = Field( + ..., description="The path for this field of view subgroup" + ) + + +class Well(BaseModel): + """ + Model for `NgffWellMeta.well`. + + See https://ngff.openmicroscopy.org/0.4/#well-md. + """ + + images: list[Image] = Field( + ..., + description="The images included in this well", + min_items=1, + unique_items=True, + ) + version: Optional[str] = Field( + None, description="The version of the specification" + ) + + +class NgffWellMeta(BaseModel): + """ + Model for the metadata of a NGFF well. + + See https://ngff.openmicroscopy.org/0.4/#well-md. + """ + + well: Optional[Well] = None + + def get_acquisition_paths(self) -> dict[int, str]: + """ + Create mapping from acquisition indices to corresponding paths. 
+ + Runs on the well zarr attributes and loads the relative paths in the + well. + + Returns: + Dictionary with `(acquisition index: image path)` key/value pairs. + + Raises: + ValueError: + If an element of `self.well.images` has no `acquisition` + attribute. + NotImplementedError: + If acquisitions are not unique. + """ + acquisition_dict = {} + for image in self.well.images: + if image.acquisition is None: + raise ValueError( + "Cannot get acquisition paths for Zarr files without " + "'acquisition' metadata at the well level" + ) + if image.acquisition in acquisition_dict: + raise NotImplementedError( + "This task is not implemented for wells with multiple " + "images of the same acquisition" + ) + acquisition_dict[image.acquisition] = image.path + return acquisition_dict + + +def load_NgffImageMeta(zarr_path: str) -> NgffImageMeta: + """ + Load the attributes of a zarr group and cast them to `NgffImageMeta`. + + Args: + zarr_path: Path to the zarr group. + + Returns: + A new `NgffImageMeta` object. + """ + zarr_group = zarr.open_group(zarr_path, mode="r") + zarr_attrs = zarr_group.attrs.asdict() + try: + return NgffImageMeta(**zarr_attrs) + except Exception as e: + logging.error( + f"Contents of {zarr_path} cannot be cast to NgffImageMeta.\n" + f"Original error:\n{str(e)}" + ) + raise e + + +def load_NgffWellMeta(zarr_path: str) -> NgffWellMeta: + """ + Load the attributes of a zarr group and cast them to `NgffWellMeta`. + + Args: + zarr_path: Path to the zarr group. + + Returns: + A new `NgffWellMeta` object. 
+ """ + zarr_group = zarr.open_group(zarr_path, mode="r") + zarr_attrs = zarr_group.attrs.asdict() + try: + return NgffWellMeta(**zarr_attrs) + except Exception as e: + logging.error( + f"Contents of {zarr_path} cannot be cast to NgffWellMeta.\n" + f"Original error:\n{str(e)}" + ) + raise e diff --git a/fractal_tasks_core/lib_zattrs_utils.py b/fractal_tasks_core/lib_zattrs_utils.py index f32dc13a9..28a5751a4 100644 --- a/fractal_tasks_core/lib_zattrs_utils.py +++ b/fractal_tasks_core/lib_zattrs_utils.py @@ -10,86 +10,16 @@ # Institute for Biomedical Research and Pelkmans Lab from the University of # Zurich. """ -Functions to handle `.zattrs` files and their contents. +Helper functions for operations on OME-NGFF metadata. """ -import json import logging from pathlib import Path from typing import Any +import zarr -logger = logging.getLogger(__name__) - - -def extract_zyx_pixel_sizes(zattrs_path: str, level: int = 0) -> list[float]: - """ - Load multiscales/datasets from `.zattrs` file and read the pixel sizes for - a given resoluion level. - - Args: - zattrs_path: Path to `.zattrs` file. - level: Resolution level for which the pixe sizes are required. - - Returns: - ZYX pixel sizes. - """ - with open(zattrs_path, "r") as jsonfile: - zattrs = json.load(jsonfile) - - try: - - # Identify multiscales - multiscales = zattrs["multiscales"] - - # Check that there is a single multiscale - if len(multiscales) > 1: - raise ValueError( - f"ERROR: There are {len(multiscales)} multiscales" - ) - - # Check that Z axis is present, raise a warning otherwise - axes = [ax["name"] for ax in multiscales[0]["axes"]] - if "z" not in axes: - logger.warning( - f"Z axis is not present in {axes=}. This case may work " - "by accident, but it is not fully supported." 
- ) - - # Check that there are no datasets-global transformations - if "coordinateTransformations" in multiscales[0].keys(): - raise NotImplementedError( - "Global coordinateTransformations at the multiscales " - "level are not currently supported" - ) - - # Identify all datasets (AKA pyramid levels) - datasets = multiscales[0]["datasets"] - - # Select highest-resolution dataset - transformations = datasets[level]["coordinateTransformations"] - for t in transformations: - if t["type"] == "scale": - # FIXME: Using [-3:] indices is a hack to deal with the fact - # that the coordinationTransformation can contain additional - # entries (e.g. scaling for the channels) - # https://github.com/fractal-analytics-platform/fractal-tasks-core/issues/420 - pixel_sizes = t["scale"][-3:] - if min(pixel_sizes) < 1e-9: - raise ValueError( - f"pixel_sizes in {zattrs_path} are {pixel_sizes}" - ) - return pixel_sizes - - raise ValueError( - f"No scale transformation found for level {level} in {zattrs_path}" - ) - - except KeyError as e: - raise KeyError( - f"extract_zyx_pixel_sizes_from_zattrs failed, for {zattrs_path}\n", - e, - ) +logger = logging.getLogger(__name__) def rescale_datasets( @@ -143,39 +73,11 @@ def rescale_datasets( return new_datasets -def get_acquisition_paths(zattrs: dict) -> dict[int, str]: - """ - Create mapping from acquisition indices to corresponding paths. - - Runs on the well .zattrs content and loads the relative paths in the well. - - Args: - zattrs: - Attributes of a well zarr group. - - Returns: - Dictionary with `(acquisition index: image path)` key/value pairs. 
- """ - acquisition_dict = {} - for image in zattrs["well"]["images"]: - if "acquisition" not in image: - raise ValueError( - "Cannot get acquisition paths for Zarr files without " - "'acquisition' metadata at the well level" - ) - if image["acquisition"] in acquisition_dict: - raise NotImplementedError( - "This task is not implemented for wells with multiple images " - "of the same acquisition" - ) - acquisition_dict[image["acquisition"]] = image["path"] - return acquisition_dict - - def get_table_path_dict(input_path: Path, component: str) -> dict[str, str]: """ Compile dictionary of (table name, table path) key/value pairs. + Args: input_path: Path to the parent folder of a plate zarr group (e.g. @@ -185,13 +87,15 @@ def get_table_path_dict(input_path: Path, component: str) -> dict[str, str]: `plate.zarr/B/03/0`). Returns: - Dictionary with table names as keys and table paths as values. + Dictionary with table names as keys and table paths as values. If + `tables` Zarr group is missing, or if it does not have a `tables` + key, then return an empty dictionary. """ try: - with open(f"{input_path / component}/tables/.zattrs", "r") as f_zattrs: - table_list = json.load(f_zattrs)["tables"] - except FileNotFoundError: + tables_group = zarr.open_group(f"{input_path / component}/tables", "r") + table_list = tables_group.attrs["tables"] + except (zarr.errors.GroupNotFoundError, KeyError): table_list = [] table_path_dict = {} @@ -199,30 +103,3 @@ def get_table_path_dict(input_path: Path, component: str) -> dict[str, str]: table_path_dict[table] = f"{input_path / component}/tables/{table}" return table_path_dict - - -def get_axes_names(attrs: dict) -> list: - """ - Get the axes names of a .zattrs dictionary - - .zattrs dicts usually contain their axes in the multiscales metadata. - This function returns a list of the axes names in the order they appeared - in the metadata. 
- - Args: - attrs: The .zattrs group of an OME-Zarr image as a dict - - Returns: - List of access names - """ - try: - axes = attrs["multiscales"][0]["axes"] - except (KeyError, TypeError) as e: - raise ValueError( - f"{attrs=} does not contain the necessary information to get " - f"axes, raising an exception {e=}" - ) - names = [] - for ax in axes: - names.append(ax["name"]) - return names diff --git a/fractal_tasks_core/tasks/apply_registration_to_ROI_tables.py b/fractal_tasks_core/tasks/apply_registration_to_ROI_tables.py index 68bacf3d6..a397889ee 100644 --- a/fractal_tasks_core/tasks/apply_registration_to_ROI_tables.py +++ b/fractal_tasks_core/tasks/apply_registration_to_ROI_tables.py @@ -13,7 +13,6 @@ Applies the multiplexing translation to all ROI tables """ import copy -import json import logging from typing import Any from typing import Optional @@ -26,10 +25,10 @@ from anndata._io.specs import write_elem from pydantic.decorator import validate_arguments +from fractal_tasks_core.lib_ngff import load_NgffWellMeta from fractal_tasks_core.lib_regions_of_interest import ( are_ROI_table_columns_valid, ) -from fractal_tasks_core.lib_zattrs_utils import get_acquisition_paths logger = logging.getLogger(__name__) @@ -66,8 +65,8 @@ def apply_registration_to_ROI_tables( component: Path to the OME-Zarr image in the OME-Zarr plate that is processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). - metadata: Dictionary containing metadata about the OME-Zarr. This task - does not use the metadata. + metadata: This parameter is not used by this task. + (standard argument for Fractal tasks, managed by Fractal server). roi_table: Name of the ROI table over which the task loops to calculate the registration. 
Examples: `FOV_ROI_table` => loop over the field of views, `well_ROI_table` => process the whole well as @@ -88,10 +87,8 @@ def apply_registration_to_ROI_tables( ) well_zarr = f"{input_paths[0]}/{component}" - with open(f"{well_zarr}/.zattrs", "r") as jsonfile: - zattrs = json.load(jsonfile) - - acquisition_dict = get_acquisition_paths(zattrs) + ngff_well_meta = load_NgffWellMeta(well_zarr) + acquisition_dict = ngff_well_meta.get_acquisition_paths() logger.info( "Calculating common registration for the following cycles: " f"{acquisition_dict}" diff --git a/fractal_tasks_core/tasks/apply_registration_to_image.py b/fractal_tasks_core/tasks/apply_registration_to_image.py index 3a0385efd..e19d945f7 100644 --- a/fractal_tasks_core/tasks/apply_registration_to_image.py +++ b/fractal_tasks_core/tasks/apply_registration_to_image.py @@ -11,7 +11,6 @@ """ Calculates translation for 2D image-based registration """ -import json import logging import os import shutil @@ -28,6 +27,7 @@ from anndata._io.specs import write_elem from pydantic.decorator import validate_arguments +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_regions_of_interest import ( convert_indices_to_regions, @@ -37,8 +37,6 @@ ) from fractal_tasks_core.lib_regions_of_interest import is_standard_roi_table from fractal_tasks_core.lib_regions_of_interest import load_region -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes -from fractal_tasks_core.lib_zattrs_utils import get_axes_names from fractal_tasks_core.lib_zattrs_utils import get_table_path_dict logger = logging.getLogger(__name__) @@ -86,13 +84,8 @@ def apply_registration_to_image( component: Path to the OME-Zarr image in the OME-Zarr plate that is processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). - metadata: Dictionary containing metadata about the OME-Zarr. 
This task - requires the following elements to be present in the metadata. - `coarsening_xy (int)`: coarsening factor in XY of the downsampling - when building the pyramid. (standard argument for Fractal tasks, - managed by Fractal server). - `num_levels (int)`: number of pyramid levels in the image; this - determines how many pyramid levels are built for the segmentation. + metadata: This parameter is not used by this task. + (standard argument for Fractal tasks, managed by Fractal server). registered_roi_table: Name of the ROI table which has been registered and will be applied to mask and shift the images. Examples: `registered_FOV_ROI_table` => loop over the field of @@ -116,8 +109,7 @@ def apply_registration_to_image( f"{component=}, {registered_roi_table=} and {reference_cycle=}. " f"Using {overwrite_input=}" ) - coarsening_xy = metadata["coarsening_xy"] - num_levels = metadata["num_levels"] + input_path = Path(input_paths[0]) new_component = "/".join( component.split("/")[:-1] + [component.split("/")[-1] + "_registered"] @@ -133,6 +125,10 @@ def apply_registration_to_image( f"{input_path / component}/tables/{registered_roi_table}" ) + ngff_image_meta = load_NgffImageMeta(str(input_path / component)) + coarsening_xy = ngff_image_meta.coarsening_xy + num_levels = ngff_image_meta.num_levels + #################### # Process images #################### @@ -152,9 +148,9 @@ def apply_registration_to_image( # Process labels #################### try: - with open(f"{input_path / component}/labels/.zattrs", "r") as f_zattrs: - label_list = json.load(f_zattrs)["labels"] - except FileNotFoundError: + labels_group = zarr.open_group(f"{input_path / component}/labels", "r") + label_list = labels_group.attrs["labels"] + except (zarr.errors.GroupNotFoundError, KeyError): label_list = [] if label_list: @@ -285,10 +281,9 @@ def write_registered_zarr( of `build_pyramid`). 
""" - # Read pixel sizes from zattrs file - pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{str(input_path / component)}/.zattrs", level=0 - ) + # Read pixel sizes from Zarr attributes + ngff_image_meta = load_NgffImageMeta(str(input_path / component)) + pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) # Create list of indices for 3D ROIs list_indices = convert_ROI_table_to_indices( @@ -305,6 +300,7 @@ def write_registered_zarr( ) old_image_group = zarr.open_group(f"{input_path / component}", mode="r") + old_ngff_image_meta = load_NgffImageMeta(str(input_path / component)) new_image_group = zarr.group(f"{input_path / new_component}") new_image_group.attrs.put(old_image_group.attrs.asdict()) @@ -322,7 +318,7 @@ def write_registered_zarr( reference_region = convert_indices_to_regions(list_indices_ref[i]) region = convert_indices_to_regions(roi_indices) - axes_list = get_axes_names(old_image_group.attrs.asdict()) + axes_list = old_ngff_image_meta.axes_names if axes_list == ["c", "z", "y", "x"]: num_channels = data_array.shape[0] diff --git a/fractal_tasks_core/tasks/calculate_registration_image_based.py b/fractal_tasks_core/tasks/calculate_registration_image_based.py index 52573e6bb..b09d06783 100644 --- a/fractal_tasks_core/tasks/calculate_registration_image_based.py +++ b/fractal_tasks_core/tasks/calculate_registration_image_based.py @@ -28,6 +28,7 @@ from fractal_tasks_core.lib_channels import get_channel_from_image_zarr from fractal_tasks_core.lib_channels import OmeroChannel +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_regions_of_interest import check_valid_ROI_indices from fractal_tasks_core.lib_regions_of_interest import ( convert_indices_to_regions, @@ -36,7 +37,6 @@ convert_ROI_table_to_indices, ) from fractal_tasks_core.lib_regions_of_interest import load_region -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes logger = logging.getLogger(__name__) @@ -77,11 +77,8 @@ def 
calculate_registration_image_based( component: Path to the OME-Zarr image in the OME-Zarr plate that is processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). - metadata: Dictionary containing metadata about the OME-Zarr. This task - requires the following elements to be present in the metadata. - `coarsening_xy (int)`: coarsening factor in XY of the downsampling - when building the pyramid. (standard argument for Fractal tasks, - managed by Fractal server). + metadata: This parameter is not used by this task. + (standard argument for Fractal tasks, managed by Fractal server). wavelength_id: Wavelength that will be used for image-based registration; e.g. `A01_C01` for Yokogawa, `C01` for MD. roi_table: Name of the ROI table over which the task loops to @@ -122,8 +119,9 @@ def calculate_registration_image_based( zarr_img_ref_cycle = zarr_img_cycle_x.parent / str(reference_cycle) - # Read some parameters from metadata - coarsening_xy = metadata["coarsening_xy"] + # Read some parameters from Zarr metadata + ngff_image_meta = load_NgffImageMeta(str(zarr_img_ref_cycle)) + coarsening_xy = ngff_image_meta.coarsening_xy # Get channel_index via wavelength_id. # Intially only allow registration of the same wavelength @@ -168,12 +166,11 @@ def calculate_registration_image_based( # If we relax this, downstream assumptions on matching based on order # in the list will break. 
- # Read pixel sizes from zattrs file for full_res - pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{zarr_img_ref_cycle}/.zattrs", level=0 - ) - pxl_sizes_zyx_cycle_x = extract_zyx_pixel_sizes( - f"{zarr_img_cycle_x}/.zattrs", level=0 + # Read pixel sizes from zarr attributes + ngff_image_meta_cycle_x = load_NgffImageMeta(str(zarr_img_cycle_x)) + pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) + pxl_sizes_zyx_cycle_x = ngff_image_meta_cycle_x.get_pixel_sizes_zyx( + level=0 ) if pxl_sizes_zyx != pxl_sizes_zyx_cycle_x: diff --git a/fractal_tasks_core/tasks/cellpose_segmentation.py b/fractal_tasks_core/tasks/cellpose_segmentation.py index 2ba866f3e..764f01918 100644 --- a/fractal_tasks_core/tasks/cellpose_segmentation.py +++ b/fractal_tasks_core/tasks/cellpose_segmentation.py @@ -13,7 +13,6 @@ """ Image segmentation via Cellpose library. """ -import json import logging import os import time @@ -37,6 +36,7 @@ from fractal_tasks_core.lib_channels import OmeroChannel from fractal_tasks_core.lib_input_models import Channel from fractal_tasks_core.lib_masked_loading import masked_loading_wrapper +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_regions_of_interest import ( array_to_bounding_box_table, @@ -51,7 +51,6 @@ from fractal_tasks_core.lib_ROI_overlaps import get_overlapping_pairs_3D from fractal_tasks_core.lib_write import prepare_label_group from fractal_tasks_core.lib_write import write_table -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes from fractal_tasks_core.lib_zattrs_utils import rescale_datasets logger = logging.getLogger(__name__) @@ -188,13 +187,8 @@ def cellpose_segmentation( component: Path to the OME-Zarr image in the OME-Zarr plate that is processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). 
- metadata: dictionary containing metadata about the OME-Zarr. This task - requires the following elements to be present in the metadata. - `num_levels (int)`: number of pyramid levels in the image; this - determines how many pyramid levels are built for the segmentation. - `coarsening_xy (int)`: coarsening factor in XY of the downsampling - when building the pyramid. (standard argument for Fractal tasks, - managed by Fractal server). + metadata: This parameter is not used by this task. + (standard argument for Fractal tasks, managed by Fractal server). level: Pyramid level of the image to be segmented. Choose `0` to process at full resolution. channel: Primary channel for segmentation; requires either @@ -270,9 +264,10 @@ def cellpose_segmentation( if not os.path.exists(pretrained_model): raise ValueError(f"{pretrained_model=} does not exist.") - # Read useful parameters from metadata - num_levels = metadata["num_levels"] - coarsening_xy = metadata["coarsening_xy"] + # Read some parameters from Zarr metadata + ngff_image_meta = load_NgffImageMeta(zarrurl) + num_levels = ngff_image_meta.num_levels + coarsening_xy = ngff_image_meta.coarsening_xy plate, well = component.split(".zarr/") @@ -339,14 +334,10 @@ def cellpose_segmentation( use_masks = False logger.info(f"{use_masks=}") - # Read pixel sizes from zattrs file - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{zarrurl}/.zattrs", level=0 - ) + # Read pixel sizes from Zarr attributes + full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) logger.info(f"{full_res_pxl_sizes_zyx=}") - actual_res_pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{zarrurl}/.zattrs", level=level - ) + actual_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=level) logger.info(f"{actual_res_pxl_sizes_zyx=}") # Create list of indices for 3D ROIs spanning the entire Z direction @@ -371,10 +362,8 @@ def cellpose_segmentation( do_3D = data_zyx.shape[0] > 1 and len(data_zyx.shape) == 3 if do_3D: if anisotropy is
None: - # Read pixel sizes from zattrs file - pxl_zyx = extract_zyx_pixel_sizes( - f"{zarrurl}/.zattrs", level=level - ) + # Read pixel sizes from Zarr attributes + pxl_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=level) pixel_size_z, pixel_size_y, pixel_size_x = pxl_zyx[:] logger.info(f"{pxl_zyx=}") if not np.allclose(pixel_size_x, pixel_size_y): @@ -385,33 +374,15 @@ def cellpose_segmentation( ) anisotropy = pixel_size_z / pixel_size_x - # Load zattrs file - zattrs_file = f"{zarrurl}/.zattrs" - with open(zattrs_file, "r") as jsonfile: - zattrs = json.load(jsonfile) - - # Preliminary checks on multiscales - multiscales = zattrs["multiscales"] - if len(multiscales) > 1: - raise NotImplementedError( - f"Found {len(multiscales)} multiscales, " - "but only one is currently supported." - ) - if "coordinateTransformations" in multiscales[0].keys(): - raise NotImplementedError( - "global coordinateTransformations at the multiscales " - "level are not currently supported" - ) - # Rescale datasets (only relevant for level>0) - if not multiscales[0]["axes"][0]["name"] == "c": + if ngff_image_meta.axes_names[0] != "c": raise ValueError( "Cannot set `remove_channel_axis=True` for multiscale " - f'metadata with axes={multiscales[0]["axes"]}. ' + f"metadata with axes={ngff_image_meta.axes_names}. " 'First axis should have name "c".' 
) new_datasets = rescale_datasets( - datasets=multiscales[0]["datasets"], + datasets=[ds.dict() for ds in ngff_image_meta.datasets], coarsening_xy=coarsening_xy, reference_level=level, remove_channel_axis=True, @@ -427,9 +398,9 @@ def cellpose_segmentation( "name": output_label_name, "version": __OME_NGFF_VERSION__, "axes": [ - ax - for ax in multiscales[0]["axes"] - if ax["type"] != "channel" + ax.dict() + for ax in ngff_image_meta.multiscale.axes + if ax.type != "channel" ], "datasets": new_datasets, } diff --git a/fractal_tasks_core/tasks/copy_ome_zarr.py b/fractal_tasks_core/tasks/copy_ome_zarr.py index c9ffa168c..f4eeda91d 100644 --- a/fractal_tasks_core/tasks/copy_ome_zarr.py +++ b/fractal_tasks_core/tasks/copy_ome_zarr.py @@ -23,11 +23,11 @@ from pydantic.decorator import validate_arguments import fractal_tasks_core +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_regions_of_interest import ( convert_ROIs_from_3D_to_2D, ) from fractal_tasks_core.lib_write import open_zarr_group_with_overwrite -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes logger = logging.getLogger(__name__) @@ -173,11 +173,10 @@ def copy_ome_zarr( new_tables_group = new_image_group.create_group("tables/") new_tables_group.attrs["tables"] = ROI_table_names if project_to_2D: - path_FOV_zattrs = ( - f"{zarrurl_old}/{well_path}/{image_path}/.zattrs" - ) - pxl_sizes_zyx = extract_zyx_pixel_sizes( - path_FOV_zattrs, level=0 + path_image = f"{zarrurl_old}/{well_path}/{image_path}" + ngff_image_meta = load_NgffImageMeta(path_image) + pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx( + level=0 ) pxl_size_z = pxl_sizes_zyx[0] @@ -193,7 +192,7 @@ def copy_ome_zarr( f"{zarrurl_old}/{well_path}/{image_path}/" f"tables/{ROI_table_name}" ) - # Convert 3D FOVs to 2D + # Convert 3D ROIs to 2D if project_to_2D: new_ROI_table = convert_ROIs_from_3D_to_2D( ROI_table, pxl_size_z diff --git a/fractal_tasks_core/tasks/create_ome_zarr.py 
b/fractal_tasks_core/tasks/create_ome_zarr.py index a9c93f023..ea8aef11a 100644 --- a/fractal_tasks_core/tasks/create_ome_zarr.py +++ b/fractal_tasks_core/tasks/create_ome_zarr.py @@ -83,7 +83,7 @@ def create_ome_zarr( Example: "/some/path/" => puts the new OME-Zarr file in the "/some/path/". (standard argument for Fractal tasks, managed by Fractal server). - metadata: This parameter is not used by this task + metadata: This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server). allowed_channels: A list of `OmeroChannel` s, where each channel must include the `wavelength_id` attribute and where the diff --git a/fractal_tasks_core/tasks/illumination_correction.py b/fractal_tasks_core/tasks/illumination_correction.py index 9a3fb5842..93d926ca8 100644 --- a/fractal_tasks_core/tasks/illumination_correction.py +++ b/fractal_tasks_core/tasks/illumination_correction.py @@ -29,12 +29,12 @@ from fractal_tasks_core.lib_channels import get_omero_channel_list from fractal_tasks_core.lib_channels import OmeroChannel +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_regions_of_interest import check_valid_ROI_indices from fractal_tasks_core.lib_regions_of_interest import ( convert_ROI_table_to_indices, ) -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes logger = logging.getLogger(__name__) @@ -125,12 +125,7 @@ def illumination_correction( component: Path to the OME-Zarr image in the OME-Zarr plate that is processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). - metadata: Dictionary containing metadata about the OME-Zarr. This task - requires the following elements to be present in the metadata. 
- `num_levels (int)`: number of pyramid levels in the image (this - determines how many pyramid levels are built for the segmentation), - `coarsening_xy (int)`: coarsening factor in XY of the downsampling - when building the pyramid. + metadata: This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server). illumination_profiles_folder: Path of folder of illumination profiles. dict_corr: Dictionary where keys match the `wavelength_id` attributes @@ -167,10 +162,6 @@ def illumination_correction( ) raise NotImplementedError(msg) - # Read some parameters from metadata - num_levels = metadata["num_levels"] - coarsening_xy = metadata["coarsening_xy"] - # Defione old/new zarrurls plate, well = component.split(".zarr/") in_path = Path(input_paths[0]) @@ -183,13 +174,18 @@ def illumination_correction( raise ValueError(f"{well=}, {new_well=}") zarrurl_new = (Path(output_path) / new_component).as_posix() + # Read some parameters from metadata + ngff_image_meta = load_NgffImageMeta(zarrurl_old) + num_levels = ngff_image_meta.num_levels + coarsening_xy = ngff_image_meta.coarsening_xy + t_start = time.perf_counter() logger.info("Start illumination_correction") logger.info(f" {overwrite_input=}") logger.info(f" {zarrurl_old=}") logger.info(f" {zarrurl_new=}") - # Read channels from .zattrs + # Read channels from .zattrs # FIXME replace with NgffImageMeta? 
channels: list[OmeroChannel] = get_omero_channel_list( image_zarr_path=zarrurl_old ) @@ -199,9 +195,7 @@ def illumination_correction( FOV_ROI_table = ad.read_zarr(f"{zarrurl_old}/tables/FOV_ROI_table") # Read pixel sizes from zattrs file - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{zarrurl_old}/.zattrs", level=0 - ) + full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) # Create list of indices for 3D FOVs spanning the entire Z direction list_indices = convert_ROI_table_to_indices( diff --git a/fractal_tasks_core/tasks/maximum_intensity_projection.py b/fractal_tasks_core/tasks/maximum_intensity_projection.py index 266d50b14..df84919aa 100644 --- a/fractal_tasks_core/tasks/maximum_intensity_projection.py +++ b/fractal_tasks_core/tasks/maximum_intensity_projection.py @@ -22,13 +22,13 @@ from pydantic.decorator import validate_arguments from zarr.errors import ContainsArrayError +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_regions_of_interest import check_valid_ROI_indices from fractal_tasks_core.lib_regions_of_interest import ( convert_ROI_table_to_indices, ) from fractal_tasks_core.lib_write import OverwriteNotAllowedError -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes logger = logging.getLogger(__name__) @@ -61,17 +61,8 @@ def maximum_intensity_projection( Example: `"some_plate_mip.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). metadata: Dictionary containing metadata about the OME-Zarr. - This task requires the following elements to be present in the - metadata. - `num_levels (int)`: number of pyramid levels in the image (this - determines how many pyramid levels are built for the segmentation); - `coarsening_xy (int)`: coarsening factor in XY of the - downsampling when building the pyramid - `plate`: List of plates (e.g. 
`["MyPlate.zarr"]`); - `well`: List of wells in the OME-Zarr plate - (e.g. `["MyPlate.zarr/B/03", "MyPlate.zarr/B/05"]`); - `image: List of images in the OME-Zarr plate - (e.g. `["MyPlate.zarr/B/03/0", "MyPlate.zarr/B/05/0"]`). + This task requires the key `copy_ome_zarr` to be present in the + metadata (as defined in `copy_ome_zarr` task). (standard argument for Fractal tasks, managed by Fractal server). overwrite: If `True`, overwrite the task output. """ @@ -80,22 +71,21 @@ def maximum_intensity_projection( if len(input_paths) > 1: raise NotImplementedError - # Read some parameters from metadata - num_levels = metadata["num_levels"] - coarsening_xy = metadata["coarsening_xy"] plate, well = component.split(".zarr/") - zarrurl_old = metadata["copy_ome_zarr"]["sources"][plate] + "/" + well clean_output_path = Path(output_path).resolve() zarrurl_new = (clean_output_path / component).as_posix() logger.info(f"{zarrurl_old=}") logger.info(f"{zarrurl_new=}") + # Read some parameters from metadata + ngff_image = load_NgffImageMeta(zarrurl_old) + num_levels = ngff_image.num_levels + coarsening_xy = ngff_image.coarsening_xy + # This whole block finds (chunk_size_y,chunk_size_x) FOV_ROI_table = ad.read_zarr(f"{zarrurl_old}/tables/FOV_ROI_table") - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{zarrurl_old}/.zattrs", level=0 - ) + full_res_pxl_sizes_zyx = ngff_image.get_pixel_sizes_zyx(level=0) # Create list of indices for 3D FOVs spanning the entire Z direction list_indices = convert_ROI_table_to_indices( FOV_ROI_table, diff --git a/fractal_tasks_core/tasks/napari_workflows_wrapper.py b/fractal_tasks_core/tasks/napari_workflows_wrapper.py index e1ea100ab..0505473d9 100644 --- a/fractal_tasks_core/tasks/napari_workflows_wrapper.py +++ b/fractal_tasks_core/tasks/napari_workflows_wrapper.py @@ -12,7 +12,6 @@ """ Wrapper of napari-workflows. 
""" -import json import logging from pathlib import Path from typing import Any @@ -31,6 +30,7 @@ from fractal_tasks_core.lib_channels import get_channel_from_image_zarr from fractal_tasks_core.lib_input_models import NapariWorkflowsInput from fractal_tasks_core.lib_input_models import NapariWorkflowsOutput +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_regions_of_interest import check_valid_ROI_indices from fractal_tasks_core.lib_regions_of_interest import ( @@ -40,7 +40,6 @@ from fractal_tasks_core.lib_upscale_array import upscale_array from fractal_tasks_core.lib_write import prepare_label_group from fractal_tasks_core.lib_write import write_table -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes from fractal_tasks_core.lib_zattrs_utils import rescale_datasets @@ -110,12 +109,7 @@ def napari_workflows_wrapper( processed. Example: `"some_plate.zarr/B/03/0"`. (standard argument for Fractal tasks, managed by Fractal server). - metadata: Dictionary containing metadata about the OME-Zarr. This task - requires the following elements to be present in the metadata. - `num_levels (int)`: number of pyramid levels in the image (this - determines how many pyramid levels are built for the segmentation); - `coarsening_xy (int)`: coarsening factor in XY of the downsampling - when building the pyramid. + metadata: This parameter is not used by this task. (standard argument for Fractal tasks, managed by Fractal server). workflow_file: Absolute path to napari-workflows YAML file input_specs: A dictionary of `NapariWorkflowsInput` values. 
@@ -195,32 +189,19 @@ def napari_workflows_wrapper( "We currently only support a single input path" ) in_path = Path(input_paths[0]).as_posix() - num_levels = metadata["num_levels"] - coarsening_xy = metadata["coarsening_xy"] label_dtype = np.uint32 - # Load zattrs file and multiscales - zattrs_file = f"{in_path}/{component}/.zattrs" - with open(zattrs_file, "r") as jsonfile: - zattrs = json.load(jsonfile) - multiscales = zattrs["multiscales"] - if len(multiscales) > 1: - raise NotImplementedError( - f"Found {len(multiscales)} multiscales, " - "but only one is currently supported." - ) - if "coordinateTransformations" in multiscales[0].keys(): - raise NotImplementedError( - "global coordinateTransformations at the multiscales " - "level are not currently supported" - ) - - # Read ROI table + # Read ROI table zarrurl = f"{in_path}/{component}" ROI_table = ad.read_zarr(f"{in_path}/{component}/tables/{input_ROI_table}") + # Load image metadata + ngff_image_meta = load_NgffImageMeta(zarrurl) + num_levels = ngff_image_meta.num_levels + coarsening_xy = ngff_image_meta.coarsening_xy + # Read pixel sizes from zattrs file - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes(zattrs_file, level=0) + full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) # Create list of indices for 3D FOVs spanning the entire Z direction list_indices = convert_ROI_table_to_indices( @@ -385,12 +366,11 @@ def napari_workflows_wrapper( reference_array = list(input_label_arrays.values())[0] # Re-load pixel size, matching to the correct level input_label_name = label_inputs[0][1].label_name - zattrs_file = ( - f"{in_path}/{component}/labels/{input_label_name}/.zattrs" + ngff_label_image_meta = load_NgffImageMeta( + f"{in_path}/{component}/labels/{input_label_name}" ) - # Read pixel sizes from zattrs file - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes( - zattrs_file, level=0 + full_res_pxl_sizes_zyx = ngff_label_image_meta.get_pixel_sizes_zyx( + level=0 ) # Create list of indices
for 3D FOVs spanning the whole Z direction list_indices = convert_ROI_table_to_indices( @@ -435,14 +415,16 @@ def napari_workflows_wrapper( for (name, out_params) in label_outputs: # (1a) Rescale OME-NGFF datasets (relevant for level>0) - if not multiscales[0]["axes"][0]["name"] == "c": + if not ngff_image_meta.multiscale.axes[0].name == "c": raise ValueError( "Cannot set `remove_channel_axis=True` for multiscale " - f'metadata with axes={multiscales[0]["axes"]}. ' + f"metadata with axes={ngff_image_meta.multiscale.axes}. " 'First axis should have name "c".' ) new_datasets = rescale_datasets( - datasets=multiscales[0]["datasets"], + datasets=[ + ds.dict() for ds in ngff_image_meta.multiscale.datasets + ], coarsening_xy=coarsening_xy, reference_level=level, remove_channel_axis=True, @@ -460,9 +442,9 @@ def napari_workflows_wrapper( "name": label_name, "version": __OME_NGFF_VERSION__, "axes": [ - ax - for ax in multiscales[0]["axes"] - if ax["type"] != "channel" + ax.dict() + for ax in ngff_image_meta.multiscale.axes + if ax.type != "channel" ], "datasets": new_datasets, } diff --git a/fractal_tasks_core/tasks/yokogawa_to_ome_zarr.py b/fractal_tasks_core/tasks/yokogawa_to_ome_zarr.py index 16271eda7..2b002ac39 100644 --- a/fractal_tasks_core/tasks/yokogawa_to_ome_zarr.py +++ b/fractal_tasks_core/tasks/yokogawa_to_ome_zarr.py @@ -27,6 +27,7 @@ from fractal_tasks_core.lib_channels import get_omero_channel_list from fractal_tasks_core.lib_channels import OmeroChannel from fractal_tasks_core.lib_glob import glob_with_multiple_patterns +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_parse_filename_metadata import parse_filename from fractal_tasks_core.lib_pyramid_creation import build_pyramid from fractal_tasks_core.lib_read_fractal_metadata import ( @@ -37,7 +38,6 @@ convert_ROI_table_to_indices, ) from fractal_tasks_core.lib_write import OverwriteNotAllowedError -from fractal_tasks_core.lib_zattrs_utils import 
extract_zyx_pixel_sizes logger = logging.getLogger(__name__) @@ -111,11 +111,13 @@ def yokogawa_to_ome_zarr( raise NotImplementedError zarrurl = Path(input_paths[0]).as_posix() + f"/{component}" + ngff_image_meta = load_NgffImageMeta(zarrurl) + num_levels = ngff_image_meta.num_levels + coarsening_xy = ngff_image_meta.coarsening_xy + parameters = get_parameters_from_metadata( keys=[ "original_paths", - "num_levels", - "coarsening_xy", "image_extension", "image_glob_patterns", ], @@ -126,8 +128,6 @@ def yokogawa_to_ome_zarr( image_zarr_path=(Path(output_path) / component), ) original_path_list = parameters["original_paths"] - num_levels = parameters["num_levels"] - coarsening_xy = parameters["coarsening_xy"] image_extension = parameters["image_extension"] image_glob_patterns = parameters["image_glob_patterns"] @@ -146,7 +146,7 @@ def yokogawa_to_ome_zarr( # Read useful information from ROI table and .zattrs adata = read_zarr(f"{zarrurl}/tables/FOV_ROI_table") - pxl_size = extract_zyx_pixel_sizes(f"{zarrurl}/.zattrs") + pxl_size = ngff_image_meta.get_pixel_sizes_zyx(level=0) fov_indices = convert_ROI_table_to_indices( adata, full_res_pxl_sizes_zyx=pxl_size ) diff --git a/tests/data/generate_zarr_ones.py b/tests/data/generate_zarr_ones.py index b054a44d2..4d78a31c8 100644 --- a/tests/data/generate_zarr_ones.py +++ b/tests/data/generate_zarr_ones.py @@ -62,6 +62,7 @@ { "type": "scale", "scale": [ + 1.0, pxl_z, pxl_y * cxy**level, pxl_x * cxy**level, @@ -79,10 +80,14 @@ { "wavelength_id": "A01_C01", "label": "some-label-1", + "window": {"min": "0", "max": "10", "start": "0", "end": "10"}, + "color": "00FFFF", }, { "wavelength_id": "A01_C02", "label": "some-label-2", + "window": {"min": "0", "max": "10", "start": "0", "end": "10"}, + "color": "00FFFF", }, ] }, diff --git a/tests/data/ngff_examples/dataset.json b/tests/data/ngff_examples/dataset.json new file mode 100644 index 000000000..d81fe3fba --- /dev/null +++ b/tests/data/ngff_examples/dataset.json @@ -0,0 +1,9 @@ +{ 
+ "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [1.0, 1.0, 0.1625, 0.1625] + } + ] +} diff --git a/tests/data/ngff_examples/dataset_error_1.json b/tests/data/ngff_examples/dataset_error_1.json new file mode 100644 index 000000000..b81702c3d --- /dev/null +++ b/tests/data/ngff_examples/dataset_error_1.json @@ -0,0 +1,9 @@ +{ + "path": 0, + "coordinateTransformations": [ + { + "type": "translation", + "translation": [1, 2, 3] + } + ] +} diff --git a/tests/data/ngff_examples/dataset_error_2.json b/tests/data/ngff_examples/dataset_error_2.json new file mode 100644 index 000000000..16c717084 --- /dev/null +++ b/tests/data/ngff_examples/dataset_error_2.json @@ -0,0 +1,13 @@ +{ + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [1.0, 1.0, 0.1625, 0.1625] + }, + { + "type": "scale", + "scale": [1.0, 1.0, 0.3, 0.3] + } + ] +} diff --git a/tests/data/ngff_examples/image.json b/tests/data/ngff_examples/image.json new file mode 100644 index 000000000..ed7b031cc --- /dev/null +++ b/tests/data/ngff_examples/image.json @@ -0,0 +1,126 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1625, + 0.1625 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.325, + 0.325 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.65, + 0.65 + ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 1.3, + 1.3 + ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": 
"scale", + "scale": [ + 1.0, + 1.0, + 2.6, + 2.6 + ] + } + ] + } + ], + "version": "0.4" + } + ], + "omero": { + "channels": [ + { + "wavelength_id": "A01_C01", + "label": "some-label-1", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" + }, + { + "wavelength_id": "A01_C02", + "label": "some-label-2", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" + } + ] + } +} diff --git a/tests/data/ngff_examples/image_CYX.json b/tests/data/ngff_examples/image_CYX.json new file mode 100644 index 000000000..32e4f352a --- /dev/null +++ b/tests/data/ngff_examples/image_CYX.json @@ -0,0 +1,90 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.1625, + 0.1625 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.325, + 0.325 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.65, + 0.65 + ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.3, + 1.3 + ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 2.6, + 2.6 + ] + } + ] + } + ], + "version": "0.4" + } + ] +} diff --git a/tests/data/ngff_examples/image_ZYX.json b/tests/data/ngff_examples/image_ZYX.json new file mode 100644 index 000000000..b9bfeb4b8 --- /dev/null +++ b/tests/data/ngff_examples/image_ZYX.json @@ -0,0 +1,91 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": 
"space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.1625, + 0.1625 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.325, + 0.325 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 0.65, + 0.65 + ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.3, + 1.3 + ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 2.6, + 2.6 + ] + } + ] + } + ], + "version": "0.4" + } + ] +} diff --git a/tests/data/ngff_examples/image_error.json b/tests/data/ngff_examples/image_error.json new file mode 100644 index 000000000..e327a0baa --- /dev/null +++ b/tests/data/ngff_examples/image_error.json @@ -0,0 +1,142 @@ +{ + "multiscales": [ + { + "axes": [ + { "name": "c", "type": "channel" }, + { "name": "z", "type": "space", "unit": "micrometer" }, + { "name": "y", "type": "space", "unit": "micrometer" }, + { "name": "x", "type": "space", "unit": "micrometer" } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.1625, 0.1625 ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.325, 0.325 ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.65, 0.65 ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 1.3, 1.3 ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 2.6, 2.6 ] + } + ] + } + ], + "version": "0.4" + }, + { + "axes": [ + { "name": "c", "type": "channel" }, + { "name": "z", "type": "space", "unit": "micrometer" }, + { "name": "y", "type": 
"space", "unit": "micrometer" }, + { "name": "x", "type": "space", "unit": "micrometer" } + ], + "datasets": [ + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.1625, 0.1625 ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.325, 0.325 ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 0.65, 0.65 ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 1.3, 1.3 ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ 1.0, 1.0, 2.6, 2.6 ] + } + ] + } + ], + "version": "0.4" + } + ], + "omero": { + "channels": [ + { + "wavelength_id": "A01_C01", + "label": "some-label-1", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" + }, + { + "wavelength_id": "A01_C02", + "label": "some-label-2", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" + } + ] + } +} diff --git a/tests/data/ngff_examples/image_error_coarsening_1.json b/tests/data/ngff_examples/image_error_coarsening_1.json new file mode 100644 index 000000000..7f11ba9fd --- /dev/null +++ b/tests/data/ngff_examples/image_error_coarsening_1.json @@ -0,0 +1,72 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1, + 0.1 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.2, + 0.3 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + 
{ + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.4, + 0.9 + ] + } + ] + } + ], + "version": "0.4" + } + ] +} diff --git a/tests/data/ngff_examples/image_error_coarsening_2.json b/tests/data/ngff_examples/image_error_coarsening_2.json new file mode 100644 index 000000000..085a48014 --- /dev/null +++ b/tests/data/ngff_examples/image_error_coarsening_2.json @@ -0,0 +1,72 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1, + 0.1 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.2, + 0.2 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.8, + 0.8 + ] + } + ] + } + ], + "version": "0.4" + } + ] +} diff --git a/tests/data/ngff_examples/image_error_pixels.json b/tests/data/ngff_examples/image_error_pixels.json new file mode 100644 index 000000000..44130ff46 --- /dev/null +++ b/tests/data/ngff_examples/image_error_pixels.json @@ -0,0 +1,44 @@ +{ + "multiscales": [ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1625, + 0.000000000000000000000001 + ] + } + ] + } + ], + "version": "0.4" + } + ] +} diff --git a/tests/data/ngff_examples/multiscale.json b/tests/data/ngff_examples/multiscale.json new file mode 100644 index 000000000..5a722aac9 --- /dev/null 
+++ b/tests/data/ngff_examples/multiscale.json @@ -0,0 +1,96 @@ + { + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1625, + 0.1625 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.325, + 0.325 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.65, + 0.65 + ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 1.3, + 1.3 + ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 2.6, + 2.6 + ] + } + ] + } + ], + "version": "0.4" + } diff --git a/tests/data/ngff_examples/multiscale_error.json b/tests/data/ngff_examples/multiscale_error.json new file mode 100644 index 000000000..5e8d119de --- /dev/null +++ b/tests/data/ngff_examples/multiscale_error.json @@ -0,0 +1,107 @@ + { + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1625, + 0.1625 + ] + } + ], + "axes": [ + { + "name": "c", + "type": "channel" + }, + { + "name": "z", + "type": "space", + "unit": "micrometer" + }, + { + "name": "y", + "type": "space", + "unit": "micrometer" + }, + { + "name": "x", + "type": "space", + "unit": "micrometer" + } + ], + "datasets": [ + { + "path": 0, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.1625, + 0.1625 + ] + } + ] + }, + { + "path": 1, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 0.325, + 0.325 + ] + } + ] + }, + { + "path": 2, + "coordinateTransformations": [ + { + "type": 
"scale", + "scale": [ + 1.0, + 1.0, + 0.65, + 0.65 + ] + } + ] + }, + { + "path": 3, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 1.3, + 1.3 + ] + } + ] + }, + { + "path": 4, + "coordinateTransformations": [ + { + "type": "scale", + "scale": [ + 1.0, + 1.0, + 2.6, + 2.6 + ] + } + ] + } + ], + "version": "0.4" + } diff --git a/tests/data/ngff_examples/well.json b/tests/data/ngff_examples/well.json new file mode 100644 index 000000000..afe52e509 --- /dev/null +++ b/tests/data/ngff_examples/well.json @@ -0,0 +1,10 @@ +{ + "well": { + "images": [ + { + "path": "0" + } + ], + "version": "0.4" + } +} diff --git a/tests/data/ngff_examples/well_acquisitions.json b/tests/data/ngff_examples/well_acquisitions.json new file mode 100644 index 000000000..07153d3b5 --- /dev/null +++ b/tests/data/ngff_examples/well_acquisitions.json @@ -0,0 +1,15 @@ +{ + "well": { + "images": [ + { + "path": "nine", + "acquisition": 9 + }, + { + "path": "seven", + "acquisition": 7 + } + ], + "version": "0.4" + } +} diff --git a/tests/data/ngff_examples/well_acquisitions_error.json b/tests/data/ngff_examples/well_acquisitions_error.json new file mode 100644 index 000000000..7df7ae7ba --- /dev/null +++ b/tests/data/ngff_examples/well_acquisitions_error.json @@ -0,0 +1,15 @@ +{ + "well": { + "images": [ + { + "path": "nine", + "acquisition": 9 + }, + { + "path": "seven", + "acquisition": 9 + } + ], + "version": "0.4" + } +} diff --git a/tests/data/plate_ones.zarr/B/03/0/.zattrs b/tests/data/plate_ones.zarr/B/03/0/.zattrs index 9b8f984c6..ed7b031cc 100644 --- a/tests/data/plate_ones.zarr/B/03/0/.zattrs +++ b/tests/data/plate_ones.zarr/B/03/0/.zattrs @@ -29,6 +29,7 @@ { "type": "scale", "scale": [ + 1.0, 1.0, 0.1625, 0.1625 @@ -42,6 +43,7 @@ { "type": "scale", "scale": [ + 1.0, 1.0, 0.325, 0.325 @@ -55,6 +57,7 @@ { "type": "scale", "scale": [ + 1.0, 1.0, 0.65, 0.65 @@ -68,6 +71,7 @@ { "type": "scale", "scale": [ + 1.0, 1.0, 1.3, 1.3 @@ -81,6 +85,7 @@ { "type": 
"scale", "scale": [ + 1.0, 1.0, 2.6, 2.6 @@ -96,11 +101,25 @@ "channels": [ { "wavelength_id": "A01_C01", - "label": "some-label-1" + "label": "some-label-1", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" }, { "wavelength_id": "A01_C02", - "label": "some-label-2" + "label": "some-label-2", + "window": { + "min": "0", + "max": "10", + "start": "0", + "end": "10" + }, + "color": "00FFFF" } ] } diff --git a/tests/tasks/_validation.py b/tests/tasks/_validation.py index e16043c09..83fbc3fbe 100644 --- a/tests/tasks/_validation.py +++ b/tests/tasks/_validation.py @@ -12,10 +12,10 @@ from jsonschema import validate from fractal_tasks_core import __OME_NGFF_VERSION__ +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_regions_of_interest import ( convert_ROI_table_to_indices, ) -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes def validate_schema(*, path: str, type: str): @@ -98,10 +98,10 @@ def validate_labels_and_measurements( list_label_values = list(np.unique(labels)) # Create list of FOV-ROI indices - zattrs_file = str(image_zarr / ".zattrs") FOV_table_path = str(image_zarr / "tables/FOV_ROI_table") ROI_table = ad.read_zarr(FOV_table_path) - full_res_pxl_sizes_zyx = extract_zyx_pixel_sizes(zattrs_file, level=0) + ngff_image_meta = load_NgffImageMeta(str(image_zarr)) + full_res_pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) list_indices = convert_ROI_table_to_indices( ROI_table, level=0, diff --git a/tests/tasks/test_registration.py b/tests/tasks/test_registration.py index ece2e0f96..c10f647b1 100644 --- a/tests/tasks/test_registration.py +++ b/tests/tasks/test_registration.py @@ -14,6 +14,7 @@ from pytest import MonkeyPatch from fractal_tasks_core.lib_input_models import Channel +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_regions_of_interest import ( convert_indices_to_regions, ) @@ -21,7 +22,6 @@ 
convert_ROI_table_to_indices, ) from fractal_tasks_core.lib_regions_of_interest import load_region -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes from fractal_tasks_core.tasks.apply_registration_to_image import ( apply_registration_to_image, ) @@ -318,9 +318,8 @@ def test_multiplexing_registration( # b) none when loading the registered ROI for component in metadata["image"]: # Read pixel sizes from zattrs file - pxl_sizes_zyx = extract_zyx_pixel_sizes( - f"{str(zarr_path_mip / component)}/.zattrs", level=0 - ) + ngff_image_meta = load_NgffImageMeta(str(zarr_path_mip / component)) + pxl_sizes_zyx = ngff_image_meta.get_pixel_sizes_zyx(level=0) original_table = ad.read_zarr( f"{zarr_path_mip / component}/tables/{roi_table}" diff --git a/tests/tasks/test_unit_illumination_correction.py b/tests/tasks/test_unit_illumination_correction.py index cae31b08e..31486404f 100644 --- a/tests/tasks/test_unit_illumination_correction.py +++ b/tests/tasks/test_unit_illumination_correction.py @@ -10,10 +10,10 @@ from pytest import LogCaptureFixture from pytest import MonkeyPatch +from fractal_tasks_core.lib_ngff import load_NgffImageMeta from fractal_tasks_core.lib_regions_of_interest import ( convert_ROI_table_to_indices, ) -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes from fractal_tasks_core.tasks.illumination_correction import correct from fractal_tasks_core.tasks.illumination_correction import ( illumination_correction, @@ -88,7 +88,8 @@ def test_illumination_correction( num_levels = metadata["num_levels"] # Read FOV ROIs and create corresponding indices - pixels = extract_zyx_pixel_sizes(zarrurl + ".zattrs", level=0) + ngff_image_meta = load_NgffImageMeta(zarrurl) + pixels = ngff_image_meta.get_pixel_sizes_zyx(level=0) ROIs = ad.read_zarr(zarrurl + "tables/FOV_ROI_table/") list_indices = convert_ROI_table_to_indices( ROIs, level=0, full_res_pxl_sizes_zyx=pixels diff --git a/tests/test_unit_ngff.py b/tests/test_unit_ngff.py new 
file mode 100644 index 000000000..669841f20 --- /dev/null +++ b/tests/test_unit_ngff.py @@ -0,0 +1,171 @@ +import json +from pathlib import Path + +import numpy as np +import pytest +import zarr +from devtools import debug + +from fractal_tasks_core.lib_ngff import Dataset +from fractal_tasks_core.lib_ngff import load_NgffImageMeta +from fractal_tasks_core.lib_ngff import Multiscale +from fractal_tasks_core.lib_ngff import NgffImageMeta +from fractal_tasks_core.lib_ngff import NgffWellMeta + + +def test_load_NgffWellMeta(tmp_path): + path = str(tmp_path / "error.zarr") + group = zarr.open_group(path) + group.attrs.put({"something": "else"}) + with pytest.raises(ValueError) as e: + load_NgffImageMeta(path) + debug(e.value) + + +def _load_and_validate(path, Model): + with open(path, "r") as f: + data = json.load(f) + return Model(**data) + + +@pytest.fixture +def ngffdata_path(testdata_path: Path) -> Path: + return testdata_path / "ngff_examples" + + +def test_Dataset(ngffdata_path): + _load_and_validate(ngffdata_path / "dataset.json", Dataset) + + # Fail due to missing scale transformation + dataset = _load_and_validate( + ngffdata_path / "dataset_error_1.json", Dataset + ) + with pytest.raises(ValueError) as e: + dataset.scale_transformation + assert "Missing scale transformation" in str(e.value) + + # Fail due to multiple scale transformations + dataset = _load_and_validate( + ngffdata_path / "dataset_error_2.json", Dataset + ) + with pytest.raises(ValueError) as e: + dataset.scale_transformation + assert "More than one scale transformation" in str(e.value) + + +def test_Multiscale(ngffdata_path): + # Fail due to global coordinateTransformation + with pytest.raises(NotImplementedError): + _load_and_validate(ngffdata_path / "multiscale_error.json", Multiscale) + + # Success + _load_and_validate(ngffdata_path / "multiscale.json", Multiscale) + + +def test_NgffImageMeta(ngffdata_path): + + # Fail when accessing multiscale, if there are more than one + ngff_image_meta 
= _load_and_validate( + ngffdata_path / "image_error.json", NgffImageMeta + ) + with pytest.raises(NotImplementedError): + ngff_image_meta.multiscale + + # Success CZYX + ngff_image_meta = _load_and_validate( + ngffdata_path / "image.json", NgffImageMeta + ) + assert ngff_image_meta.multiscale + assert len(ngff_image_meta.datasets) == 5 + assert len(ngff_image_meta.datasets) == ngff_image_meta.num_levels + assert ngff_image_meta.axes_names == ["c", "z", "y", "x"] + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(), [1.0, 0.1625, 0.1625] + ) + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(level=0), [1.0, 0.1625, 0.1625] + ) + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(level=1), [1.0, 0.325, 0.325] + ) + assert ngff_image_meta.coarsening_xy == 2 + + # Success ZYX + ngff_image_meta = _load_and_validate( + ngffdata_path / "image_ZYX.json", NgffImageMeta + ) + assert ngff_image_meta.multiscale + assert len(ngff_image_meta.datasets) == 5 + assert len(ngff_image_meta.datasets) == ngff_image_meta.num_levels + assert ngff_image_meta.axes_names == ["z", "y", "x"] + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(), [1.0, 0.1625, 0.1625] + ) + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(level=0), [1.0, 0.1625, 0.1625] + ) + assert np.allclose( + ngff_image_meta.get_pixel_sizes_zyx(level=1), [1.0, 0.325, 0.325] + ) + assert ngff_image_meta.coarsening_xy == 2 + + # Pixel sizes are too small + ngff_image_meta = _load_and_validate( + ngffdata_path / "image_error_pixels.json", NgffImageMeta + ) + with pytest.raises(ValueError) as e: + ngff_image_meta.pixel_sizes_zyx + debug(e.value) + assert "are too small" in str(e.value) + + +def test_ImageNgffMeta_missing_Z(ngffdata_path, caplog): + ngff_image_meta = _load_and_validate( + ngffdata_path / "image_CYX.json", NgffImageMeta + ) + caplog.clear() + ngff_image_meta.pixel_sizes_zyx + debug(caplog.text) + assert "Z axis is not present" in caplog.text + + +def 
test_ImageNgffMeta_inhomogeneous_coarsening(ngffdata_path): + # Case 1 + ngff_image_meta = _load_and_validate( + ngffdata_path / "image_error_coarsening_1.json", NgffImageMeta + ) + with pytest.raises(NotImplementedError) as e: + ngff_image_meta.coarsening_xy + assert "Inhomogeneous coarsening in X/Y directions" in str(e.value) + # Case 2 + ngff_image_meta = _load_and_validate( + ngffdata_path / "image_error_coarsening_2.json", NgffImageMeta + ) + with pytest.raises(NotImplementedError) as e: + ngff_image_meta.coarsening_xy + assert "Inhomogeneous coarsening across levels" in str(e.value) + + +def test_NgffWellMeta_get_acquisition_paths(ngffdata_path): + # Fail for no acquisition keys + ngff_well_meta = _load_and_validate( + ngffdata_path / "well.json", NgffWellMeta + ) + with pytest.raises(ValueError) as e: + ngff_well_meta.get_acquisition_paths() + assert "Cannot get acquisition paths" in str(e.value) + + # Fail for repeated acquisitions + ngff_well_meta = _load_and_validate( + ngffdata_path / "well_acquisitions_error.json", NgffWellMeta + ) + with pytest.raises(NotImplementedError) as e: + ngff_well_meta.get_acquisition_paths() + assert "multiple images of the same acquisition" in str(e.value) + + # Success + ngff_well_meta = _load_and_validate( + ngffdata_path / "well_acquisitions.json", NgffWellMeta + ) + debug(ngff_well_meta.get_acquisition_paths()) + assert ngff_well_meta.get_acquisition_paths() == {9: "nine", 7: "seven"} diff --git a/tests/test_unit_zattrs_utils.py b/tests/test_unit_zattrs_utils.py index bf77d7290..da80e13d6 100644 --- a/tests/test_unit_zattrs_utils.py +++ b/tests/test_unit_zattrs_utils.py @@ -1,116 +1,10 @@ -import json - -import pytest import zarr from devtools import debug -from fractal_tasks_core.lib_zattrs_utils import extract_zyx_pixel_sizes -from fractal_tasks_core.lib_zattrs_utils import ( - get_acquisition_paths, -) -from fractal_tasks_core.lib_zattrs_utils import get_axes_names from fractal_tasks_core.lib_zattrs_utils import ( 
get_table_path_dict, ) -from fractal_tasks_core.lib_zattrs_utils import rescale_datasets # noqa - - -def test_extract_zyx_pixel_sizes(tmp_path): - """ - Test multiple invalid/valid calls to extract_zyx_pixel_sizes - """ - - zattrs_path = tmp_path / ".zattrs" - - def _call_extract_zyx_pixel_sizes(_metadata): - """ - Auxiliary function, to make the test more compact - """ - with zattrs_path.open("w") as f: - json.dump(metadata, f) - return extract_zyx_pixel_sizes(zattrs_path=str(zattrs_path)) - - # Case 1: fail for multiple multiscales - metadata = dict(multiscales=[1, 2]) - with pytest.raises(ValueError) as e: - _call_extract_zyx_pixel_sizes(metadata) - debug(e.value) - assert "There are 2 multiscales" in str(e.value) - - # Case 2: fail for global coordinateTransformations - metadata = dict(multiscales=[dict(axes=[], coordinateTransformations=[])]) - with pytest.raises(NotImplementedError) as e: - _call_extract_zyx_pixel_sizes(metadata) - debug(e.value) - assert "Global coordinateTransformations" in str(e.value) - - # Case 3: fail for missing scale transformation - metadata = dict( - multiscales=[ - dict(axes=[], datasets=[dict(coordinateTransformations=[])]) - ] - ) - with pytest.raises(ValueError) as e: - _call_extract_zyx_pixel_sizes(metadata) - debug(e.value) - assert "No scale transformation found" in str(e.value) - - # Case 4: success, with 4D (CZXY) scale transformation - metadata = dict( - multiscales=[ - dict( - axes=[], - datasets=[ - dict( - coordinateTransformations=[ - dict(type="scale", scale=[1, 2, 2, 2]) - ] - ) - ], - ) - ] - ) - out = _call_extract_zyx_pixel_sizes(metadata) - debug(out) - assert out == [2, 2, 2] - - # Case 5: success, with 3D (ZYX) scale transformation - metadata = dict( - multiscales=[ - dict( - axes=[], - datasets=[ - dict( - coordinateTransformations=[ - dict(type="scale", scale=[2, 2, 2]) - ] - ) - ], - ) - ] - ) - out = _call_extract_zyx_pixel_sizes(metadata) - debug(out) - assert out == [2, 2, 2] - - # Case 6: fail because 
pixel sizes are too small - metadata = dict( - multiscales=[ - dict( - axes=[], - datasets=[ - dict( - coordinateTransformations=[ - dict(type="scale", scale=[2, 2, 1e-20]) - ] - ) - ], - ) - ] - ) - with pytest.raises(ValueError) as e: - _call_extract_zyx_pixel_sizes(metadata) - debug(e.value) +from fractal_tasks_core.lib_zattrs_utils import rescale_datasets def test_rescale_datasets(tmp_path): @@ -151,31 +45,6 @@ def test_rescale_datasets(tmp_path): ] -def test_get_acquisition_paths(): - - # Successful call - image_1 = dict(path="path1", acquisition=1) - image_2 = dict(path="path2", acquisition=2) - zattrs = dict(well=dict(images=[image_1, image_2])) - res = get_acquisition_paths(zattrs) - debug(res) - assert res == {1: "path1", 2: "path2"} - - # Fail (missing acquisition key) - image_1 = dict(path="path1", acquisition=1) - image_2 = dict(path="path2") - zattrs = dict(well=dict(images=[image_1, image_2])) - with pytest.raises(ValueError): - get_acquisition_paths(zattrs) - - # Fail (non-unique acquisition value) - image_1 = dict(path="path1", acquisition=1) - image_2 = dict(path="path2", acquisition=1) - zattrs = dict(well=dict(images=[image_1, image_2])) - with pytest.raises(NotImplementedError): - get_acquisition_paths(zattrs) - - def test_get_table_path_dict(tmp_path): input_path = tmp_path @@ -201,33 +70,3 @@ def test_get_table_path_dict(tmp_path): assert table_path_dict.pop("table2") == str( input_path / component / "tables/table2" ) - - -def test_get_axes_names(): - attrs = { - "multiscales": [ - { - "axes": [ - {"name": "c", "type": "channel"}, - {"name": "z", "type": "space", "unit": "micrometer"}, - {"name": "y", "type": "space", "unit": "micrometer"}, - {"name": "x", "type": "space", "unit": "micrometer"}, - ] - } - ] - } - target_axes = ["c", "z", "y", "x"] - axes = get_axes_names(attrs) - - assert target_axes == axes - - -def test_get_axes_names_errors(): - attrs_list = [ - {}, - {"multiscales": "Test"}, - {"multiscales": [{"Test": "test1"}]}, - ] - 
for attrs in attrs_list: - with pytest.raises(ValueError): - get_axes_names(attrs)