From 9c4676321cf27d0510ca3325e966cb09b9aa2b21 Mon Sep 17 00:00:00 2001 From: Alan Liddell Date: Tue, 29 Oct 2024 07:37:55 -0700 Subject: [PATCH] Python tests 2: Streaming boogaloo (#12) * bindings created * pip install . working * python -m build working (on Windows anyway) * get it building on linux * Remove test stub (save for the next PR) * Undo an overzealous rename * Add Python wheel build job * Prepare for Python bindings * Don't export the enum values into the module base namespace. * (wip) some basic tests * Revert CMake minimum version and use cmake_policy. Using builtin `BUILD_TESTING` cmake option. * Update build.yml * Update release.yml * some simple tests * add python tests to CI * (wip): first stream test (crash!) * Remove deprecated workflows * A rectification of names * whitespace * Fix double free error * Slightly improve debug output. * Add some more streaming tests. * concurrency? * Explicit nullopt * Get it working ok on Windows * remove stub * Use PROJECT_SOURCE_DIR instead of CMAKE_SOURCE_DIR * Respond to PR comments --- .github/workflows/build.yml | 38 ++- .github/workflows/release.yml | 24 +- .github/workflows/test.yml | 24 +- pyproject.toml | 33 ++- python/CMakeLists.txt | 2 +- python/acquire-zarr-py.cpp | 61 ++--- python/tests/test_settings.py | 1 - python/tests/test_stream.py | 390 +++++++++++++++++++++++++++++++ src/streaming/CMakeLists.txt | 6 +- src/streaming/zarr.stream.cpp | 30 +-- tests/integration/CMakeLists.txt | 4 +- tests/unit-tests/CMakeLists.txt | 6 +- 12 files changed, 506 insertions(+), 113 deletions(-) create mode 100644 python/tests/test_stream.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a2723f4..0c46a95 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,12 +4,10 @@ on: push: branches: - "main" - pull_request: # TODO (aliddell): remove this - branches: - - "main" jobs: windows-and-linux-build: + name: Build on ${{ matrix.platform }} with ${{ 
matrix.build_type }} configuration strategy: matrix: build_type: @@ -29,12 +27,11 @@ jobs: permissions: actions: write - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + steps: - uses: actions/checkout@v3 with: submodules: true @@ -58,7 +55,7 @@ jobs: cmake --build ${{github.workspace}}/build --config ${{matrix.build_type}} cpack --config ${{github.workspace}}/build/CPackConfig.cmake -C ${{matrix.build_type}} -G ZIP - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.platform}} ${{matrix.build_type}} binaries path: ${{github.workspace}}/*.zip @@ -75,12 +72,11 @@ jobs: permissions: actions: write - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + steps: - uses: actions/checkout@v3 with: submodules: true @@ -114,7 +110,7 @@ jobs: run: | cpack --config ${{github.workspace}}/build/CPackConfig.cmake -C ${{matrix.build_type}} -G ZIP - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: macos-latest ${{matrix.build_type}} binaries path: ${{github.workspace}}/*.zip @@ -132,12 +128,11 @@ jobs: permissions: actions: write - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + steps: - uses: actions/checkout@v3 with: submodules: true @@ -163,6 +158,7 @@ jobs: run: python -m build - name: Upload wheel - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: + name: 
${{matrix.platform}} wheel path: ${{github.workspace}}/dist/*.whl diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 070250a..5259927 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,7 @@ env: jobs: windows-and-linux-build: + name: Build on ${{ matrix.platform }} strategy: matrix: platform: @@ -27,12 +28,11 @@ jobs: permissions: actions: write - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + steps: - uses: actions/checkout@v3 with: submodules: true @@ -61,23 +61,23 @@ jobs: cmake --build ${{github.workspace}}/pack --config Release cpack --config ${{github.workspace}}/pack/CPackConfig.cmake -C Release -G ZIP - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{matrix.platform}} binaries path: ${{github.workspace}}/*.zip mac-build: + name: Build on macos-latest runs-on: "macos-latest" permissions: actions: write - steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + steps: - uses: actions/checkout@v3 with: submodules: true @@ -115,7 +115,7 @@ jobs: run: | cpack --config ${{github.workspace}}/build/CPackConfig.cmake -C Release -G ZIP - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: macos-latest binaries path: ${{github.workspace}}/*.zip diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1704f4f..4557686 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,6 +11,10 @@ on: env: BUILD_TYPE: Release +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + 
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} + jobs: test: name: Test on ${{ matrix.platform }} @@ -34,11 +38,6 @@ jobs: actions: write steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} - - uses: actions/checkout@v3 with: submodules: true @@ -78,11 +77,6 @@ jobs: MINIO_SECRET_KEY: 12345678 steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} - - uses: actions/checkout@v3 with: submodules: true @@ -135,7 +129,7 @@ jobs: run: ctest -C ${{env.BUILD_TYPE}} -L s3 --output-on-failure test_python: - name: Test on ${{ matrix.platform }} + name: Test Python on ${{ matrix.platform }} runs-on: ${{ matrix.platform }} timeout-minutes: 20 strategy: @@ -147,11 +141,6 @@ jobs: - "macos-latest" steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} - - uses: actions/checkout@v3 with: submodules: true @@ -175,8 +164,7 @@ jobs: run: python -m pip install -U pip "pybind11[global]" cmake build numpy pytest - name: Build and install Python bindings - run: python -m pip install . 
+ run: python -m pip install ".[testing]" - name: Run tests run: python -m pytest -v - \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 7c5060c..a9f6537 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,40 @@ [build-system] requires = [ - "setuptools>=42", - "wheel", - "ninja", "cmake>=3.12", + "ninja", "pybind11[global]", + "setuptools>=42", + "wheel", ] build-backend = "setuptools.build_meta" +[project] +name = "acquire-zarr" +requires-python = ">=3.9" +version = "0.0.1" + +[project.optional-dependencies] +testing = [ + "black", + "dask", + "mypy", + "ome-zarr", + "pytest>=7", + "pytest-cov", + "python-dotenv", + "ruff", + "s3fs", + "tifffile", + "zarr", +] + +[tool.black] +target-version = ['py39', 'py310', 'py311', 'py312'] +line-length = 79 + +[tool.isort] +profile = "black" + [tool.pytest.ini_options] minversion = "6.0" addopts = "-ra -q --color=yes" diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index b353c55..c569451 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -18,7 +18,7 @@ find_package(pybind11 REQUIRED) pybind11_add_module(acquire_zarr acquire-zarr-py.cpp) -target_include_directories(acquire_zarr PRIVATE ${CMAKE_SOURCE_DIR}/include) +target_include_directories(acquire_zarr PRIVATE ${PROJECT_SOURCE_DIR}/include) target_link_libraries(acquire_zarr PRIVATE acquire-zarr) set_target_properties(acquire_zarr PROPERTIES diff --git a/python/acquire-zarr-py.cpp b/python/acquire-zarr-py.cpp index ae78506..3683d4f 100644 --- a/python/acquire-zarr-py.cpp +++ b/python/acquire-zarr-py.cpp @@ -257,9 +257,11 @@ class PyZarrStreamSettings private: std::string store_path_; - std::optional custom_metadata_; - std::optional s3_settings_; - std::optional compression_settings_; + std::optional custom_metadata_{ std::nullopt }; + std::optional s3_settings_{ std::nullopt }; + std::optional compression_settings_{ + std::nullopt + }; bool multiscale_ = false; ZarrDataType data_type_{ ZarrDataType_uint8 }; 
ZarrVersion version_{ ZarrVersion_2 }; @@ -288,11 +290,11 @@ class PyZarrStream auto store_path = settings.store_path(); stream_settings.store_path = store_path.c_str(); - auto metadata = settings.custom_metadata(); - stream_settings.custom_metadata = - settings.custom_metadata().has_value() - ? settings.custom_metadata()->c_str() - : nullptr; + std::string metadata; + if (settings.custom_metadata()) { + metadata = settings.custom_metadata().value(); + stream_settings.custom_metadata = metadata.c_str(); + } if (settings.s3().has_value()) { s3_settings.endpoint = settings.s3()->endpoint().c_str(); @@ -340,13 +342,6 @@ class PyZarrStream } } - ~PyZarrStream() - { - if (is_active()) { - ZarrStream_destroy(stream_.get()); - } - } - void append(py::array image_data) { if (!is_active()) { @@ -534,18 +529,23 @@ PYBIND11_MODULE(acquire_zarr, m) if (kwargs.contains("store_path")) settings.set_store_path(kwargs["store_path"].cast()); - if (kwargs.contains("custom_metadata")) - settings.set_custom_metadata( - kwargs["custom_metadata"].cast>()); + if (kwargs.contains("custom_metadata") && + !kwargs["custom_metadata"].is_none()) { + auto cm = kwargs["custom_metadata"].cast(); + settings.set_custom_metadata(cm); + } - if (kwargs.contains("s3")) - settings.set_s3( - kwargs["s3"].cast>()); + if (kwargs.contains("s3") && !kwargs["s3"].is_none()) { + auto s3 = kwargs["s3"].cast(); + settings.set_s3(s3); + } - if (kwargs.contains("compression")) - settings.set_compression( - kwargs["compression"] - .cast>()); + if (kwargs.contains("compression") && + !kwargs["compression"].is_none()) { + auto compression = + kwargs["compression"].cast(); + settings.set_compression(compression); + } if (kwargs.contains("dimensions")) settings.dimensions = @@ -566,10 +566,10 @@ PYBIND11_MODULE(acquire_zarr, m) .def("__repr__", [](const PyZarrStreamSettings& self) { std::string repr = - "StreamSettings(store_path='" + self.store_path(); + "StreamSettings(store_path='" + self.store_path() + "'"; if 
(self.custom_metadata().has_value()) { - repr += - ", custom_metadata='" + self.custom_metadata().value(); + repr += ", custom_metadata='" + + self.custom_metadata().value() + "'"; } if (self.s3().has_value()) { @@ -582,9 +582,10 @@ PYBIND11_MODULE(acquire_zarr, m) for (const auto& dim : self.dimensions) { repr += dim.repr() + ", "; } + + std::string multiscale = self.multiscale() ? "True" : "False"; repr += - "], multiscale=" + std::to_string(self.multiscale()) + - ", data_type=DataType." + + "], multiscale=" + multiscale + ", data_type=DataType." + std::string(data_type_to_str(self.data_type())) + ", version=ZarrVersion." + std::string(self.version() == ZarrVersion_2 ? "V2" : "V3") + diff --git a/python/tests/test_settings.py b/python/tests/test_settings.py index 2ca7afd..7b097df 100644 --- a/python/tests/test_settings.py +++ b/python/tests/test_settings.py @@ -3,7 +3,6 @@ import json from pathlib import Path -import numpy as np import pytest import acquire_zarr diff --git a/python/tests/test_stream.py b/python/tests/test_stream.py new file mode 100644 index 0000000..c96d8cf --- /dev/null +++ b/python/tests/test_stream.py @@ -0,0 +1,390 @@ +#!/usr/bin/env python3 + +import dotenv + +dotenv.load_dotenv() + +import json +from pathlib import Path +import os +import shutil +from typing import Optional + +os.environ["ZARR_V3_EXPERIMENTAL_API"] = "1" +os.environ["ZARR_V3_SHARDING"] = "1" + +import numpy as np +import pytest +import zarr +from numcodecs import blosc +import s3fs + +from acquire_zarr import ( + StreamSettings, + ZarrStream, + Compressor, + CompressionCodec, + CompressionSettings, + S3Settings, + Dimension, + DimensionType, + ZarrVersion, +) + + +@pytest.fixture(scope="function") +def settings(): + s = StreamSettings() + s.custom_metadata = json.dumps({"foo": "bar"}) + s.dimensions.extend( + [ + Dimension( + name="t", + kind=DimensionType.TIME, + array_size_px=0, + chunk_size_px=32, + shard_size_chunks=1, + ), + Dimension( + name="y", + 
kind=DimensionType.SPACE, + array_size_px=48, + chunk_size_px=16, + shard_size_chunks=1, + ), + Dimension( + name="x", + kind=DimensionType.SPACE, + array_size_px=64, + chunk_size_px=32, + shard_size_chunks=1, + ), + ] + ) + + return s + + +@pytest.fixture(scope="function") +def store_path(tmp_path): + yield tmp_path + shutil.rmtree(tmp_path) + + +def validate_v2_metadata(store_path: Path): + assert (store_path / ".zattrs").is_file() + with open(store_path / ".zattrs", "r") as fh: + data = json.load(fh) + axes = data["multiscales"][0]["axes"] + assert axes[0]["name"] == "t" + assert axes[0]["type"] == "time" + + assert axes[1]["name"] == "y" + assert axes[1]["type"] == "space" + assert axes[1]["unit"] == "micrometer" + + assert axes[2]["name"] == "x" + assert axes[2]["type"] == "space" + assert axes[2]["unit"] == "micrometer" + + assert (store_path / ".zgroup").is_file() + with open(store_path / ".zgroup", "r") as fh: + data = json.load(fh) + assert data["zarr_format"] == 2 + + assert (store_path / "acquire.json").is_file() + with open(store_path / "acquire.json", "r") as fh: + data = json.load(fh) + assert data["foo"] == "bar" + + assert (store_path / "0").is_dir() + + +def validate_v3_metadata(store_path: Path): + assert (store_path / "zarr.json").is_file() + with open(store_path / "zarr.json", "r") as fh: + data = json.load(fh) + assert data["extensions"] == [] + assert ( + data["metadata_encoding"] + == "https://purl.org/zarr/spec/protocol/core/3.0" + ) + assert ( + data["zarr_format"] + == "https://purl.org/zarr/spec/protocol/core/3.0" + ) + assert data["metadata_key_suffix"] == ".json" + + assert (store_path / "meta").is_dir() + assert (store_path / "meta" / "root.group.json").is_file() + with open(store_path / "meta" / "root.group.json", "r") as fh: + data = json.load(fh) + axes = data["attributes"]["multiscales"][0]["axes"] + assert axes[0]["name"] == "t" + assert axes[0]["type"] == "time" + + assert axes[1]["name"] == "y" + assert axes[1]["type"] == 
"space" + assert axes[1]["unit"] == "micrometer" + + assert axes[2]["name"] == "x" + assert axes[2]["type"] == "space" + assert axes[2]["unit"] == "micrometer" + + assert (store_path / "meta" / "acquire.json").is_file() + with open(store_path / "meta" / "acquire.json", "r") as fh: + data = json.load(fh) + assert data["foo"] == "bar" + + +def get_directory_store(version: ZarrVersion, store_path: str): + if version == ZarrVersion.V2: + return zarr.DirectoryStore(store_path) + else: + return zarr.DirectoryStoreV3(store_path) + +def make_s3_settings(store_path: str): + if "ZARR_S3_ENDPOINT" not in os.environ or "ZARR_S3_BUCKET_NAME" not in os.environ or "ZARR_S3_ACCESS_KEY_ID" not in os.environ or "ZARR_S3_SECRET_ACCESS_KEY" not in os.environ: + return None + + return S3Settings( + endpoint=os.environ["ZARR_S3_ENDPOINT"], + bucket_name=os.environ["ZARR_S3_BUCKET_NAME"], + access_key_id=os.environ["ZARR_S3_ACCESS_KEY_ID"], + secret_access_key=os.environ["ZARR_S3_SECRET_ACCESS_KEY"], + ) + + +@pytest.mark.parametrize( + ("version",), + [ + (ZarrVersion.V2,), + (ZarrVersion.V3,), + ], +) +def test_create_stream( + settings: StreamSettings, + store_path: Path, + request: pytest.FixtureRequest, + version: ZarrVersion, +): + settings.store_path = str(store_path / f"{request.node.name}.zarr") + settings.version = version + stream = ZarrStream(settings) + assert stream + + store_path = Path(settings.store_path) + + del stream # close the stream, flush the files + + # check that the stream created the zarr store + assert store_path.is_dir() + + if version == ZarrVersion.V2: + validate_v2_metadata(store_path) + + # no data written, so no array metadata + assert not (store_path / "0" / ".zarray").exists() + else: + validate_v3_metadata(store_path) + + # no data written, so no array metadata + assert not (store_path / "meta" / "0.array.json").exists() + + +@pytest.mark.parametrize( + ( + "version", + "compression_codec", + ), + [ + ( + ZarrVersion.V2, + None, + ), + ( + 
ZarrVersion.V2, + CompressionCodec.BLOSC_LZ4, + ), + ( + ZarrVersion.V2, + CompressionCodec.BLOSC_ZSTD, + ), + ( + ZarrVersion.V3, + None, + ), + ( + ZarrVersion.V3, + CompressionCodec.BLOSC_LZ4, + ), + ( + ZarrVersion.V3, + CompressionCodec.BLOSC_ZSTD, + ), + ], +) +def test_stream_data_to_filesystem( + settings: StreamSettings, + store_path: Path, + request: pytest.FixtureRequest, + version: ZarrVersion, + compression_codec: Optional[CompressionCodec], +): + settings.store_path = str(store_path / f"{request.node.name}.zarr") + settings.version = version + if compression_codec is not None: + settings.compression = CompressionSettings( + compressor=Compressor.BLOSC1, + codec=compression_codec, + level=1, + shuffle=1, + ) + + stream = ZarrStream(settings) + assert stream + + data = np.random.randint( + 0, + 255, + ( + settings.dimensions[0].chunk_size_px, + settings.dimensions[1].array_size_px, + settings.dimensions[2].array_size_px, + ), + dtype=np.uint8, + ) + stream.append(data) + + del stream # close the stream, flush the files + + group = zarr.open( + store=get_directory_store(version, settings.store_path), mode="r" + ) + data = group["0"] + + assert data.shape == ( + settings.dimensions[0].chunk_size_px, + settings.dimensions[1].array_size_px, + settings.dimensions[2].array_size_px, + ) + + if compression_codec is not None: + cname = ( + "lz4" + if compression_codec == CompressionCodec.BLOSC_LZ4 + else "zstd" + ) + assert data.compressor.cname == cname + assert data.compressor.clevel == 1 + assert data.compressor.shuffle == blosc.SHUFFLE + else: + assert data.compressor is None + + +@pytest.mark.parametrize( + ( + "version", + "compression_codec", + ), + [ + ( + ZarrVersion.V2, + None, + ), + ( + ZarrVersion.V2, + CompressionCodec.BLOSC_LZ4, + ), + ( + ZarrVersion.V2, + CompressionCodec.BLOSC_ZSTD, + ), + ( + ZarrVersion.V3, + None, + ), + ( + ZarrVersion.V3, + CompressionCodec.BLOSC_LZ4, + ), + ( + ZarrVersion.V3, + CompressionCodec.BLOSC_ZSTD, + ), + ], +) 
+@pytest.mark.skip(reason="Temporary; needs debugging") +def test_stream_data_to_s3( + settings: StreamSettings, + store_path: Path, + request: pytest.FixtureRequest, + version: ZarrVersion, + compression_codec: Optional[CompressionCodec], +): + s3_settings = make_s3_settings(store_path) + if s3_settings is None: + pytest.skip("S3 settings not set") + + settings.store_path = str(store_path / f"{request.node.name}.zarr") + settings.version = version + settings.s3 = s3_settings + if compression_codec is not None: + settings.compression = CompressionSettings( + compressor=Compressor.BLOSC1, + codec=compression_codec, + level=1, + shuffle=1, + ) + + stream = ZarrStream(settings) + assert stream + + data = np.random.randint( + -255, + 255, + ( + settings.dimensions[0].chunk_size_px, + settings.dimensions[1].array_size_px, + settings.dimensions[2].array_size_px, + ), + dtype=np.int16, + ) + stream.append(data) + + del stream # close the stream, flush the files + + s3 = s3fs.S3FileSystem( + key=settings.s3.access_key_id, + secret=settings.s3.secret_access_key, + client_kwargs={"endpoint_url": settings.s3.endpoint}, + ) + store = s3fs.S3Map( + root=f"{s3_settings.bucket_name}/{settings.store_path}", s3=s3 + ) + cache = zarr.LRUStoreCache(store, max_size=2**28) + group = zarr.group(store=cache) + + data = group["0"] + + assert data.shape == ( + settings.dimensions[0].chunk_size_px, + settings.dimensions[1].array_size_px, + settings.dimensions[2].array_size_px, + ) + + if compression_codec is not None: + cname = ( + "lz4" + if compression_codec == CompressionCodec.BLOSC_LZ4 + else "zstd" + ) + assert data.compressor.cname == cname + assert data.compressor.clevel == 1 + assert data.compressor.shuffle == blosc.SHUFFLE + else: + assert data.compressor is None + + # cleanup + s3.rm(store.root, recursive=True) + \ No newline at end of file diff --git a/src/streaming/CMakeLists.txt b/src/streaming/CMakeLists.txt index 3ab50ac..cfa734c 100644 --- a/src/streaming/CMakeLists.txt +++ 
b/src/streaming/CMakeLists.txt @@ -33,10 +33,10 @@ add_library(${tgt} target_include_directories(${tgt} PUBLIC - $ + $ PRIVATE $ - $ + $ ) target_link_libraries(${tgt} PRIVATE @@ -60,6 +60,6 @@ install(TARGETS ${tgt} ) # Install public header files -install(DIRECTORY ${CMAKE_SOURCE_DIR}/include/ +install(DIRECTORY ${PROJECT_SOURCE_DIR}/include/ DESTINATION include ) \ No newline at end of file diff --git a/src/streaming/zarr.stream.cpp b/src/streaming/zarr.stream.cpp index ccb3d01..1f1f8cb 100644 --- a/src/streaming/zarr.stream.cpp +++ b/src/streaming/zarr.stream.cpp @@ -29,8 +29,7 @@ is_compressed_acquisition(const struct ZarrStreamSettings_s* settings) return nullptr != settings->compression_settings; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_s3_settings(const ZarrS3Settings* settings) { if (zarr::is_empty_string(settings->endpoint, "S3 endpoint is empty")) { @@ -57,8 +56,7 @@ validate_s3_settings(const ZarrS3Settings* settings) return true; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_filesystem_store_path(std::string_view data_root) { fs::path path(data_root); @@ -89,8 +87,7 @@ validate_filesystem_store_path(std::string_view data_root) return true; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_compression_settings(const ZarrCompressionSettings* settings) { if (settings->compressor >= ZarrCompressorCount) { @@ -135,8 +132,7 @@ validate_compression_settings(const ZarrCompressionSettings* settings) return true; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_custom_metadata(const char* metadata) { if (metadata == nullptr || !*metadata) { @@ -151,15 +147,14 @@ validate_custom_metadata(const char* metadata) ); if (val.is_discarded()) { - LOG_ERROR("Invalid JSON: ", metadata); + LOG_ERROR("Invalid JSON: '", metadata, "'"); return false; } return true; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_dimension(const ZarrDimensionProperties* dimension, ZarrVersion version, bool is_append) @@ -191,8 +186,7 @@ validate_dimension(const 
ZarrDimensionProperties* dimension, return true; } -[[nodiscard]] -bool +[[nodiscard]] bool validate_settings(const struct ZarrStreamSettings_s* settings) { if (!settings) { @@ -292,8 +286,7 @@ dimension_type_to_string(ZarrDimensionType type) } template -[[nodiscard]] -std::byte* +[[nodiscard]] std::byte* scale_image(const std::byte* const src, size_t& bytes_of_src, size_t& width, @@ -587,8 +580,8 @@ ZarrStream_s::create_store_() { std::error_code ec; if (!fs::create_directories(store_path_, ec)) { - set_error_("Failed to create store path '" + - store_path_ + "': " + ec.message()); + set_error_("Failed to create store path '" + store_path_ + + "': " + ec.message()); return false; } } @@ -647,7 +640,6 @@ ZarrStream_s::create_writers_() writers_.push_back(std::make_unique( downsampled_config, thread_pool_, s3_connection_pool_)); } - // scaled_frames_.emplace(level++, std::nullopt); config = std::move(downsampled_config); downsampled_config = {}; @@ -1005,4 +997,4 @@ finalize_stream(struct ZarrStream_s* stream) } return true; -} \ No newline at end of file +} diff --git a/tests/integration/CMakeLists.txt b/tests/integration/CMakeLists.txt index bb50e1c..006c490 100644 --- a/tests/integration/CMakeLists.txt +++ b/tests/integration/CMakeLists.txt @@ -19,8 +19,8 @@ foreach (name ${tests}) MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>" ) target_include_directories(${tgt} PRIVATE - ${CMAKE_SOURCE_DIR}/include - ${CMAKE_SOURCE_DIR}/src/logger + ${PROJECT_SOURCE_DIR}/include + ${PROJECT_SOURCE_DIR}/src/logger ) target_link_libraries(${tgt} PRIVATE acquire-logger diff --git a/tests/unit-tests/CMakeLists.txt b/tests/unit-tests/CMakeLists.txt index ee9d165..aefbc34 100644 --- a/tests/unit-tests/CMakeLists.txt +++ b/tests/unit-tests/CMakeLists.txt @@ -34,9 +34,9 @@ foreach (name ${tests}) MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>" ) target_include_directories(${tgt} PRIVATE - ${CMAKE_SOURCE_DIR}/include - ${CMAKE_SOURCE_DIR}/src/logger - ${CMAKE_SOURCE_DIR}/src/streaming + 
${PROJECT_SOURCE_DIR}/include + ${PROJECT_SOURCE_DIR}/src/logger + ${PROJECT_SOURCE_DIR}/src/streaming ) target_link_libraries(${tgt} PRIVATE acquire-logger