From becec97a98a31b4cf3ad60f08cee3ac1ab87db80 Mon Sep 17 00:00:00 2001 From: Nosheen Adil Date: Fri, 27 Sep 2024 09:38:21 -0700 Subject: [PATCH] IT-1228: Refactor repo to use isxcore C++ API (#54) --- .github/workflows/main.yml | 61 - .github/workflows/pypi.yml | 48 - .gitignore | 1 + .gitmodules | 3 + CMakeLists.txt | 99 + MANIFEST.in | 3 + Makefile | 130 +- README.md | 96 +- install-poetry.sh | 7 - isx/__init__.py | 435 +--- isx/_internal.py | 1320 ++++++++++ isx/algo.py | 1530 ++++++++++++ isx/core.py | 351 +++ isx/io.py | 2682 ++++++++++++++++++++ isx/test/__init__.py | 0 isx/test/asserts.py | 281 +++ isx/test/test_algos.py | 2207 +++++++++++++++++ isx/test/test_core.py | 153 ++ isx/test/test_file_io.py | 2883 ++++++++++++++++++++++ isx/test/test_file_metadata.py | 93 + isx/test/utilities/__init__.py | 0 isx/test/utilities/create_sample_data.py | 79 + isx/test/utilities/setup.py | 24 + isx/util.py | 86 + isxcore | 1 + pyproject.toml | 64 +- setup.py | 71 +- tests/data.py | 48 - tests/test_isx.py | 222 -- 29 files changed, 11994 insertions(+), 984 deletions(-) delete mode 100644 .github/workflows/main.yml delete mode 100644 .github/workflows/pypi.yml create mode 100644 .gitmodules create mode 100644 CMakeLists.txt create mode 100644 MANIFEST.in delete mode 100644 install-poetry.sh create mode 100644 isx/_internal.py create mode 100644 isx/algo.py create mode 100644 isx/core.py create mode 100644 isx/io.py create mode 100644 isx/test/__init__.py create mode 100644 isx/test/asserts.py create mode 100644 isx/test/test_algos.py create mode 100644 isx/test/test_core.py create mode 100644 isx/test/test_file_io.py create mode 100644 isx/test/test_file_metadata.py create mode 100644 isx/test/utilities/__init__.py create mode 100644 isx/test/utilities/create_sample_data.py create mode 100644 isx/test/utilities/setup.py create mode 100644 isx/util.py create mode 160000 isxcore delete mode 100644 tests/data.py delete mode 100644 tests/test_isx.py diff --git 
a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 49c3c2c..0000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: pre-merge-checks - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - - -jobs: - initial_checks: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Check code using ruff - uses: chartboost/ruff-action@v1 - - - name: Check docstrings using interrogate - run: | - pip install interrogate - if [ $(interrogate -c pyproject.toml -v . -f 100 | grep "FAILED" | wc -l) = 1 ]; then - echo "necessary docstrings missing:" - interrogate -vv . -f 100 - exit 1 - fi - - - name: Check code formatting using black - uses: psf/black@stable - with: - options: "--check --verbose" - version: "~= 22.6" - - - name: Check pyproject.toml matches setup.py - run: | - pip install poetry2setup - poetry2setup > setup.py - bash scripts/check-git-clean.sh - - standard_tests: - needs: [initial_checks] - strategy: - fail-fast: true - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies and run tests - run: | - pip install -e .[test] - make test-pip - shell: bash # Ensure using bash for cross-platform compatibility, especially on Windows diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml deleted file mode 100644 index 300208a..0000000 --- a/.github/workflows/pypi.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: publish-to-pypi - -on: - push: - tags: - - '*' - -jobs: - pypi-publish: - name: upload release to PyPI - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - - name: Checkout code - uses: actions/checkout@v3 - - - name: Insert long_description_content_type - run: 
>- - sed -i "s|'name': 'isx'|'name': 'isx','long_description_content_type':'text/markdown'|g" setup.py - - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: '3.10' - - - name: Install pypa/setuptools - run: - python -m pip install wheel - - - name: Extract tag name - id: tag - run: echo ::set-output name=TAG_NAME::$(echo $GITHUB_REF | cut -d / -f 3) - - - name: Update version in setup.py - run: >- - sed -i "s/0.0.0.dev0/${{ steps.tag.outputs.TAG_NAME }}/g" setup.py - - - - name: Build a binary wheel - run: - python setup.py sdist bdist_wheel - - - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - - diff --git a/.gitignore b/.gitignore index 8eb9288..2195721 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # secrets .ideas-github-token +apiTestResults.xml # IDE *.sublime-workspace diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..9d70957 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "isxcore"] + path = isxcore + url = git@github.com:inscopix/isxcore.git diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..2414fdb --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,99 @@ +cmake_minimum_required(VERSION 3.5) + +set(ISX_CMAKE_TOOLS_DIR ${CMAKE_CURRENT_LIST_DIR}/isxcore/tools/cmake) +include(${ISX_CMAKE_TOOLS_DIR}/configure.cmake) +include(${ISX_CMAKE_TOOLS_DIR}/core.cmake) + +# +# Public API +# +set(TARGET_NAME_PUBLIC_API "isxpublicapi") +set(PUBLIC_API_SRC_DIR ${CMAKE_CURRENT_LIST_DIR}/isxcore/wrapper/src) +set(PUBLIC_API_API_DIR ${CMAKE_CURRENT_LIST_DIR}/isxcore/wrapper/api) +set(PUBLIC_API_SRCS ${PUBLIC_API_SRC_DIR}/isxCoreC.cpp ${PUBLIC_API_SRC_DIR}/isxUtilsC.cpp) +set(PUBLIC_API_HDRS ${PUBLIC_API_SRC_DIR}/isxUtilsC.h) +set(PUBLIC_API_API_HDRS ${PUBLIC_API_API_DIR}/isxCoreC.h) + +add_library(${TARGET_NAME_PUBLIC_API} SHARED ${PUBLIC_API_SRCS} ${PUBLIC_API_HDRS} ${PUBLIC_API_API_HDRS}) + 
+target_include_directories(${TARGET_NAME_PUBLIC_API} PUBLIC ${PUBLIC_API_API_DIR}) +target_include_directories(${TARGET_NAME_PUBLIC_API} PRIVATE + ${CORE_API_DIR} + ${OPENCV_HEADER_SEARCH_PATHS} + ${JSON_HEADER_SEARCH_PATHS}) + +set(API_DEST_DIR ${CMAKE_BINARY_DIR}/../bin) +set(PYTHON_API_SRC_DIR ${CMAKE_CURRENT_LIST_DIR}) +set(APP_LIB_DIR ${API_DEST_DIR}/isx/lib) + +if (${ISX_OS_MACOS}) + set(QT_CORE_SHARED_LIB_FILES ${QT_DIR}/lib/QtCore.framework/Versions/5/QtCore) + installFiles(${TARGET_NAME_PUBLIC_API} ${APP_LIB_DIR}/QtCore.framework/Versions/5 "${QT_CORE_SHARED_LIB_FILES}") +elseif (${ISX_OS_LINUX}) + set(QT_CORE_SHARED_LIB_FILES + ${QT_DIR}/lib/libQt5Core.so.5 + ${QT_DIR}/lib/libicui18n.so.56 + ${QT_DIR}/lib/libicuuc.so.56 + ${QT_DIR}/lib/libicudata.so.56 + ) + installFiles(${TARGET_NAME_PUBLIC_API} ${APP_LIB_DIR} "${QT_CORE_SHARED_LIB_FILES}") +elseif (${ISX_OS_WIN32}) + set(QT_CORE_SHARED_LIB_FILES + ${QT_DIR}/bin/Qt5Core.dll + ) + installFiles(${TARGET_NAME_PUBLIC_API} ${APP_LIB_DIR} "${QT_CORE_SHARED_LIB_FILES}") +endif() + +installFfmpegSharedLibs(${TARGET_NAME_PUBLIC_API} ${APP_LIB_DIR}) + +set_target_properties(${TARGET_NAME_PUBLIC_API} PROPERTIES + ARCHIVE_OUTPUT_DIRECTORY_DEBUG ${APP_LIB_DIR} + ARCHIVE_OUTPUT_DIRECTORY_RELEASE ${APP_LIB_DIR} + LIBRARY_OUTPUT_DIRECTORY_DEBUG ${APP_LIB_DIR} + LIBRARY_OUTPUT_DIRECTORY_RELEASE ${APP_LIB_DIR} + RUNTIME_OUTPUT_DIRECTORY_DEBUG ${APP_LIB_DIR} + RUNTIME_OUTPUT_DIRECTORY_RELEASE ${APP_LIB_DIR} +) +setCommonCxxOptionsForTarget(${TARGET_NAME_PUBLIC_API}) +setOsDefinesForTarget(${TARGET_NAME_PUBLIC_API}) +disableVisualStudioWarnings(${TARGET_NAME_PUBLIC_API}) + +# OS-specific properties for shared lib +# mac: change output lib name to *.so (default *.dylib) +# mac & linux: set rpath +if(${ISX_OS_MACOS}) + string(APPEND SO_NAME_PUBLIC_API ${TARGET_NAME_PUBLIC_API} ".so") + + set_target_properties(${TARGET_NAME_PUBLIC_API} PROPERTIES + PREFIX "lib" + OUTPUT_NAME ${SO_NAME_PUBLIC_API} + SUFFIX "" + #SOVERSION 
"1.0.0.0" + BUILD_WITH_INSTALL_RPATH TRUE + INSTALL_RPATH "@loader_path" + ) +elseif(${ISX_OS_LINUX}) + set_target_properties(${TARGET_NAME_PUBLIC_API} PROPERTIES + BUILD_WITH_INSTALL_RPATH TRUE + INSTALL_RPATH "$ORIGIN/" + # Interposition of symbols from static libraries in MATLAB was causing + # major problems, so we only export a few select symbols now. + # We may want to do the same for macOS, which also exposes these + # symbols (but hasn't caused a problem yet). + LINK_FLAGS "-Wl,--version-script=${PUBLIC_API_API_DIR}/export_map" + ) +endif() + +target_link_libraries(${TARGET_NAME_PUBLIC_API} PRIVATE + ${TARGET_NAME_CORE} + ${QT_CORE_LINK_LIBRARIES} +) + +# Deploy Python files +set(PYTHON_API_DEST_DIR ${API_DEST_DIR}/isx) +file(COPY ${PYTHON_API_SRC_DIR}/isx/ DESTINATION ${PYTHON_API_DEST_DIR}) +file(COPY ${PYTHON_API_SRC_DIR}/setup.py DESTINATION ${API_DEST_DIR}) +file(COPY ${PYTHON_API_SRC_DIR}/MANIFEST.in DESTINATION ${API_DEST_DIR}) +file(COPY ${PYTHON_API_SRC_DIR}/README.md DESTINATION ${API_DEST_DIR}) +file(COPY ${PYTHON_API_SRC_DIR}/LICENSE.txt DESTINATION ${API_DEST_DIR}) +file(COPY ${PYTHON_API_SRC_DIR}/isx/test DESTINATION ${API_DEST_DIR}/../) diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..3c5810a --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,3 @@ +recursive-include isx/lib * +include README.md +include LICENSE.txt diff --git a/Makefile b/Makefile index 195f814..cc3a50d 100644 --- a/Makefile +++ b/Makefile @@ -1,43 +1,113 @@ -repo=$(shell basename $(CURDIR)) +.PHONY: build test -.PHONY: test coverage-report jupyter +BUILD_DIR_ROOT=build +BUILD_DIR_MODULES=modules +BUILD_TYPE=Release +BUILD_DIR_CMAKE=cmake +BUILD_DIR_BIN=bin +BUILD_PATH=$(BUILD_DIR_ROOT)/$(BUILD_TYPE)/$(BUILD_DIR_CMAKE) +BUILD_PATH_BIN=$(BUILD_DIR_ROOT)/$(BUILD_TYPE)/$(BUILD_DIR_BIN) -jupyter: - @echo "Installing kernel $(repo) in jupyter" - -yes | jupyter kernelspec uninstall $(repo) - poetry run python -m ipykernel install --user --name $(repo) 
+API_TEST_RESULTS_PATH=$(PWD)/apiTestResults.xml +PYTHON_TEST_DIR=$(BUILD_DIR_ROOT)/$(BUILD_TYPE)/bin/isx +ifndef TEST_DATA_DIR + TEST_DATA_DIR=test_data +endif -install-poetry: - @bash install-poetry.sh +ifndef THIRD_PARTY_DIR + THIRD_PARTY_DIR=third_party +endif -install: install-poetry - @echo "Installing py_isx..." - poetry check --lock || poetry lock - poetry install --verbose +PYTHON_VERSION=$(shell python -c 'import sys; print(".".join(map(str, sys.version_info[:2])))') -install-test: install-poetry - @echo "Installing py_isx & dependencies for testing..." - poetry check --lock || poetry lock - poetry install --extras "test" --verbose +ifeq ($(OS), Windows_NT) + DETECTED_OS = windows +else + UNAME_S = $(shell uname -s) + ifeq ($(UNAME_S), Linux) + DETECTED_OS = linux + else ifeq ($(UNAME_S), Darwin) + DETECTED_OS = mac + + ifeq ($(PYTHON_VERSION), 3.9) + _MACOSX_DEPLOYMENT_TARGET=10.11 + else ifeq ($(PYTHON_VERSION), 3.10) + _MACOSX_DEPLOYMENT_TARGET=10.11 + else ifeq ($(PYTHON_VERSION), 3.11) + _MACOSX_DEPLOYMENT_TARGET=10.11 + else ifeq ($(PYTHON_VERSION), 3.12) + _MACOSX_DEPLOYMENT_TARGET=10.15 + endif + endif +endif -test: install-test - poetry run pytest -sx --failed-first +VERSION_MAJOR=2 +VERSION_MINOR=0 +VERSION_PATCH=0 +VERSION_BUILD=0 +IS_BETA=1 +WITH_CUDA=0 +ASYNC_API=1 +WITH_ALGOS=0 -test-pip: - @echo "Testing code installed on base env using pip..." 
- pytest -s +CMAKE_OPTIONS=\ + -DCMAKE_BUILD_TYPE=$(BUILD_TYPE)\ + -DISX_VERSION_MAJOR=${VERSION_MAJOR}\ + -DISX_VERSION_MINOR=${VERSION_MINOR}\ + -DISX_VERSION_PATCH=${VERSION_PATCH}\ + -DISX_VERSION_BUILD=${VERSION_BUILD}\ + -DISX_IS_BETA=${IS_BETA}\ + -DISX_WITH_CUDA=${WITH_CUDA}\ + -DISX_ASYNC_API=${ASYNC_API} \ + -DISX_WITH_ALGOS=${WITH_ALGOS} \ +ifeq ($(DETECTED_OS), windows) + CMAKE_GENERATOR = Visual Studio 14 2015 Win64 +else ifeq ($(DETECTED_OS), linux) + CMAKE_GENERATOR = Unix Makefiles + CMAKE_OPTIONS += -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ +else ifeq ($(DETECTED_OS), mac) + CMAKE_GENERATOR = Xcode +endif -serve: install-poetry - @echo "Serving docs locally..." - poetry run mkdocs serve +check_os: + @echo "Verifying detected OS" +ifndef DETECTED_OS + @echo "Failed to detect supported OS"; exit 1 +else + @echo "Detected OS: ${DETECTED_OS}" +endif +ifeq ($(DETECTED_OS), mac) + @echo "Detected python version: ${PYTHON_VERSION}, using mac osx deployment target: ${MACOSX_DEPLOYMENT_TARGET}" +endif -setup.py: pyproject.toml README.md - poetry run poetry2setup > setup.py +clean: + @rm -rf build +ifeq ($(DETECTED_OS), mac) +build: export MACOSX_DEPLOYMENT_TARGET=${_MACOSX_DEPLOYMENT_TARGET} +endif +build: check_os + mkdir -p $(BUILD_PATH) && \ + cd $(BUILD_PATH) && \ + THIRD_PARTY_DIR=$(THIRD_PARTY_DIR) cmake $(CMAKE_OPTIONS) -G "$(CMAKE_GENERATOR)" ../../../ +ifeq ($(DETECTED_OS), windows) + cd $(BUILD_PATH) && \ + "/c/Program Files (x86)/MSBuild/14.0/Bin/MSBuild.exe" Project.sln //p:Configuration=$(BUILD_TYPE) //maxcpucount:8 +else ifeq ($(DETECTED_OS), linux) + cd $(BUILD_PATH) && \ + make -j2 +else ifeq ($(DETECTED_OS), mac) + cd $(BUILD_PATH) && \ + xcodebuild -alltargets -configuration $(BUILD_TYPE) -project Project.xcodeproj CODE_SIGN_IDENTITY="" +endif + cd $(BUILD_PATH_BIN) && \ + python -m build -deploy: install-poetry - @echo "Deploying documentation to GitHub pages..." 
- poetry run mkdocs build - poetry run mkdocs gh-deploy \ No newline at end of file +rebuild: clean build + +test: build + cd $(BUILD_PATH_BIN)/dist && pip install --force-reinstall isx-*.whl + cd build/Release && \ + ISX_TEST_DATA_PATH=$(TEST_DATA_DIR) python -m pytest --disable-warnings -v -s --junit-xml=$(API_TEST_RESULTS_PATH) test $(TEST_ARGS) diff --git a/README.md b/README.md index a54ad89..eb53dbb 100644 --- a/README.md +++ b/README.md @@ -1,54 +1,86 @@ -# isx: pure-python API to read Inscopix data +# pyisx -![](https://github.com/inscopix/py_isx/actions/workflows/main.yml/badge.svg) -![](https://img.shields.io/pypi/v/isx) +`pyisx` is a python package for interacting with Inscopix data. This package encapsulates the following I/O functionality: -This is a pure-python API to read Inscopix ISXD files. +* Reading Inscopix files (`.isxd`, `.isxc`, `.isxb`, `.gpio`, `.imu`) +* Writing Inscopix files (`.isxd`) +* Exporting Inscopix files to third-party formats (`.mp4`, `.tiff`, `.csv`, `.hdf5`) +## Install -## Documentation +Pre-built binaries of this API can be installed from [PyPi](https://pypi.org/project/isx/). -[Read the documentation](https://inscopix.github.io/py_isx/) +```bash +pip install isx +``` -## Support +## Supported Platforms -| File type | Support | -| --------- | ------- | -| ISXD CellSet | ✅ | -| ISXD Movie | ✅ | -| ISXD Movie (multi-plane) | ❌ | -| ISXD Movie (dual-color) | ❌ | -| GPIO data | ❌ | -| ISXD Events | ❌ | -| ISXD VesselSet | ❌ | +This library has been built and tested on the following operating systems: +| OS | Version | Architecture | +| --------- | ------- | ----- | +| macOS | 13 | x86_64 | +| Ubuntu (Linux) | 20.04 | x86_64 | +| Windows | 11 | amd64 | -## Install +Each system has been built and tested on python versions 3.9 - 3.12. -### Poetry +> **Note:** For Apple Silicon (arm64 architectures), the package is currently not natively supported. 
However, it's possible to use anaconda to configure an x86 environment and use the project. -```bash -poetry add isx +``` +CONDA_SUBDIR=osx-64 conda create -n pyisx python=3.12 +conda activate pyisx +conda config --env --set subdir osx-64 ``` -### pip +## Development Guide +This guide documents how to build the python package wheel locally. -```bash -pip install isx +1. Setup `isxcore` +Follow the setup instructions for the C++ [isxcore](https://github.com/inscopix/isxcore) repo. + +2. Setup python virtual environment + +Create a python virtual environment, specifying the desired python version. +This guide uses anaconda for demonstration, but other tools like virtualenv or poetry can also be used. + +``` +conda create -n pyisx python=3.12 +conda activate pyisx ``` -## Caution +> **Note**: On macOS systems with Apple Silicon, the conda environment is configured differently, since `isxcore` is currently only built for x86 architectures. -This is a work in progress, and all reading functions in the IDPS Python API are not supported yet. +``` +CONDA_SUBDIR=osx-64 conda create -n pyisx python=3.12 +conda activate pyisx +conda config --env --set subdir osx-64 +``` + +3. Install build & test dependencies + +Inside the virtual environment install the following dependencies: + +``` +conda install -y build pytest +``` + +> **Note**: For python 3.12 the `build` package must be installed used `pip` instead. +4. Build the package -## Testing +``` +make build THIRD_PARTY_DIR=/path/to/third/party/dir +``` + +5. Run the unit tests + +``` +make test THIRD_PARTY_DIR=/path/to/third/party/dir TEST_DATA_DIR=/path/to/test/data/dir +``` -This code is tested using GitHub Actions on the following python -versions: +# Support -- 3.9 -- 3.10 -- 3.11 -- 3.12 +For any questions about this package, please contact support@inscopix.bruker.com. 
diff --git a/install-poetry.sh b/install-poetry.sh deleted file mode 100644 index d49d909..0000000 --- a/install-poetry.sh +++ /dev/null @@ -1,7 +0,0 @@ -if ! command -v poetry &> /dev/null -then - echo "poetry could not be found, installing..." - curl -sSL https://install.python-poetry.org | python3 - - echo 'export PATH=$HOME/.local/bin:$PATH' >> ~/.bash_profile - exit -fi \ No newline at end of file diff --git a/isx/__init__.py b/isx/__init__.py index 8093711..fd802f3 100644 --- a/isx/__init__.py +++ b/isx/__init__.py @@ -1,430 +1,13 @@ -"""this module contains a pure-python implementation -of the I/O code found in the IDPS API to read ISXD files -of the Movie and CellSet variety. - -This module is not at feature parity with the IDPS C++ API -yet, and some features may not be supported. - +""" +The Inscopix Python API package. """ -import json -import os -import struct - -import importlib_metadata -import numpy as np -from beartype import beartype - -NOT_IMPLEMENTED_MESSAGE = """ -This functionality has not been implemented in the pure python -API yet. If you need this, please use the IDPS Python API""" - -__version__ = importlib_metadata.version("isx") - - -@beartype -def _check_cell_index(cell_id: int, num_cells: int) -> None: - """helper function to make sure that cell index is OK""" - if cell_id < 0: - raise IndexError("Cell ID must be >=0") - - if cell_id >= num_cells: - raise IndexError( - f"Cannot access cell {cell_id} because this cell set has {num_cells} cells" - ) - - -class Duration: - """ - !!! info "IDPS Equivalent" - This class is designed to be equivalent of the `isx.Duration`class in the IDPS Python API`. Not all - features of the IDPS Python API are mirrored here. - - Attributes: - secs_float: A period of time, expressed in seconds. - - """ - - secs_float: float = None - - def __init__(self, secs_float: float): - self.secs_float = secs_float - - -class Spacing: - """ - !!! 
info "IDPS Equivalent" - This class is designed to be equivalent of the `isx.Spacing`class in the IDPS Python API`. Not all - features of the IDPS Python API are mirrored here. - - Attributes: - num_pixels: A 2-tuple containing the dimensions of the frame. - - """ - - num_pixels: tuple[int, int] = None - - def __int__(self): - self.num_pixels = None - - -class Timing: - """ - !!! info "IDPS Equivalent" - This class is designed to be equivalent of the `isx.Timing`class in the IDPS Python API`. Not all - features of the IDPS Python API are mirrored here. - - Attributes: - period: An instance of isx.Duration with information about the period - num_samples: The number of time samples in this object. - - """ - - period: Duration = None - num_samples: int = None - - def __init__(self): - self.period = None - self.num_samples = None - - -class Movie: - """ - !!! info "IDPS Equivalent" - This class is designed to be equivalent of the `isx.Movie`class in the IDPS Python API - - The Movie class allows you to create objects - that represent ISXD movies. Every Movie object - is bound to a ISXD movie file that exists on disk. - - Attributes: - file_path: path to ISXD file - footer: A dictionary containing data in the JSON footer of ISXD Movies - timing: a isx.Timing object containing timing information for this movie - spacing: a isx.Spacing object containing spacing information for this movie - - - """ - - file_path: str = None - footer: dict = None - timing: Timing = Timing() - spacing: Spacing = Spacing() - - def __init__(self): - pass - - @property - def data_type(self): - if self.footer["dataType"] == 0: - return np.uint16 - elif self.footer["dataType"] == 1: - return np.float32 - elif self.footer["dataType"] == 2: - return np.uint8 - else: - raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE) - - @classmethod - def read(cls, file_path: str): - """ - Open an existing movie from a file for reading. 
- - This is a light weight operation that simply reads the meta-data from the movie, - and does not read any frame data. - - Parameters: - file_path: The path of the file to read. - - Returns: - A `isx.Movie` object. The movie that was read. Meta-data is immediately available. Frames must be read using `isx.Movie.get_frame`. - """ - self = cls() - self.file_path = file_path - - footer = _extract_footer(file_path) - - # y maps to rows, x maps to columns - self.spacing.num_pixels = ( - footer["spacingInfo"]["numPixels"]["y"], - footer["spacingInfo"]["numPixels"]["x"], - ) - - self.timing.num_samples = footer["timingInfo"]["numTimes"] - - self.timing.period = Duration( - footer["timingInfo"]["period"]["num"] - / footer["timingInfo"]["period"]["den"] - ) - - # save the footer too - self.footer = footer - - return self - - @classmethod - def write(cls, file_path, timing, spacing, data_type): - raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE) - - @beartype - def get_frame_data(self, index: int): - """ - Read the contents of a single frame in a movie - - Parameters: - index : The numeric index of the frame. - - Returns: - The retrieved frame data. - """ - - if self.footer["dataType"] == 0: - bytes_per_pixel = 2 - elif self.footer["dataType"] == 1: - bytes_per_pixel = 4 - elif self.footer["dataType"] == 2: - bytes_per_pixel = 1 - else: - raise NotImplementedError( - "Unknown number of bytes per pixel. Cannot decode this frame." 
- ) - - if self.footer["hasFrameHeaderFooter"]: - raise NotImplementedError( - """[UNIMPLEMENTED] Cannot extract frame from this - movie because frames have footers and headers.""" - ) - - n_frames = self.footer["timingInfo"]["numTimes"] - - if index >= n_frames: - raise IndexError( - f"""[INVALID FRAME NUMBER] This movie has - {n_frames}, so accessing frame number {index} - is impossible.""" - ) - - n_pixels = self.spacing.num_pixels[0] * self.spacing.num_pixels[1] - - n_bytes_per_frame = n_pixels * bytes_per_pixel - - with open(self.file_path, mode="rb") as file: - file.seek(index * n_bytes_per_frame) - data = file.read(bytes_per_pixel * n_pixels) - frame = np.frombuffer(data, dtype=self.data_type) - frame = np.reshape(frame, self.spacing.num_pixels) - - return frame - - def get_frame_timestamp(self, index): - raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE) - - def set_frame_data(self, index, frame): - raise NotImplementedError(NOT_IMPLEMENTED_MESSAGE) - - def flush(self): - """this method exists for drop-in compatibility - with the IDPS API, but doesn't do anything""" - pass - - def get_acquisition_info(self): - return None - - def __del__(self): - pass - - -class CellSet: - """ - - The CellSet class allows you to read ISXD CellSets. - - !!! info "How to use the CellSet class" - To see how to use this class to read data from - ISXD Cellsets, click [here](../how-to/read-cellset.md). - This reference page describes each member of this - class and what each function does.""" - - num_cells: int = 0 - file_path = None - footer = None - spacing: Spacing = Spacing() - timing: Timing = Timing() - - def __init__(self): - pass - - def get_cell_image_data(self, cell_id: int) -> np.array: - """This method reads the spatial footprint of a single - cell and returns that as a Numpy array. 
- - Parameters: - cell_id: index of cell of interest - - Returns: - A MxN Numpy array containing frame data where M and N are the pixel dimensions - """ - - _check_cell_index(cell_id, self.num_cells) - - n_frames = self.footer["timingInfo"]["numTimes"] - - # get frame dimensions - size_x = self.footer["spacingInfo"]["numPixels"]["x"] - size_y = self.footer["spacingInfo"]["numPixels"]["y"] - n_pixels = size_y * size_x - - n_bytes_per_cell = 4 * (n_pixels + n_frames) - - with open(self.file_path, mode="rb") as file: - file.seek(cell_id * n_bytes_per_cell) - data = file.read(4 * n_pixels) - - footprint = struct.unpack("f" * n_pixels, data) - footprint = np.array(footprint).reshape((size_y, size_x)) - - return footprint - - def get_cell_trace_data(self, cell_id: int) -> np.array: - """return trace for a single cell""" - - _check_cell_index(cell_id, self.num_cells) - - n_frames = self.footer["timingInfo"]["numTimes"] - - # get frame dimensions - size_x = self.footer["spacingInfo"]["numPixels"]["x"] - size_y = self.footer["spacingInfo"]["numPixels"]["y"] - n_pixels = size_y * size_x - - n_bytes_per_cell = 4 * (n_pixels + n_frames) - - with open(self.file_path, mode="rb") as file: - file.seek(cell_id * n_bytes_per_cell + (4 * n_pixels)) - - # read cell trace - data = file.read(4 * n_frames) - trace = struct.unpack("f" * n_frames, data) - trace = np.array(trace) - - return trace - - def get_cell_name(self, cell_id: int) -> str: - """return name of cell""" - - _check_cell_index(cell_id, self.num_cells) - - return self.footer["CellNames"][cell_id] - - def get_cell_status(self, cell_id: int) -> str: - """return status of a cell""" - - _check_cell_index(cell_id, self.num_cells) - - if self.footer["CellStatuses"][cell_id] == 0: - return "accepted" - elif self.footer["CellStatuses"][cell_id] == 1: - return "undecided" - else: - return "rejected" - - @classmethod - def read(cls, file_path: str): - """class method to open a CellSet file for reading""" - - self = cls() - 
self.file_path = file_path - - self.footer = _extract_footer(file_path) - - self.num_cells = len(self.footer["CellNames"]) - - self.timing.num_samples = self.footer["timingInfo"]["numTimes"] - - self.timing.period = Duration( - self.footer["timingInfo"]["period"]["num"] - / self.footer["timingInfo"]["period"]["den"] - ) - - # y maps to rows, x maps to columns - self.spacing.num_pixels = ( - self.footer["spacingInfo"]["numPixels"]["y"], - self.footer["spacingInfo"]["numPixels"]["x"], - ) - - return self - - -@beartype -def isxd_type(file_path: str) -> str: - """infer ISXD file type - - Parameters: - file_path: path to ISXD file - - """ - - metadata = _extract_footer(file_path) - - isx_datatype_mapping = { - 0: "miniscope_movie", - 1: "cell_set", - 2: "isxd_behavioral_movie", - 3: "gpio_data", - 4: "miniscope_image", - 5: "neural_events", - 6: "isxd_metrics", - 7: "imu_data", - 8: "vessel_set", - } - return isx_datatype_mapping[metadata["type"]] - - -@beartype -def _footer_length(isxd_file: str) -> int: - """find the length of the footer in bytes""" - - with open(isxd_file, mode="rb") as file: - file.seek(-8, os.SEEK_END) - data = file.read() - footer_length = struct.unpack("ii", data)[0] - - return footer_length - - -@beartype -def _extract_footer(isxd_file: str) -> dict: - """extract movie footer from ISXD file""" - - footer_length = _footer_length(isxd_file) - - with open(isxd_file, mode="rb") as file: - file.seek(-8 - footer_length - 1, os.SEEK_END) - data = file.read(footer_length) - - footer = data.decode("utf-8") - return json.loads(footer) - - -def _get_isxd_times(input_filename): - """Get the timestamps of every sample of an isxd file from its metadata. - - The timestamps are generated by getting the average sampling period - of the isxd file. 
+from .core import * +from .io import * +from .util import * - :param input_filename str: path to the input file (.isxd) - :return: The timestamps of every sample in the isxd file - """ +from .core import __version__ - metadata = _extract_footer(input_filename) - period = ( - metadata["timingInfo"]["period"]["num"] - / metadata["timingInfo"]["period"]["den"] - ) - num_times = metadata["timingInfo"]["numTimes"] - times = np.linspace( - 0, - (num_times - 1) * period, - num_times, - ) - return times +from ._internal import is_with_algos as _is_with_algos +if _is_with_algos: + from .algo import * diff --git a/isx/_internal.py b/isx/_internal.py new file mode 100644 index 0000000..a5a9811 --- /dev/null +++ b/isx/_internal.py @@ -0,0 +1,1320 @@ +""" +The behavior of this may change in the future, so we recommend that you +do not use it and we will not support it. +""" + +import os +import ctypes +import atexit +import json +import warnings + +import numpy as np + +# Load the C library. +# For Windows we must temporarily change directory to load the +# C library. We change back afterwards. +_this_dir = os.path.dirname(os.path.realpath(__file__)) +_lib_dir = os.path.join(_this_dir, 'lib') +_is_windows = os.name == 'nt' +if _is_windows: + _cwd = os.getcwd() + os.chdir(_lib_dir) + _isx_lib_name = os.path.join(_lib_dir, 'isxpublicapi.dll') +else: + _isx_lib_name = os.path.join(_lib_dir, 'libisxpublicapi.so') + +c_api = ctypes.CDLL(_isx_lib_name) + +if _is_windows: + os.chdir(_cwd) + +# Define utility functions for interaction with C library. + +is_with_algos = c_api.isx_get_is_with_algos() + + +def validate_ptr(ptr): + if not ptr: + raise RuntimeError('Underlying pointer is null. Try using the read or write function instead of the constructor.') + + +def list_to_ctypes_array(input_list, input_type): + """ Convert a list of a certain type for ctypes so it can be passed as a pointer. 
+ """ + if not isinstance(input_list, list): + raise TypeError('Input must be contained in a list.') + + array = (input_type * len(input_list))() + for i, s in enumerate(input_list): + if input_type is ctypes.c_char_p: + array[i] = s.encode('utf-8') + else: + array[i] = s + + return array + + +def numpy_array_to_ctypes_array(numpy_array, element_type): + ctypes_array = (element_type * numpy_array.size)() + for i in range(numpy_array.size): + ctypes_array[i] = numpy_array[i] + return ctypes_array + + +def ctypes_ptr_to_list(ptr_to_element0, num_elements): + py_list = [] + for i in range(num_elements): + py_list.append(ptr_to_element0[i]) + return py_list + + +def convert_to_1d_numpy_array(input_array, dtype, name): + try: + array = np.array(input_array, dtype=dtype) + assert array.ndim == 1 + except Exception: + raise TypeError('{} must be an 1D array-like'.format(name)) + return array + + +def convert_to_nx2_numpy_array(input_array, dtype, name): + try: + array = np.array(input_array, dtype=dtype).reshape((-1, 2)) + assert (array.ndim == 2) and (array.shape[1] == 2) + except Exception: + raise TypeError('{} must be an Nx2 array-like'.format(name)) + return array + + +def ensure_list(inputs): + if not isinstance(inputs, list): + inputs = [inputs] + return inputs + + +def check_input_files(input_file_paths): + input_file_paths_arr = ensure_list(input_file_paths) + num_files = len(input_file_paths_arr) + in_arr = list_to_ctypes_array(input_file_paths_arr, ctypes.c_char_p) + return num_files, in_arr + + +def check_input_and_output_files(input_file_paths, output_file_paths, allow_single_output=False): + num_input_files, in_arr = check_input_files(input_file_paths) + output_file_paths_arr = ensure_list(output_file_paths) + if not allow_single_output: + if num_input_files != len(output_file_paths_arr): + raise ValueError('Number of input files must match the number of output files.') + out_arr = list_to_ctypes_array(output_file_paths_arr, ctypes.c_char_p) + return 
num_input_files, in_arr, out_arr + + +def _standard_errcheck(return_code, func, args=None): + """ The standard function to use for errcheck for CDLL functions. + """ + if return_code != 0: + error_message = c_api.isx_get_last_exception_string().decode() + raise Exception("Error calling C library function {}.\n{}".format(func.__name__, error_message)) + return args + + +def get_mode_from_read_only(read_only): + if read_only: + return 'r' + else: + return 'w' + + +def get_acquisition_info(ptr, get_info_func, get_info_size_func): + validate_ptr(ptr) + info_size = ctypes.c_size_t(0) + get_info_size_func(ptr, ctypes.byref(info_size)) + info_size = info_size.value + + info_str = ctypes.create_string_buffer(info_size) + get_info_func(ptr, info_str, info_size) + info_str = info_str.value.decode('utf-8') + return json.loads(info_str) + + +def ndarray_as_type(array, dtype): + """ Convert a numpy ndarray using the astype method but with a warning. + """ + if array.dtype != dtype: + warnings.warn('Converting from {} to {}.'.format(array.dtype, dtype)) + return array.astype(dtype) + return array + + +# Define maps to/from strings to enum values. + +def _reverse_dictionary(dictionary): + return {v: k for k, v in dictionary.items()} + + +def lookup_enum(enum_name, enum_dict, key): + try: + return enum_dict[key] + except KeyError: + raise ValueError("Unknown {} '{}'. 
Options are {}.".format(enum_name, key, ', '.join((str(k) for k in enum_dict.keys())))) + + +DATA_TYPE_FROM_NUMPY = { + np.uint16: c_api.isx_get_data_type_u16(), + np.float32: c_api.isx_get_data_type_f32(), + np.uint8: c_api.isx_get_data_type_u8(), +} +DATA_TYPE_TO_NUMPY = _reverse_dictionary(DATA_TYPE_FROM_NUMPY) + +CELL_STATUS_FROM_STRING = { + 'accepted': c_api.isx_get_cell_status_accepted(), + 'undecided': c_api.isx_get_cell_status_undecided(), + 'rejected': c_api.isx_get_cell_status_rejected(), +} +CELL_STATUS_TO_STRING = _reverse_dictionary(CELL_STATUS_FROM_STRING) + +VESSEL_STATUS_FROM_STRING = { + 'accepted': c_api.isx_get_vessel_status_accepted(), + 'undecided': c_api.isx_get_vessel_status_undecided(), + 'rejected': c_api.isx_get_vessel_status_rejected(), +} +VESSEL_STATUS_TO_STRING = _reverse_dictionary(VESSEL_STATUS_FROM_STRING) + +TIME_REF_FROM_STRING = { + 'start': c_api.isx_get_time_reference_start(), + 'unix': c_api.isx_get_time_reference_unix(), + 'tsc': c_api.isx_get_time_reference_tsc(), +} +TIME_REF_TO_STRING = _reverse_dictionary(TIME_REF_FROM_STRING) + +if is_with_algos: + ICA_UNMIX_FROM_STRING = { + 'temporal': c_api.isx_get_ica_unmix_type_temporal(), + 'spatial': c_api.isx_get_ica_unmix_type_spatial(), + 'both': c_api.isx_get_ica_unmix_type_both(), + } + ICA_UNMIX_TO_STRING = _reverse_dictionary(ICA_UNMIX_FROM_STRING) + + DFF_F0_FROM_STRING = { + 'mean': c_api.isx_get_dff_image_type_mean(), + 'min': c_api.isx_get_dff_image_type_min(), + } + DFF_F0_TO_STRING = _reverse_dictionary(DFF_F0_FROM_STRING) + + PROJECTION_FROM_STRING = { + 'mean': c_api.isx_get_projection_type_mean(), + 'min': c_api.isx_get_projection_type_min(), + 'max': c_api.isx_get_projection_type_max(), + 'standard_deviation': c_api.isx_get_projection_type_standard_deviation(), + } + PROJECTION_TO_STRING = _reverse_dictionary(PROJECTION_FROM_STRING) + + EVENT_REF_FROM_STRING = { + 'maximum': c_api.isx_get_event_time_reference_maximum(), + 'beginning': 
c_api.isx_get_event_time_reference_beginning(), + 'mid_rise': c_api.isx_get_event_time_reference_mid_rise(), + } + EVENT_REF_TO_STRING = _reverse_dictionary(EVENT_REF_FROM_STRING) + + +# Common types +CharPtrPtr = ctypes.POINTER(ctypes.c_char_p) +IntPtr = ctypes.POINTER(ctypes.c_int) +FloatPtr = ctypes.POINTER(ctypes.c_float) +DoublePtr = ctypes.POINTER(ctypes.c_double) +UInt16Ptr = ctypes.POINTER(ctypes.c_uint16) +Int64Ptr = ctypes.POINTER(ctypes.c_int64) +UInt64Ptr = ctypes.POINTER(ctypes.c_uint64) +SizeTPtr = ctypes.POINTER(ctypes.c_size_t) +UInt8Ptr = ctypes.POINTER(ctypes.c_uint8) + + +# Common structs + +class IsxRatio(ctypes.Structure): + _fields_ = [("num", ctypes.c_int64), + ("den", ctypes.c_int64)] + + def _as_float(self): + return self.num / self.den + + def __eq__(self, other): + # return (self.num == other.num) and (self.den == other.den) + return self._as_float() == other._as_float() + + def __repr__(self): + return 'IsxRatio({}, {})'.format(self.num, self.den) + + +class IsxTime(ctypes.Structure): + _fields_ = [("secs_since_epoch", IsxRatio), + ("utc_offset", ctypes.c_int32)] + + def __eq__(self, other): + return ((self.secs_since_epoch == other.secs_since_epoch) and + (self.utc_offset == other.utc_offset)) + + def __repr__(self): + return 'IsxTime({}, {})'.format(self.secs_since_epoch, self.utc_offset) + + +class IsxIndexRange(ctypes.Structure): + _fields_ = [("first", ctypes.c_size_t), + ("last", ctypes.c_size_t)] + + +class IsxTimingInfo(ctypes.Structure): + _fields_ = [("start", IsxTime), + ("step", IsxRatio), + ("num_samples", ctypes.c_size_t), + ("dropped", SizeTPtr), + ("num_dropped", ctypes.c_size_t), + ("cropped_first", SizeTPtr), + ("cropped_last", SizeTPtr), + ("num_cropped", ctypes.c_size_t), + ("blank", SizeTPtr), + ("num_blank", ctypes.c_size_t)] + + def __repr__(self): + return 'IsxTimingInfo({}, {}, {}, {}, {}, {}, {}, {}, {})'.format( + self.num_samples, self.step, self.start, self.num_dropped, self.dropped, self.num_cropped, 
self.cropped_first, self.cropped_last, self.blank, self.num_blank) + + +class IsxSpacingInfo(ctypes.Structure): + _fields_ = [("num_cols", ctypes.c_size_t), + ("num_rows", ctypes.c_size_t), + ("pixel_width", IsxRatio), + ("pixel_height", IsxRatio), + ("left", IsxRatio), + ("top", IsxRatio)] + + def __eq__(self, other): + return ((self.num_cols == other.num_cols) and + (self.num_rows == other.num_rows) and + (self.pixel_width == other.pixel_width) and + (self.pixel_height == other.pixel_height) and + (self.left == other.left) and + (self.top == other.top)) + + def __repr__(self): + return 'IsxSpacingInfo({}, {}, {}, {}, {}, {})'.format(self.num_cols, self.num_rows, self.pixel_width, self.pixel_height, self.left, self.top) + + @classmethod + def from_num_pixels(cls, num_pixels): + return cls( + num_pixels[1], num_pixels[0], + IsxRatio(3, 1), IsxRatio(3, 1), + IsxRatio(0, 1), IsxRatio(0, 1)) + + +# Movie struct and methods + +class IsxMovie(ctypes.Structure): + _fields_ = [("id", ctypes.c_size_t), + ("timing", IsxTimingInfo), + ("spacing", IsxSpacingInfo), + ("data_type", ctypes.c_int), + ("read_only", ctypes.c_bool), + ("file_path", ctypes.c_char_p)] +IsxMoviePtr = ctypes.POINTER(IsxMovie) + +c_api.isx_get_version_string_size.argtypes = [ + SizeTPtr] +c_api.isx_get_version_string_size.errcheck = _standard_errcheck + +c_api.isx_get_version_string.argtypes = [ + ctypes.c_char_p, + ctypes.c_size_t] +c_api.isx_get_version_string.errcheck = _standard_errcheck + +c_api.isx_read_movie.argtypes = [ + ctypes.c_char_p, + ctypes.POINTER(IsxMoviePtr)] +c_api.isx_read_movie.errcheck = _standard_errcheck + +c_api.isx_write_movie.argtypes = [ + ctypes.c_char_p, + IsxTimingInfo, + IsxSpacingInfo, + ctypes.c_int, + ctypes.c_bool, + ctypes.POINTER(IsxMoviePtr)] +c_api.isx_write_movie.errcheck = _standard_errcheck + +c_api.isx_movie_get_frame_data_u16.argtypes = [ + IsxMoviePtr, + ctypes.c_size_t, + UInt16Ptr] +c_api.isx_movie_get_frame_data_u16.errcheck = _standard_errcheck + 
# Remaining movie accessors: frame reads/writes per data type, flush/close,
# acquisition info and per-frame timestamps.

c_api.isx_movie_get_frame_data_f32.argtypes = [IsxMoviePtr, ctypes.c_size_t, FloatPtr]
c_api.isx_movie_get_frame_data_f32.errcheck = _standard_errcheck

c_api.isx_movie_get_frame_data_u8.argtypes = [IsxMoviePtr, ctypes.c_size_t, UInt8Ptr]
c_api.isx_movie_get_frame_data_u8.errcheck = _standard_errcheck

c_api.isx_movie_write_frame_u16.argtypes = [IsxMoviePtr, ctypes.c_size_t, UInt16Ptr]
c_api.isx_movie_write_frame_u16.errcheck = _standard_errcheck

c_api.isx_movie_write_frame_f32.argtypes = [IsxMoviePtr, ctypes.c_size_t, FloatPtr]
c_api.isx_movie_write_frame_f32.errcheck = _standard_errcheck

c_api.isx_movie_flush.argtypes = [IsxMoviePtr]
c_api.isx_movie_flush.errcheck = _standard_errcheck

c_api.isx_movie_delete.argtypes = [IsxMoviePtr]
c_api.isx_movie_delete.errcheck = _standard_errcheck

c_api.isx_movie_get_acquisition_info_size.argtypes = [IsxMoviePtr, ctypes.POINTER(ctypes.c_size_t)]
c_api.isx_movie_get_acquisition_info_size.errcheck = _standard_errcheck

c_api.isx_movie_get_acquisition_info.argtypes = [IsxMoviePtr, ctypes.c_char_p, ctypes.c_size_t]
c_api.isx_movie_get_acquisition_info.errcheck = _standard_errcheck

c_api.isx_movie_get_frame_timestamp.argtypes = [IsxMoviePtr, ctypes.c_size_t, UInt64Ptr]
c_api.isx_movie_get_frame_timestamp.errcheck = _standard_errcheck

# CellSet struct and methods

class IsxCellSet(ctypes.Structure):
    """ Handle to an open cell set file, mirroring the C API cell set struct. """
    _fields_ = [("id", ctypes.c_size_t),
                ("timing", IsxTimingInfo),
                ("spacing", IsxSpacingInfo),
                ("num_cells", ctypes.c_size_t),
                ("roi_set", ctypes.c_bool),
                ("read_only", ctypes.c_bool),
                ("file_path", ctypes.c_char_p)]
IsxCellSetPtr = ctypes.POINTER(IsxCellSet)

c_api.isx_read_cell_set.argtypes = [ctypes.c_char_p, ctypes.c_bool, ctypes.POINTER(IsxCellSetPtr)]
c_api.isx_read_cell_set.errcheck = _standard_errcheck

c_api.isx_write_cell_set.argtypes = [
    ctypes.c_char_p, IsxTimingInfo, IsxSpacingInfo, ctypes.c_bool,
    ctypes.POINTER(IsxCellSetPtr)]
c_api.isx_write_cell_set.errcheck = _standard_errcheck

c_api.isx_cell_set_get_name.argtypes = [IsxCellSetPtr, ctypes.c_size_t, ctypes.c_size_t, ctypes.c_char_p]
c_api.isx_cell_set_get_name.errcheck = _standard_errcheck

c_api.isx_cell_set_get_status.argtypes = [IsxCellSetPtr, ctypes.c_size_t, ctypes.POINTER(ctypes.c_int)]
c_api.isx_cell_set_get_status.errcheck = _standard_errcheck

c_api.isx_cell_set_set_status.argtypes = [IsxCellSetPtr, ctypes.c_size_t, ctypes.c_int]
c_api.isx_cell_set_set_status.errcheck = _standard_errcheck

c_api.isx_cell_set_get_trace.argtypes = [IsxCellSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_cell_set_get_trace.errcheck = _standard_errcheck

c_api.isx_cell_set_get_image.argtypes = [IsxCellSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_cell_set_get_image.errcheck = _standard_errcheck

c_api.isx_cell_set_write_image_trace.argtypes = [
    IsxCellSetPtr, ctypes.c_size_t, FloatPtr, FloatPtr, ctypes.c_char_p]
c_api.isx_cell_set_write_image_trace.errcheck = _standard_errcheck

c_api.isx_cell_set_flush.argtypes = [IsxCellSetPtr]
c_api.isx_cell_set_flush.errcheck = _standard_errcheck

c_api.isx_cell_set_delete.argtypes = [IsxCellSetPtr]
c_api.isx_cell_set_delete.errcheck = _standard_errcheck

c_api.isx_cell_set_get_acquisition_info_size.argtypes = [IsxCellSetPtr, ctypes.POINTER(ctypes.c_size_t)]
c_api.isx_cell_set_get_acquisition_info_size.errcheck = _standard_errcheck

c_api.isx_cell_set_get_acquisition_info.argtypes = [IsxCellSetPtr, ctypes.c_char_p, ctypes.c_size_t]
c_api.isx_cell_set_get_acquisition_info.errcheck = _standard_errcheck


# Events struct and methods.
class IsxEvents(ctypes.Structure):
    """ Handle to an open events file, mirroring the C API events struct. """
    _fields_ = [("id", ctypes.c_size_t),
                ("timing", IsxTimingInfo),
                ("num_cells", ctypes.c_size_t),
                ("read_only", ctypes.c_bool),
                ("file_path", ctypes.c_char_p)]
IsxEventsPtr = ctypes.POINTER(IsxEvents)

c_api.isx_read_events.argtypes = [ctypes.c_char_p, ctypes.POINTER(IsxEventsPtr)]
c_api.isx_read_events.errcheck = _standard_errcheck

c_api.isx_write_events.argtypes = [
    ctypes.c_char_p, IsxTimingInfo, CharPtrPtr, ctypes.c_size_t,
    ctypes.POINTER(IsxEventsPtr)]
c_api.isx_write_events.errcheck = _standard_errcheck

c_api.isx_events_write_cell.argtypes = [
    IsxEventsPtr, ctypes.c_size_t, ctypes.c_size_t, UInt64Ptr, FloatPtr]
c_api.isx_events_write_cell.errcheck = _standard_errcheck

# gets number of events in event file (for efficient memory allocation)
c_api.isx_events_get_cell_count.argtypes = [IsxEventsPtr, ctypes.c_char_p, SizeTPtr]
c_api.isx_events_get_cell_count.errcheck = _standard_errcheck

c_api.isx_events_get_cell_name.argtypes = [
    IsxEventsPtr, ctypes.c_size_t, ctypes.c_size_t, ctypes.c_char_p]
c_api.isx_events_get_cell_name.errcheck = _standard_errcheck

c_api.isx_events_get_cell.argtypes = [IsxEventsPtr, ctypes.c_char_p, UInt64Ptr, FloatPtr]
c_api.isx_events_get_cell.errcheck = _standard_errcheck

c_api.isx_events_flush.argtypes = [IsxEventsPtr]
c_api.isx_events_flush.errcheck = _standard_errcheck

c_api.isx_events_delete.argtypes = [IsxEventsPtr]
c_api.isx_events_delete.errcheck = _standard_errcheck

c_api.isx_events_get_acquisition_info_size.argtypes = [IsxEventsPtr, ctypes.POINTER(ctypes.c_size_t)]
c_api.isx_events_get_acquisition_info_size.errcheck = _standard_errcheck

c_api.isx_events_get_acquisition_info.argtypes = [IsxEventsPtr, ctypes.c_char_p, ctypes.c_size_t]
c_api.isx_events_get_acquisition_info.errcheck = _standard_errcheck


# GPIO struct and methods.
class IsxGpio(ctypes.Structure):
    """ Handle to an open GPIO file, mirroring the C API GPIO struct. """
    _fields_ = [("id", ctypes.c_size_t),
                ("timing", IsxTimingInfo),
                ("num_channels", ctypes.c_size_t),
                ("read_only", ctypes.c_bool),
                ("file_path", ctypes.c_char_p)]
IsxGpioPtr = ctypes.POINTER(IsxGpio)

c_api.isx_read_gpio.argtypes = [ctypes.c_char_p, ctypes.POINTER(IsxGpioPtr)]
c_api.isx_read_gpio.errcheck = _standard_errcheck

# gets number of channels in gpio file (for efficient memory allocation)
c_api.isx_gpio_get_channel_count.argtypes = [IsxGpioPtr, ctypes.c_char_p, SizeTPtr]
c_api.isx_gpio_get_channel_count.errcheck = _standard_errcheck

c_api.isx_gpio_get_channel_name.argtypes = [
    IsxGpioPtr, ctypes.c_size_t, ctypes.c_size_t, ctypes.c_char_p]
c_api.isx_gpio_get_channel_name.errcheck = _standard_errcheck

c_api.isx_gpio_get_channel.argtypes = [IsxGpioPtr, ctypes.c_char_p, UInt64Ptr, FloatPtr]
c_api.isx_gpio_get_channel.errcheck = _standard_errcheck

c_api.isx_gpio_delete.argtypes = [IsxGpioPtr]
c_api.isx_gpio_delete.errcheck = _standard_errcheck

c_api.isx_gpio_get_acquisition_info_size.argtypes = [IsxGpioPtr, ctypes.POINTER(ctypes.c_size_t)]
c_api.isx_gpio_get_acquisition_info_size.errcheck = _standard_errcheck

c_api.isx_gpio_get_acquisition_info.argtypes = [IsxGpioPtr, ctypes.c_char_p, ctypes.c_size_t]
c_api.isx_gpio_get_acquisition_info.errcheck = _standard_errcheck

# VesselSet struct and methods

class IsxVesselSet(ctypes.Structure):
    """ Handle to an open vessel set file, mirroring the C API vessel set struct. """
    _fields_ = [("id", ctypes.c_size_t),
                ("timing", IsxTimingInfo),
                ("spacing", IsxSpacingInfo),
                ("num_vessels", ctypes.c_size_t),
                ("read_only", ctypes.c_bool),
                ("file_path", ctypes.c_char_p)]
IsxVesselSetPtr = ctypes.POINTER(IsxVesselSet)

c_api.isx_read_vessel_set.argtypes = [ctypes.c_char_p, ctypes.c_bool, ctypes.POINTER(IsxVesselSetPtr)]
c_api.isx_read_vessel_set.errcheck = _standard_errcheck

c_api.isx_write_vessel_set.argtypes = [
    ctypes.c_char_p, IsxTimingInfo, IsxSpacingInfo, ctypes.c_int,
    ctypes.POINTER(IsxVesselSetPtr)]
c_api.isx_write_vessel_set.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_name.argtypes = [
    IsxVesselSetPtr, ctypes.c_size_t, ctypes.c_size_t, ctypes.c_char_p]
c_api.isx_vessel_set_get_name.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_status.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, ctypes.POINTER(ctypes.c_int)]
c_api.isx_vessel_set_get_status.errcheck = _standard_errcheck

c_api.isx_vessel_set_set_status.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, ctypes.c_int]
c_api.isx_vessel_set_set_status.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_trace.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_vessel_set_get_trace.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_image.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_vessel_set_get_image.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_line_endpoints.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, Int64Ptr]
c_api.isx_vessel_set_get_line_endpoints.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_type.argtypes = [IsxVesselSetPtr, ctypes.POINTER(ctypes.c_int)]
c_api.isx_vessel_set_get_type.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_center_trace.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_vessel_set_get_center_trace.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_direction_trace.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, FloatPtr]
c_api.isx_vessel_set_get_direction_trace.errcheck = _standard_errcheck

c_api.isx_vessel_set_is_correlation_saved.argtypes = [IsxVesselSetPtr, ctypes.POINTER(ctypes.c_int)]
c_api.isx_vessel_set_is_correlation_saved.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_correlation_size.argtypes = [IsxVesselSetPtr, ctypes.c_size_t, SizeTPtr]
c_api.isx_vessel_set_get_correlation_size.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_correlations.argtypes = [
    IsxVesselSetPtr, ctypes.c_size_t, ctypes.c_size_t, FloatPtr]
c_api.isx_vessel_set_get_correlations.errcheck = _standard_errcheck

c_api.isx_vessel_set_write_vessel_diameter_data.argtypes = [
    IsxVesselSetPtr, ctypes.c_size_t, FloatPtr, Int64Ptr, FloatPtr, FloatPtr,
    ctypes.c_char_p]
c_api.isx_vessel_set_write_vessel_diameter_data.errcheck = _standard_errcheck

c_api.isx_vessel_set_write_rbc_velocity_data.argtypes = [
    IsxVesselSetPtr, ctypes.c_size_t, FloatPtr, Int64Ptr, FloatPtr, FloatPtr,
    ctypes.c_size_t, ctypes.c_size_t, FloatPtr, ctypes.c_char_p]
c_api.isx_vessel_set_write_rbc_velocity_data.errcheck = _standard_errcheck

c_api.isx_vessel_set_flush.argtypes = [IsxVesselSetPtr]
c_api.isx_vessel_set_flush.errcheck = _standard_errcheck

c_api.isx_vessel_set_delete.argtypes = [IsxVesselSetPtr]
c_api.isx_vessel_set_delete.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_acquisition_info_size.argtypes = [IsxVesselSetPtr, ctypes.POINTER(ctypes.c_size_t)]
c_api.isx_vessel_set_get_acquisition_info_size.errcheck = _standard_errcheck

c_api.isx_vessel_set_get_acquisition_info.argtypes = [IsxVesselSetPtr, ctypes.c_char_p, ctypes.c_size_t]
c_api.isx_vessel_set_get_acquisition_info.errcheck = _standard_errcheck

# Core enums which do not return an error code, but directly return their values.

c_api.isx_get_data_type_u16.argtypes = []
c_api.isx_get_data_type_f32.argtypes = []

c_api.isx_get_cell_status_accepted.argtypes = []
c_api.isx_get_cell_status_undecided.argtypes = []
c_api.isx_get_cell_status_rejected.argtypes = []

# Gets the message associated with the last error.
# This returns that message directly as a char *, because it is assumed it can
# not error.
c_api.isx_get_last_exception_string.argtypes = []
c_api.isx_get_last_exception_string.restype = ctypes.c_char_p

# Other core functions

c_api.isx_initialize.argtypes = []
c_api.isx_initialize.errcheck = _standard_errcheck

c_api.isx_shutdown.argtypes = []
c_api.isx_shutdown.errcheck = _standard_errcheck

c_api.isx_export_movie_nwb.argtypes = [
    ctypes.c_int, CharPtrPtr,
    ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
    ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
    ctypes.c_char_p, ctypes.c_char_p]
c_api.isx_export_movie_nwb.errcheck = _standard_errcheck

c_api.isx_export_movie_tiff.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_bool]
c_api.isx_export_movie_tiff.errcheck = _standard_errcheck

c_api.isx_export_movie_mp4.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_double, ctypes.c_bool,
    ctypes.c_int, ctypes.c_bool, ctypes.c_bool, ctypes.c_bool]
c_api.isx_export_movie_mp4.errcheck = _standard_errcheck

c_api.isx_export_nvision_movie_tracking_frame_data_to_csv.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_int]
c_api.isx_export_nvision_movie_tracking_frame_data_to_csv.errcheck = _standard_errcheck

c_api.isx_export_nvision_movie_tracking_zone_data_to_csv.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p]
c_api.isx_export_nvision_movie_tracking_zone_data_to_csv.errcheck = _standard_errcheck

c_api.isx_export_movie_timestamps_to_csv.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_int]
c_api.isx_export_movie_timestamps_to_csv.errcheck = _standard_errcheck

if is_with_algos:
    c_api.isx_export_cell_set.argtypes = [
        ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_char_p,
        ctypes.c_int, ctypes.c_bool, ctypes.c_char_p]
    c_api.isx_export_cell_set.errcheck = _standard_errcheck

    c_api.isx_export_vessel_set.argtypes = [
        ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_char_p,
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int]
    c_api.isx_export_vessel_set.errcheck = _standard_errcheck

# NOTE(review): unlike the cell/vessel set exporters above, the event and GPIO
# exporters are configured unconditionally (outside is_with_algos) -- confirm
# this asymmetry is intentional.
c_api.isx_export_event_set.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_int, ctypes.c_char_p,
    ctypes.c_bool, ctypes.c_bool]
c_api.isx_export_event_set.errcheck = _standard_errcheck

c_api.isx_export_gpio_set.argtypes = [
    ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int]
c_api.isx_export_gpio_set.errcheck = _standard_errcheck

c_api.isx_export_gpio_isxd.argtypes = [ctypes.c_char_p, ctypes.c_char_p]
c_api.isx_export_gpio_isxd.errcheck = _standard_errcheck

c_api.isx_align_start_times.argtypes = [ctypes.c_char_p, ctypes.c_size_t, CharPtrPtr]
c_api.isx_align_start_times.errcheck = _standard_errcheck

c_api.isx_export_aligned_timestamps.argtypes = [
    ctypes.c_char_p, ctypes.c_size_t, CharPtrPtr, ctypes.c_char_p, CharPtrPtr,
    ctypes.c_char_p, ctypes.c_int]
c_api.isx_export_aligned_timestamps.errcheck = _standard_errcheck


# Algo enums which do not return an error code, but their value directly.
if is_with_algos:
    c_api.isx_get_ica_unmix_type_temporal.argtypes = []
    c_api.isx_get_ica_unmix_type_spatial.argtypes = []
    c_api.isx_get_ica_unmix_type_both.argtypes = []

    c_api.isx_get_dff_image_type_mean.argtypes = []
    c_api.isx_get_dff_image_type_min.argtypes = []

    c_api.isx_get_projection_type_mean.argtypes = []
    c_api.isx_get_projection_type_min.argtypes = []
    c_api.isx_get_projection_type_max.argtypes = []
    c_api.isx_get_projection_type_standard_deviation.argtypes = []

    c_api.isx_get_event_time_reference_maximum.argtypes = []
    c_api.isx_get_event_time_reference_beginning.argtypes = []
    c_api.isx_get_event_time_reference_mid_rise.argtypes = []

# Version numbers

c_api.isx_get_core_version_major.argtypes = []
c_api.isx_get_core_version_minor.argtypes = []
c_api.isx_get_core_version_patch.argtypes = []
c_api.isx_get_core_version_build.argtypes = []


def get_core_version():
    """ Return the core version as [major, minor, patch, build]. """
    return [getter() for getter in (
        c_api.isx_get_core_version_major,
        c_api.isx_get_core_version_minor,
        c_api.isx_get_core_version_patch,
        c_api.isx_get_core_version_build)]


# Algo functions

if is_with_algos:
    c_api.isx_preprocess_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int, ctypes.c_int,
        ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int,
        ctypes.c_bool, ctypes.c_bool]
    c_api.isx_preprocess_movie.errcheck = _standard_errcheck

    c_api.isx_deinterleave_movie.argtypes = [
        ctypes.c_int, ctypes.c_int, UInt16Ptr, CharPtrPtr, CharPtrPtr]
    c_api.isx_deinterleave_movie.errcheck = _standard_errcheck

    c_api.isx_motion_correct_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int, ctypes.c_int,
        ctypes.c_double, ctypes.c_int, ctypes.c_double, IntPtr, ctypes.c_int,
        ctypes.c_int, ctypes.c_char_p, ctypes.c_float, ctypes.c_int,
        CharPtrPtr, ctypes.c_int, ctypes.c_char_p, ctypes.c_bool]
    c_api.isx_motion_correct_movie.errcheck = _standard_errcheck
# Guard re-opened so this section stands on its own; the algo bindings are
# only present in builds that include the algo module.
if is_with_algos:
    c_api.isx_pca_ica_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int, ctypes.c_int,
        ctypes.c_int, ctypes.c_double, ctypes.c_int, ctypes.c_double,
        ctypes.c_int, IntPtr, ctypes.c_int, ctypes.c_bool, ctypes.c_int]
    c_api.isx_pca_ica_movie.errcheck = _standard_errcheck

    c_api.isx_cnmfe_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_char_p, ctypes.c_int,
        ctypes.c_float, ctypes.c_float, ctypes.c_size_t, ctypes.c_float,
        ctypes.c_int, ctypes.c_int, ctypes.c_float, ctypes.c_int,
        ctypes.c_size_t, ctypes.c_size_t, ctypes.c_size_t, ctypes.c_int]
    c_api.isx_cnmfe_movie.errcheck = _standard_errcheck

    c_api.isx_spatial_band_pass_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int, ctypes.c_double,
        ctypes.c_int, ctypes.c_double, ctypes.c_int, ctypes.c_int]
    c_api.isx_spatial_band_pass_movie.errcheck = _standard_errcheck

    c_api.isx_delta_f_over_f.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int]
    c_api.isx_delta_f_over_f.errcheck = _standard_errcheck

    c_api.isx_project_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, ctypes.c_char_p, ctypes.c_int]
    c_api.isx_project_movie.errcheck = _standard_errcheck

    c_api.isx_event_detection.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_double,
        ctypes.c_double, ctypes.c_int, ctypes.c_int, ctypes.c_int]
    c_api.isx_event_detection.errcheck = _standard_errcheck

    c_api.isx_temporal_crop_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, IntPtr, ctypes.c_bool]
    c_api.isx_temporal_crop_movie.errcheck = _standard_errcheck

    c_api.isx_compute_cell_metrics.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_char_p, ctypes.c_bool]
    c_api.isx_compute_cell_metrics.errcheck = _standard_errcheck

    c_api.isx_apply_cell_set.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_char_p, ctypes.c_double]
    c_api.isx_apply_cell_set.errcheck = _standard_errcheck

    c_api.isx_export_cell_contours.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_double, ctypes.c_int]
    c_api.isx_export_cell_contours.errcheck = _standard_errcheck

    c_api.isx_longitudinal_registration.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, CharPtrPtr, CharPtrPtr,
        ctypes.c_char_p, ctypes.c_double, ctypes.c_int, ctypes.c_int]
    c_api.isx_longitudinal_registration.errcheck = _standard_errcheck

    c_api.isx_ncc_register_cellsets.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
        ctypes.c_int, ctypes.c_double, ctypes.c_double]
    c_api.isx_ncc_register_cellsets.errcheck = _standard_errcheck

    c_api.isx_multiplane_registration.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_float, ctypes.c_float,
        ctypes.c_bool]
    c_api.isx_multiplane_registration.errcheck = _standard_errcheck

    c_api.isx_estimate_num_ics.argtypes = [
        ctypes.c_int, CharPtrPtr, ctypes.c_float, ctypes.c_float,
        ctypes.c_float, SizeTPtr]
    c_api.isx_estimate_num_ics.errcheck = _standard_errcheck

    c_api.isx_classify_cell_status.argtypes = [
        ctypes.c_int, CharPtrPtr, ctypes.c_int, CharPtrPtr, ctypes.c_int,
        CharPtrPtr, CharPtrPtr, DoublePtr, ctypes.c_int, SizeTPtr]
    c_api.isx_classify_cell_status.errcheck = _standard_errcheck

    c_api.isx_movie_verify_deinterleave.argtypes = [
        ctypes.c_char_p, ctypes.c_uint16, IntPtr]
    c_api.isx_movie_verify_deinterleave.errcheck = _standard_errcheck

    c_api.isx_deinterleave_dualcolor_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, CharPtrPtr, ctypes.c_bool]
    c_api.isx_deinterleave_dualcolor_movie.errcheck = _standard_errcheck

    c_api.isx_crop_cell_set.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.c_int,
        ctypes.c_int, ctypes.c_int]
    c_api.isx_crop_cell_set.errcheck = _standard_errcheck

    c_api.isx_binarize_cell_set.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_float, ctypes.c_bool]
    c_api.isx_binarize_cell_set.errcheck = _standard_errcheck

    c_api.isx_transform_cell_set.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_float]
    c_api.isx_transform_cell_set.errcheck = _standard_errcheck

    c_api.isx_compute_spatial_overlap_cell_set.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_bool]
    c_api.isx_compute_spatial_overlap_cell_set.errcheck = _standard_errcheck

    c_api.isx_register_cellsets.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
        ctypes.c_char_p, ctypes.c_bool, ctypes.c_float, ctypes.c_float,
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_uint32, ctypes.c_uint32]
    c_api.isx_register_cellsets.errcheck = _standard_errcheck

    c_api.isx_multicolor_registration.argtypes = [
        ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
        ctypes.c_char_p, ctypes.c_float, ctypes.c_float, ctypes.c_bool,
        ctypes.c_bool, ctypes.c_bool, ctypes.c_bool, ctypes.c_char_p]
    c_api.isx_multicolor_registration.errcheck = _standard_errcheck

    c_api.isx_cellset_deconvolve.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, CharPtrPtr, ctypes.c_bool,
        ctypes.c_float, ctypes.c_float, ctypes.c_float, ctypes.c_int,
        ctypes.c_bool, ctypes.c_uint32, ctypes.c_float, ctypes.c_int]
    c_api.isx_cellset_deconvolve.errcheck = _standard_errcheck

    c_api.isx_estimate_vessel_diameter.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int64, Int64Ptr,
        ctypes.c_double, ctypes.c_double, ctypes.c_int, ctypes.c_char_p,
        ctypes.c_char_p, ctypes.c_bool, ctypes.c_double, ctypes.c_size_t]
    c_api.isx_estimate_vessel_diameter.errcheck = _standard_errcheck

    c_api.isx_estimate_rbc_velocity.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_int64, Int64Ptr,
        ctypes.c_double, ctypes.c_double, ctypes.c_int, ctypes.c_bool]
    c_api.isx_estimate_rbc_velocity.errcheck = _standard_errcheck

    c_api.isx_create_neural_activity_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_bool]
    c_api.isx_create_neural_activity_movie.errcheck = _standard_errcheck

    c_api.isx_interpolate_movie.argtypes = [
        ctypes.c_int, CharPtrPtr, CharPtrPtr, ctypes.c_bool, ctypes.c_bool,
        ctypes.c_size_t]
    c_api.isx_interpolate_movie.errcheck = _standard_errcheck

    c_api.isx_get_vessel_line_num_pixels.argtypes = [
        Int64Ptr, ctypes.c_size_t, ctypes.c_size_t, IntPtr]
    c_api.isx_get_vessel_line_num_pixels.errcheck = _standard_errcheck

    c_api.isx_get_vessel_line_coordinates.argtypes = [
        Int64Ptr, ctypes.c_size_t, ctypes.c_size_t, IntPtr, IntPtr]
    c_api.isx_get_vessel_line_coordinates.errcheck = _standard_errcheck

    c_api.isx_estimate_vessel_diameter_single_vessel.argtypes = [
        ctypes.c_char_p, Int64Ptr, ctypes.c_size_t, ctypes.c_size_t,
        DoublePtr, DoublePtr, DoublePtr, DoublePtr, DoublePtr, DoublePtr]
    c_api.isx_estimate_vessel_diameter_single_vessel.errcheck = _standard_errcheck

    c_api.isx_decompress.argtypes = [ctypes.c_char_p, ctypes.c_char_p]
    c_api.isx_decompress.errcheck = _standard_errcheck


def initialize():
    """ Initialize the C API. """
    c_api.isx_initialize()


def shutdown():
    """ Shut the C API down. """
    c_api.isx_shutdown()


def get_version_string():
    """ Return the API version as a UTF-8 string queried from the C library. """
    num_bytes = ctypes.c_size_t(0)
    c_api.isx_get_version_string_size(ctypes.byref(num_bytes))
    num_bytes = num_bytes.value
    buffer = ctypes.create_string_buffer(num_bytes)
    c_api.isx_get_version_string(buffer, num_bytes)
    return buffer.value.decode('utf-8')

# Initialize the API so the client does not have to, then register shutdown on exit.
+initialize() +atexit.register(shutdown) diff --git a/isx/algo.py b/isx/algo.py new file mode 100644 index 0000000..fb8c68a --- /dev/null +++ b/isx/algo.py @@ -0,0 +1,1530 @@ +""" +The algo module deals with running algorithms on movies, +cell sets, and event sets. +""" + +import os +import ctypes + +import numpy as np + +import isx._internal + + +def preprocess( + input_movie_files, output_movie_files, + temporal_downsample_factor=1, spatial_downsample_factor=1, + crop_rect=None, crop_rect_format="tlbr", + fix_defective_pixels=True, trim_early_frames=True): + """ + Preprocess movies, optionally spatially and temporally downsampling and cropping. + + For more details see :ref:`preprocessing`. + + Arguments + --------- + input_movie_files : list + The file paths to the input movies. + output_movie_files : list + The file paths to write the preprocessed output movies to. + This must be the same length as input_movie_files. + temporal_downsample_factor : int >= 1 + The factor that determines how much the movie is temporally downsampled. + spatial_downsample_factor : int >= 1 + The factor that determines how much the movie is spatially downsampled. + crop_rect : 4-tuple + A list of 4 values representing the coordinates of the area to crop. + Can be represented as one of two formats, specified by `crop_rect_format`. + If `crop_rect_format == "tlbr"`, then the coordinates represent + the top-left and bottom-right corners of the area to crop: + [top_left_y, top_left_x, bottom_right_y, bottom_right_x]. All coordinates are specfied relative to the + top-left corner of the field of view. For example, to trim 10 pixels all around a field of view of size 100x50 + pixels, the cropping vertices would be specfied as [10, 10, 39, 89]. + If `crop_rect_format == "tlwh", then the coordinates represent + the top-left corner, width, and height of the area to crop" + [top_left_x, top_left_y, width, height]. 
The top-left corner is specfied relative to the + top-left corner of the field of view. For example, to trim 10 pixels all around a field of view of size 100x50 + pixels, the cropping vertices would be specfied as [10, 10, 90, 40]. + crop_rect_format : {'tlbr', 'tlwh'} + The format of `crop_rect`. + The format 'tlbr' stands for: top-left, bottom-right -- the two corners of the area to crop. + The format 'tlwh' stands for: top-left, width, height -- the top-left corner and the size of the area to crop. + fix_defective_pixels : bool + If True, then check for defective pixels and correct them. + trim_early_frames : bool + If True, then remove early frames that are usually dark or dim. + """ + crop_rect_formats = ("tlbr", "tlwh") + if crop_rect_format not in crop_rect_formats: + raise ValueError(f"Invalid crop rect format ({crop_rect_format}), must be one of the following: {crop_rect_formats}") + + if crop_rect is None: + crop_rect = (-1, -1, -1, -1) + elif crop_rect_format == "tlwh": + # format crop rect as top-left, bottom-right + top_left_x, top_left_y, width, height = crop_rect + bottom_right_x, bottom_right_y = (top_left_x + width - 1), (top_left_y + height - 1) + crop_rect = (top_left_y, top_left_x, bottom_right_y, bottom_right_x) + + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_files, output_movie_files) + isx._internal.c_api.isx_preprocess_movie( + num_files, in_arr, out_arr, temporal_downsample_factor, spatial_downsample_factor, + crop_rect[0], crop_rect[1], crop_rect[2], crop_rect[3], fix_defective_pixels, trim_early_frames) + + +def de_interleave(input_movie_files, output_movie_files, in_efocus_values): + """ + De-interleave multiplane movies. + + Arguments + --------- + input_movie_files : list + The file paths to the input movies. + All files should have the same efocus values and same number of planes. + output_movie_files : list + The file paths to write the de-interleaved output movies to. 
+ This must be the length of input_movie_files * the number of planes. + The sequence of every number of planes elements must match the sequence of efocus values. + E.g: [in_1, in_2], [efocus1, efocus2] -> [out_1_efocus1, out_1_efocus2, out_2_efocus1, out_2_efocus2] + in_efocus_values : list + The efocus value for each planes. + This must in range 0 <= efocus <= 1000. + """ + efocus_arr = isx._internal.list_to_ctypes_array(in_efocus_values, ctypes.c_uint16) + num_planes = len(in_efocus_values) + num_in_files, in_arr = isx._internal.check_input_files(input_movie_files) + num_output_files, out_arr = isx._internal.check_input_files(output_movie_files) + + if num_output_files != num_in_files * num_planes: + raise ValueError('Number of output files must match the number of input files times the number of planes.') + + isx._internal.c_api.isx_deinterleave_movie(num_in_files, num_planes, efocus_arr, in_arr, out_arr) + + +def motion_correct( + input_movie_files, output_movie_files, max_translation=20, + low_bandpass_cutoff=0.004, high_bandpass_cutoff=0.016, roi=None, + reference_segment_index=0, reference_frame_index=0, reference_file_name='', + global_registration_weight=1.0, output_translation_files=None, + output_crop_rect_file=None, preserve_input_dimensions=False): + """ + Motion correct movies to a reference frame. + + For more details see :ref:`motionCorrection`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to motion correct. + output_movie_files : list + The file paths of the output movies. + This must be the same length as input_movie_files. + max_translation : int > 0 + The maximum translation allowed by motion correction in pixels. + low_bandpass_cutoff : float > 0 + If not None, then the low cutoff of the spatial filter applied to each frame prior to motion estimation. + high_bandpass_cutoff : float > 0 + If not None, then the high cutoff for a spatial filter applied to each frame prior to motion estimation. 
+ roi : Nx2 array-like + If not None, each row is a vertex of the ROI to use for motion estimation. + Otherwise, use the entire frame. + reference_segment_index : int > 0 + If a reference frame is to be specified, this parameter indicates the index of the movie whose frame will + be utilized, with respect to input_movie_files. + If only one movie is specified to be motion corrected, this parameter must be 0. + reference_frame_index : int > 0 + Use this parameter to specify the index of the reference frame to be used, with respect to reference_segment_index. + If reference_file_name is specified, this parameter, as well as reference_segment_index, is ignored. + reference_file_name : str + If an external reference frame is to be used, this parameter should be set to path of the .isxd file + that contains the reference image. + global_registration_weight : 0.05 <= float <= 1 + When this is set to 1, only the reference frame is used for motion estimation. + When this is less than 1, the previous frame is also used for motion estimation. + The closer this value is to 0, the more the previous frame is used and the less + the reference frame is used. + output_translation_files : list + A list of file names to write the X and Y translations to. + Must be either None, in which case no files are written, or a list of valid file names equal + in length to the number of input and output file names. + The output translations are written into a .csv file with three columns. + The first two columns, "translationX" and "translationY", store the X and Y translations from + each frame to the reference frame respectively. + The third column contains the time of the frame since the beginning of the movie. + The first row stores the column names as a header. + Each subsequent row contains the X translation, Y translation, and time offset for that frame. 
+ output_crop_rect_file : str + The path to a file that will contain the crop rectangle applied to the input movies to generate the output + movies. + The format of the crop rectangle is a comma separated list: x,y,width,height. + preserve_input_dimensions: bool + If true, the output movie will be padded along the edges to match the dimensions of the input movie. + The padding value will be set to the 5th percentile of the pixel value distribution collected from 10 evenly sampled frames from the input movie. + """ + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_files, output_movie_files) + + use_low = int(low_bandpass_cutoff is not None) + use_high = int(high_bandpass_cutoff is not None) + + if use_low == 0: + low_bandpass_cutoff = 0.0 + if use_high == 0: + high_bandpass_cutoff = 1.0 + + # The first two elements tell the C layer the number of ROIs, then the + # number of vertices in the first ROI. + if roi is not None: + roi_np = isx._internal.convert_to_nx2_numpy_array(roi, int, 'roi') + roi_arr = isx._internal.list_to_ctypes_array([1, roi_np.shape[0]] + list(roi_np.ravel()), ctypes.c_int) + else: + roi_arr = isx._internal.list_to_ctypes_array([0], ctypes.c_int) + + if reference_file_name is None: + ref_file_name = '' + else: + ref_file_name = reference_file_name + + out_trans_arr = isx._internal.list_to_ctypes_array([''], ctypes.c_char_p) + write_output_translations = int(output_translation_files is not None) + if write_output_translations: + out_trans_files = isx._internal.ensure_list(output_translation_files) + assert len(out_trans_files) == num_files, "Number of output translation files must match number of input movies ({} != {})".format(len(out_trans_files), len(in_arr)) + out_trans_arr = isx._internal.list_to_ctypes_array(out_trans_files, ctypes.c_char_p) + + write_crop_rect = int(output_crop_rect_file is not None) + if not write_crop_rect: + output_crop_rect_file = '' + + isx._internal.c_api.isx_motion_correct_movie( + 
num_files, in_arr, out_arr, max_translation, + use_low, low_bandpass_cutoff, use_high, high_bandpass_cutoff, + roi_arr, reference_segment_index, reference_frame_index, + ref_file_name.encode('utf-8'), global_registration_weight, + write_output_translations, out_trans_arr, + write_crop_rect, output_crop_rect_file.encode('utf-8'), preserve_input_dimensions) + + +def pca_ica( + input_movie_files, output_cell_set_files, num_pcs, num_ics=120, unmix_type='spatial', + ica_temporal_weight=0, max_iterations=100, convergence_threshold=1e-5, block_size=1000, + auto_estimate_num_ics=False, average_cell_diameter=13): + """ + Run PCA-ICA cell identification on movies. + + For more details see :ref:`PCA_ICA`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to run PCA-ICA on. + output_cell_set_files : list + The paths of the output cell set files. Must be same length as input_movie_files. + num_pcs : int > 0 + The number of principal components (PCs) to estimate. Must be >= num_ics. + num_ics : int > 0 + The number of independent components (ICs) to estimate. + unmix_type : {'temporal', 'spatial', 'both'} + The unmixing type or dimension. + ica_temporal_weight : 0 <= float <= 1 + The temporal weighting factor used for ICA. + max_iterations : int > 0 + The maximum number of iterations for ICA. + convergence_threshold : float > 0 + The convergence threshold for ICA. + block_size : int > 0 + The size of the blocks for the PCA step. The larger the block size, the more memory that will be used. + auto_estimate_num_ics : bool + If True the number of ICs will be automatically estimated during processing. + average_cell_diameter : int > 0 + Average cell diameter in pixels (only used when auto_estimate_num_ics is set to True) + Returns + ------- + bool + True if PCA-ICA converged, False otherwise. 
+ """ + unmix_type_int = isx._internal.lookup_enum('unmix_type', isx._internal.ICA_UNMIX_FROM_STRING, unmix_type) + if ica_temporal_weight < 0 or ica_temporal_weight > 1: + raise ValueError("ica_temporal_weight must be between zero and one") + + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_files, output_cell_set_files) + converged = ctypes.c_int() + isx._internal.c_api.isx_pca_ica_movie( + num_files, in_arr, out_arr, num_pcs, num_ics, unmix_type_int, ica_temporal_weight, + max_iterations, convergence_threshold, block_size, ctypes.byref(converged), 0, + auto_estimate_num_ics, average_cell_diameter) + + return converged.value > 0 + +def run_cnmfe( + input_movie_files, output_cell_set_files, output_dir='.', + cell_diameter=7, + min_corr=0.8, min_pnr=10, bg_spatial_subsampling=2, ring_size_factor=1.4, + gaussian_kernel_size=0, closing_kernel_size=0, merge_threshold=0.7, + processing_mode="parallel_patches", num_threads=4, patch_size=80, patch_overlap=20, + output_unit_type="df_over_noise"): + """ + Run CNMFe cell identification on movies. + + For more details see :ref:`CNMFe`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to run CNMFe on. + output_cell_set_files : list + The paths of the deconvolved output cell set files. Must be same length as input_movie_files. 
+ output_dir : str + Output directory for intermediary files (e.g., memory map files) + cell_diameter: int > 0 + Expected average diameter of a neuron in pixels + min_corr: float + Minimum correlation with neighbours when searching for seed pixels + min_pnr: float + Minimum peak-to-noise ratio when searching for seed pixels + bg_spatial_subsampling: int > 0 (1 for no downsampling) + Background spatial downsampling factor + ring_size_factor: float > 0 + Ratio of ring radius to neuron diameter used for estimating background + gaussian_kernel_size: int >= 0 (0 for automatic estimation) + Width of Gaussian kernel to use for spatial filtering + closing_kernel_size: int >= 0 (0 for automatic estimation) + Morphological closing kernel size + merge_threshold: float + Temporal correlation threshold for merging spatially close cells + processing_mode: string in {'all_in_memory', 'sequential_patches', 'parallel_patches'} + Processing mode for Cnmfe + num_threads: int > 0 + Number of threads to use for processing the data + patch_size: int > 1 + Size of a single patch + patch_overlap: int >= 0 + Amount of overlap between patches in pixels + output_unit_type: string in {'df', 'df_over_noise'} + Output trace units for temporal components + """ + processing_mode_map = {'all_in_memory':0, 'sequential_patches':1, 'parallel_patches':2} + output_unit_type_map = {'df' : 0, 'df_over_noise' : 1} + + num_cell_files, in_movie_arr1, out_cell_arr = isx._internal.check_input_and_output_files(input_movie_files, output_cell_set_files) + + isx._internal.c_api.isx_cnmfe_movie( + num_cell_files, in_movie_arr1, out_cell_arr, output_dir.encode('utf-8'), + cell_diameter, + min_corr, min_pnr, bg_spatial_subsampling, ring_size_factor, + gaussian_kernel_size, closing_kernel_size, merge_threshold, + processing_mode_map[processing_mode], num_threads, patch_size, patch_overlap, + output_unit_type_map[output_unit_type]) + +def spatial_filter( + input_movie_files, output_movie_files, low_cutoff=0.005, 
high_cutoff=0.500, + retain_mean=False, subtract_global_minimum=True): + """ + Apply spatial bandpass filtering to each frame of one or more movies. + + For more details see :ref:`spatialBandpassFilter`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to filter. + output_movie_files : list + The file paths of the output movies. Must be the same length as input_movie_files. + low_cutoff : float > 0 + If not None, then the low cutoff for the spatial filter. + high_cutoff : float > 0 + If not None, then the high cutoff for the spatial filter. + retain_mean : bool + If True, retain the mean pixel intensity for each frame (the DC component). + subtract_global_minimum : bool + If True, compute the minimum pixel intensity across all movies, and subtract this + after frame-by-frame mean subtraction. + By doing this, all pixel intensities will stay positive valued, and integer-valued + movies can stay that way. + """ + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_files, output_movie_files) + use_low = int(low_cutoff is not None) + use_high = int(high_cutoff is not None) + low_cutoff = low_cutoff if use_low else 0 + high_cutoff = high_cutoff if use_high else 0 + isx._internal.c_api.isx_spatial_band_pass_movie( + num_files, in_arr, out_arr, use_low, low_cutoff, use_high, high_cutoff, + int(retain_mean), int(subtract_global_minimum)) + + +def dff(input_movie_files, output_movie_files, f0_type='mean'): + """ + Compute DF/F movies, where each output pixel value represents a relative change + from a baseline. + + For more details see :ref:`DFF`. + + Arguments + --------- + input_movie_files : list + The file paths of the input movies. + output_movie_files : list + The file paths of the output movies. + f0_type : {'mean', 'min} + The reference image or baseline image used to compute DF/F. 
+ """ + f0_type_int = isx._internal.lookup_enum('f0_type', isx._internal.DFF_F0_FROM_STRING, f0_type) + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_files, output_movie_files) + isx._internal.c_api.isx_delta_f_over_f(num_files, in_arr, out_arr, f0_type_int) + + +def project_movie(input_movie_files, output_image_file, stat_type='mean'): + """ + Project movies to a single statistic image. + + For more details see :ref:`movieProjection`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to project. + output_image_file : str + The file path of the output image. + stat_type: {'mean', 'min', 'max', 'standard_deviation'} + The type of statistic to compute. + """ + stat_type_int = isx._internal.lookup_enum('stat_type', isx._internal.PROJECTION_FROM_STRING, stat_type) + num_files, in_arr = isx._internal.check_input_files(input_movie_files) + isx._internal.c_api.isx_project_movie(num_files, in_arr, output_image_file.encode('utf-8'), stat_type_int) + + +def event_detection( + input_cell_set_files, output_event_set_files, threshold=5, tau=0.2, + event_time_ref='beginning', ignore_negative_transients=True, accepted_cells_only=False): + """ + Perform event detection on cell sets. + + For more details see :ref:`eventDetection`. + + Arguments + --------- + input_cell_set_files : list + The file paths of the cell sets to perform event detection on. + output_event_set_files : list + The file paths of the output event sets. + threshold : float > 0 + The threshold in median-absolute-deviations that the trace has to cross to be considered an event. + tau : float > 0 + The minimum time in seconds that an event has to last in order to be considered. + event_time_ref : {'maximum', 'beginning', 'mid_rise'} + The temporal reference that defines the event time. + ignore_negative_transients : bool + Whether or not to ignore negative events. + accepted_cells_only : bool + If True, detect events only for accepted cells. 
+ """ + event_time_ref_int = isx._internal.lookup_enum('event_time_ref', isx._internal.EVENT_REF_FROM_STRING, event_time_ref) + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_cell_set_files, output_event_set_files) + isx._internal.c_api.isx_event_detection( + num_files, in_arr, out_arr, threshold, tau, event_time_ref_int, + int(ignore_negative_transients), int(accepted_cells_only)) + + +def trim_movie(input_movie_file, output_movie_file, crop_segments, keep_start_time=False): + """ + Trim frames from a movie to produce a new movie. + + For more details see :ref:`trimMovie`. + + Arguments + --------- + input_movie_file : str + The file path of the movie. + output_movie_file : str + The file path of the trimmed movie. + crop_segments : Nx2 array-like + A numpy array of shape (num_segments, 2), where each row contains the start and + end indices of frames that will be cropped out of the movie. Or a list like: + [(start_index1, end_index1), (start_index2, end_index2), ...]. + keep_start_time : bool + If true, keep the start time of the movie, even if some of its initial frames are to be trimmed. + """ + num_files, in_arr, out_arr = isx._internal.check_input_and_output_files(input_movie_file, output_movie_file) + if num_files != 1: + raise TypeError("Only one movie can be specified.") + + crop_segs = isx._internal.convert_to_nx2_numpy_array(crop_segments, int, 'crop_segments') + indices_arr = isx._internal.list_to_ctypes_array([crop_segs.shape[0]] + list(crop_segs.ravel()), ctypes.c_int) + + isx._internal.c_api.isx_temporal_crop_movie(1, in_arr, out_arr, indices_arr, keep_start_time) + + +def apply_cell_set(input_movie_files, input_cell_set_file, output_cell_set_files, threshold): + """ + Apply the images of a cell set to movies, producing a new cell sets. + + For more details see :ref:`applyContours`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to apply the cell set to. 
+ input_cell_set_file : list + The file path of the cell set to apply. + output_cell_set_files : list + The file paths of the output cell sets that will contain the images and new traces. + threshold : 0 >= float >= 1 + A threshold that will be applied to each footprint prior to application. + This indicates the fraction of the maximum image value that will be used as the + absolute threshold. + """ + num_movies, in_movie_arr, out_cs_arr = isx._internal.check_input_and_output_files(input_movie_files, output_cell_set_files) + num_cs_in, in_cs_arr = isx._internal.check_input_files(input_cell_set_file) + if num_cs_in != 1: + raise TypeError("Only one input cell set can be specified.") + isx._internal.c_api.isx_apply_cell_set(num_movies, in_movie_arr, out_cs_arr, in_cs_arr[0], threshold) + + +def apply_rois( + input_movie_files, + output_cell_set_files, + rois, + cell_names=[] +): + """ + Apply manually drawn rois on movies, producing new cell sets. + + For more details see :ref:`manualRois`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to apply the rois to. + output_cell_set_files : list + The file paths of the output cell sets that will contain the images and new traces. + rois: list>> + List of rois to apply. Must be one or more rois. + Each roi is a list of tuples of 2 integers representing the x, y coordinates of a single point. + cell_names: list + List of names to assign cells in the output cell sets. + If empty, then cells will have default names. 
+ """ + num_movies, in_movie_arr, out_cs_arr = isx._internal.check_input_and_output_files(input_movie_files, output_cell_set_files) + + # get cell names if not empty + num_rois = len(rois) + cell_names_arr = isx._internal.list_to_ctypes_array(cell_names, ctypes.c_char_p) + use_cell_names = len(cell_names) > 0 + if cell_names: + if num_rois != len(cell_names): + raise ValueError("Number of rois must equal number of cell names.") + + # get roi points + if num_rois == 0: + raise ValueError('At least one roi needs to be specified') + + # count the number of points per roi + num_points_per_roi = [] + for roi in rois: + num_points_per_roi.append(len(roi)) + for point in roi: + if(len(point) != 2): + raise ValueError('All points must only have two coordinates (x and y-coordinates, respectively)') + + # flatten the roi array so it can be passed to c_types + rois = [ + x + for xs in rois + for x in xs + ] + + # get pointer to points memory + points = isx._internal.ndarray_as_type(np.array(rois), np.dtype(np.int64)) + points_p = points.ctypes.data_as(isx._internal.Int64Ptr) + + num_points_per_roi = isx._internal.ndarray_as_type(np.array(num_points_per_roi), np.dtype(np.int64)) + num_points_per_roi_p = num_points_per_roi.ctypes.data_as(isx._internal.Int64Ptr) + + isx._internal.c_api.isx_apply_rois( + num_movies, + in_movie_arr, + out_cs_arr, + num_rois, + num_points_per_roi_p, + points_p, + use_cell_names, + cell_names_arr + ) + + +def longitudinal_registration( + input_cell_set_files, output_cell_set_files, input_movie_files=[], output_movie_files=[], + csv_file='', min_correlation=0.5, accepted_cells_only=False, + transform_csv_file='', crop_csv_file=''): + """ + Run longitudinal registration on multiple cell sets. + + Optionally, also register the corresponding movies the cell sets were derived from. + + For more details see :ref:`LongitudinalRegistration`. + + Arguments + --------- + input_cell_set_files : list + The file paths of the cell sets to register. 
+ output_cell_set_files : list + The file paths of the output cell sets. + input_movie_files : list + The file paths of the associated input movies (optional). + output_movie_files: list + The file paths of the output movies (optional) + csv_file : str + The path of the output CSV file to be written (optional). + min_correlation : 0 >= float >= 1 + The minimum correlation between cells to be considered a match. + accepted_cells_only : bool + Whether or not to use accepted cells from the input cell sets only, or to use both accepted and undecided cells. + transform_csv_file : str + The file path of the CSV file to store the affine transform parameters + from the reference cellset to each cellset. + Each row represents an input cell set and contains the values in the + 2x3 affine transform matrix in a row-wise order. + I.e. if we use a_{i,j} to represent the values in the 2x2 upper left + submatrix and t_{i} to represent the translations, the values are + written in the order: a_{0,0}, a_{0,1}, t_{0}, a_{1,0}, a_{1,1}, t_{1}. + crop_csv_file : str + The file path of the CSV file to store the crop rectangle applied after + transforming the cellsets and movies. + The format of the crop rectangle is a comma separated list: x,y,width,height. 
+ """ + num_cell_files, in_cell_arr, out_cell_arr = isx._internal.check_input_and_output_files(input_cell_set_files, output_cell_set_files) + num_movie_files, in_movie_arr, out_movie_arr = isx._internal.check_input_and_output_files(input_movie_files, output_movie_files) + if (num_movie_files > 0) and (num_movie_files != num_cell_files): + raise ValueError("If specified, the number of movies must be the same as the number of cell sets.") + isx._internal.c_api.isx_longitudinal_registration(num_cell_files, in_cell_arr, out_cell_arr, in_movie_arr, out_movie_arr, csv_file.encode('utf-8'), min_correlation, int(not accepted_cells_only), int(num_movie_files > 0), transform_csv_file.encode('utf-8'), crop_csv_file.encode('utf-8')) + + +def auto_accept_reject(input_cell_set_files, input_event_set_files, filters=None): + """ + Automatically classify cell statuses as accepted or rejected. + + For more details see :ref:`autoAcceptReject`. + + Arguments + --------- + input_cell_set_files : list + The file paths of the cell sets to classify. + input_event_set_files : list + The file paths of the event sets to use for classification. + filters : list<3-tuple> + Each element describes a filter as (, , ). + The statistic must be one of {'# Comps', 'Cell Size', 'SNR', 'Event Rate'}. + The operator must be one of {'<', '=', '>'}. + The value is a floating point number. 
+ """ + num_cell_sets, in_cell_arr = isx._internal.check_input_files(input_cell_set_files) + num_event_sets, in_event_arr = isx._internal.check_input_files(input_event_set_files) + + statistics = [] + operators = [] + values = [] + num_filters = 0 + if filters is not None: + if isinstance(filters, list): + statistics, operators, values = map(list, zip(*filters)) + num_filters = len(filters) + else: + raise TypeError('Filters must be contained in a list.') + + in_statistics = isx._internal.list_to_ctypes_array(statistics, ctypes.c_char_p) + in_operators = isx._internal.list_to_ctypes_array(operators, ctypes.c_char_p) + in_values = isx._internal.list_to_ctypes_array(values, ctypes.c_double) + + isx._internal.c_api.isx_classify_cell_status( + num_cell_sets, in_cell_arr, num_event_sets, in_event_arr, + num_filters, in_statistics, in_operators, in_values, + 0, isx._internal.SizeTPtr()) + + +def cell_metrics(input_cell_set_files, input_event_set_files, output_metrics_file, recompute_metrics=True): + """ + Compute cell metrics for a given cell set and events combination. + + For more details see :ref:`cellMetrics`. + + Arguments + --------- + input_cell_set_files : list + One or more input cell sets. + input_event_set_files : list + One or more events files associated with the input cell sets. + output_metrics_file : str + One .csv file that will be written which contain cell metrics. + If more than one input cell set & events file is passed, then the inputs are treated as a time-series. + recompute_metrics : bool + Flag indicating whether cell metrics should be recomputed from input files + otherwise cell metrics stored in the input files are exported. + If no cell metrics are stored in the input files and this flag is set to False, + then this function will throw an error. 
+ """ + num_cs_in, in_cs_arr = isx._internal.check_input_files(input_cell_set_files) + num_events_in, in_events_arr = isx._internal.check_input_files(input_event_set_files) + if num_cs_in != num_events_in: + raise TypeError("The number of cell sets and events must be the same.") + isx._internal.c_api.isx_compute_cell_metrics(num_cs_in, in_cs_arr, in_events_arr, output_metrics_file.encode('utf-8'), recompute_metrics) + + +def export_cell_contours(input_cell_set_file, output_json_file, threshold=0.0, rectify_first=True): + """ + Export cell contours to a JSON file. + + If a cell image has multiple components the contour for each component is exported in a separate array. + + These are the contours calculated from preprocessed cell images as described in :ref:`cellMetrics`. + + Arguments + --------- + input_movie_file : str + The file path of a cell set. + output_json_file : str + The file path to the output JSON file to be written. + threshold : 0 >= float >= 1 + The threshold to apply to the footprint before computing the contour, specified as a + fraction of the maximum pixel intensity. + rectify_first : bool + Whether or not to rectify the image (remove negative components) prior to computing the threshold. + """ + num_cs_in, in_cs_arr, out_js_arr = isx._internal.check_input_and_output_files(input_cell_set_file, output_json_file) + if num_cs_in != 1: + raise TypeError("Only one input cell set can be specified.") + isx._internal.c_api.isx_export_cell_contours(num_cs_in, in_cs_arr, out_js_arr, threshold, int(rectify_first)) + + +def multiplane_registration( + input_cell_set_files, + output_cell_set_file, + min_spat_correlation=0.5, + temp_correlation_thresh=0.99, + accepted_cells_only=False): + """ + Identify unique signals in 4D imaging data using longitudinal registration + of spatial footprints and temporal correlation of activity. + + :param input_cell_set_files: (list ) the file paths of the cell sets from de-interleaved multiplane movies. 
+ :param output_cell_set_file: (str) the file path of the output cell set of multiplane registration. + :param min_spat_correlation: (0 <= float <= 1) the minimum spatial overlap between cells to be considered a match. + :param temp_correlation_thresh: (0 <= float <= 1) the percentile of the comparison distribution below which + activity correlations are considered from distinct signals + :param accepted_cells_only: (bool) whether or not to include only accepted cells from the input cell sets. + """ + if not 0 <= min_spat_correlation <= 1: + raise TypeError("Spatial correlation must be between 0 and 1.") + if not 0 <= temp_correlation_thresh <= 1: + raise TypeError("Temporal correlation threshold must be between 0 and 1.") + num_cs_in, in_cs_arr, out_cs_arr = isx._internal.check_input_and_output_files(input_cell_set_files, output_cell_set_file, True) + isx._internal.c_api.isx_multiplane_registration( + num_cs_in, + in_cs_arr, + out_cs_arr, + min_spat_correlation, + temp_correlation_thresh, + accepted_cells_only + ) + +def estimate_num_ics( + input_image_files, + average_diameter = None, + min_diameter = None, + max_diameter = None, + min_inter_dist = 0): + """ + Estimates ICs parameter on a projection image of a movie to be run through PCA-ICA. + Images should be DF/F projections for best results. + Should either give the average diameter, or the min and max diameter of cells. 
+ + :param input_image_files: (list ) the file paths of the df/f projection images + :pstsm average_diameter: (0 < float) average diameter of cells in pixels + :param min_diameter: (0 < float) minimum diameter of a cell in pixels + :param max_diameter: (min_diameter < float) maximum diameter of a cell in pixels + :param min_inter_dist: (0 <= float) minimum allowable distance between adjacent cells + :return: (list ) number of estimated cells in each input image + """ + if (average_diameter is None) and (min_diameter is None or max_diameter is None): + raise ValueError("Either average diameter or min and max diameters should be given.") + + if min_diameter is not None and max_diameter is not None: + if not 0 < min_diameter: + raise ValueError("Minimum diameter should be positive.") + if not min_diameter < max_diameter: + raise ValueError("Maximum diameter should be greater than minimum diameter.") + else: + if not 0 < average_diameter: + raise ValueError("Average diameter should be positive.") + min_diameter = average_diameter / 3. + max_diameter = average_diameter * (5. / 3.) + + num_img_in, in_img_arr = isx._internal.check_input_files(input_image_files) + ic_count = (ctypes.c_size_t * num_img_in)() + isx._internal.c_api.isx_estimate_num_ics( + num_img_in, + in_img_arr, + min_diameter, + max_diameter, + min_inter_dist, + ic_count + ) + + return list(ic_count) if num_img_in > 1 else ic_count[0] + +def de_interleave_dualcolor(input_movie_files, output_green_movie_files, output_red_movie_files, correct_chromatic_shift=True): + """ + De-interleave dual-color movies. + + Arguments + --------- + input_movie_files : list + The file paths to the input movies. + output_green_movie_files : list + The file paths to write the de-interleaved green output movies to. + output_red_movie_files : list + The file paths to write the de-interleaved red output movies to. + correct_chromatic_shift : bool + If true, correct chromatic shift of green and red channels. 
+ The correction will only be applied if the movie is wide-field (i.e., acquired from the LScape module). + """ + num_in_files, in_arr = isx._internal.check_input_files(input_movie_files) + num_output_files_green, out_arr_green = isx._internal.check_input_files(output_green_movie_files) + num_output_files_red, out_arr_red = isx._internal.check_input_files(output_red_movie_files) + + if num_output_files_green != num_in_files: + raise ValueError('Number of green output files must match the number of input files.') + if num_output_files_red != num_in_files: + raise ValueError('Number of red output files must match the number of input files.') + + isx._internal.c_api.isx_deinterleave_dualcolor_movie(num_in_files, in_arr, out_arr_green, out_arr_red, correct_chromatic_shift) + +def multicolor_registration( + input_cellset_file1, + input_cellset_file2, + output_spatial_overlap_csv_file, + output_registration_matrix_csv_file, + output_directory='.', + lower_threshold=0.2, + upper_threshold=0.5, + accepted_cells_only=False, + save_matched_cellset=True, + save_unmatched_cellset=True, + save_uncertain_cellset=True, + image_format="tiff"): + """ + Run multicolor registration on two cell sets. + + Arguments + --------- + input_cellset_file1 : str + Path to the first .isxd cellset file. + input_cellset_file2 : str + Path to the second .isxd cellset file. + output_spatial_overlap_csv_file : str + Path to the .csv file containing the pairwise spatial overlap scores. + output_registration_matrix_csv_file : str + Path to the .csv file containing the registration matrix. + output_directory : str + Path to the output directory. Generated cellsets and images will be saved in this directory. + lower_threshold : double + Maximum score between two cells that can be rejected as a match. + upper_threshold : double + Minimum score between two cells that can be accepted as a match. 
+ accepted_cells_only : bool + Whether or not to use accepted cells from the input cell sets only, or to use both accepted and undecided cells. + save_matched_cellset : bool + Whether or not to save the matched cells from the primary cellset to a cellset file. + save_unmatched_cellset : bool + Whether or not to save the unmatched cells from the primary cellset to a cellset file. + save_uncertain_cellset : bool + Whether or not to save the uncertain cells from the primary cellset to a cellset file. + image_format : str in {"tiff", "png"} + File format to use for the images to save + """ + for input_file in [input_cellset_file1, input_cellset_file2]: + if not os.path.exists(input_file): + raise FileNotFoundError('Input file not found: {}'.format(input_file)) + + for output_file in [output_spatial_overlap_csv_file, output_registration_matrix_csv_file]: + if os.path.exists(output_file): + raise FileExistsError('Output file already exists: {}'.format(output_file)) + + if not os.path.exists(output_directory): + os.makedirs(output_directory) + + if not 0 <= lower_threshold <= 1: + raise TypeError("Lower threshold must be between 0 and 1.") + if not 0 <= upper_threshold <= 1: + raise TypeError("Upper threshold must be between 0 and 1.") + if image_format not in ["tiff","png"]: + raise TypeError("Image format must be either 'tiff' or 'png'.") + + isx._internal.c_api.isx_multicolor_registration( + input_cellset_file1.encode('utf-8'), + input_cellset_file2.encode('utf-8'), + output_spatial_overlap_csv_file.encode('utf-8'), + output_registration_matrix_csv_file.encode('utf-8'), + output_directory.encode('utf-8'), + lower_threshold, + upper_threshold, + accepted_cells_only, + save_matched_cellset, + save_unmatched_cellset, + save_uncertain_cellset, + image_format.encode('utf-8')) + +def binarize_cell_set(input_cellset_file, output_cellset_file, threshold, use_percentile_threshold=False): + """Apply a threshold to each footprint in a cell set to produce a new cell set with 
def binarize_cell_set(input_cellset_file, output_cellset_file, threshold, use_percentile_threshold=False):
    """Apply a threshold to each footprint in a cell set to produce a new cell set with
    binary footprints.

    Arguments
    ---------
    input_cellset_file : str
        Path to the .isxd cellset file to binarize. Each cell footprint is
        transformed independently and added back to the output cellset.
    output_cellset_file : str
        Path to the .isxd cellset file that has been binarized.
    threshold : double
        Threshold for updating pixels.
        Pixels with values above threshold are set to 1, otherwise set to 0.
    use_percentile_threshold : bool
        If true, the provided threshold is treated as a percentile.
    """
    # Bug fix: a missing input file is a FileNotFoundError (was FileExistsError),
    # consistent with the sibling cell-set functions in this module.
    if not os.path.exists(input_cellset_file):
        raise FileNotFoundError('Input file not found: {}'.format(input_cellset_file))

    if os.path.exists(output_cellset_file):
        raise FileExistsError('Output file already exists: {}'.format(output_cellset_file))

    if use_percentile_threshold and (threshold < 0 or threshold > 100):
        raise ValueError('Percentile threshold must be between 0 and 100.')

    isx._internal.c_api.isx_binarize_cell_set(
        input_cellset_file.encode('utf-8'),
        output_cellset_file.encode('utf-8'),
        threshold,
        use_percentile_threshold)


def crop_cell_set(input_cellset_file, output_cellset_file, crop):
    """Crop each footprint of a cell set to produce a new cell set with the desired size.

    Arguments
    ---------
    input_cellset_file : str
        Path to the .isxd cellset file to crop.
    output_cellset_file : str
        Path to the .isxd cellset file that has been cropped.
    crop : 4-tuple
        A list of 4 values indicating how many pixels to crop on each side: [left, right, top, bottom].
    """
    if not os.path.exists(input_cellset_file):
        # Bug fix: the message formatted an undefined name `input_file`, so this
        # path raised NameError instead of the intended FileNotFoundError.
        raise FileNotFoundError('Input file not found: {}'.format(input_cellset_file))

    if os.path.exists(output_cellset_file):
        raise FileExistsError('Output file already exists: {}'.format(output_cellset_file))

    if len(crop) != 4:
        raise ValueError('The amount of cropping for all 4 sides must be specified as [left, right, top, bottom].')
    if any(k < 0 for k in crop):
        raise ValueError('The amount of cropping on each side must be a positive integer.')

    isx._internal.c_api.isx_crop_cell_set(
        input_cellset_file.encode('utf-8'),
        output_cellset_file.encode('utf-8'),
        crop[0], crop[1], crop[2], crop[3])


def transform_cell_set(input_cellset_file, output_cellset_file, pad_value=np.nan):
    """Transform an isxd cell set to its pre-motion-correction dimensions by padding the cell footprints.

    Arguments
    ---------
    input_cellset_file : str
        Path to a .isxd cellset file to align. Each cell footprint is
        transformed independently and added back to the output cellset.
    output_cellset_file : str
        Path to the .isxd cellset file that has been transformed.
    pad_value : valid numpy value
        Value to fill the padded region of the footprints.
    """
    if not os.path.exists(input_cellset_file):
        raise FileNotFoundError('Input file not found: {}'.format(input_cellset_file))

    if os.path.exists(output_cellset_file):
        raise FileExistsError('Output file already exists: {}'.format(output_cellset_file))

    isx._internal.c_api.isx_transform_cell_set(
        input_cellset_file.encode('utf-8'),
        output_cellset_file.encode('utf-8'),
        pad_value)
+ If the two footprints are binary then the spatial overlap is computed as the pairwise Jaccard index + If the two footprints are analog then the spatial overlap is computed as the pairwise normalized cross correlation + In order to compute the spatial overlap between binary and analog footprints it's necessary to binarize the analog footprints first. + + Arguments + --------- + input_cellset_file1 : str + Path to the first .isxd cellset file. + input_cellset_file2 : str + Path to the second .isxd cellset file. + output_csv_file : str + Path to the .csv file containing the f1 scores. + accepted_cells_only : bool + Whether or not to use accepted cells from the input cell sets only, or to use both accepted and undecided cells. + """ + for input_file in [input_cellset_file1, input_cellset_file2]: + if not os.path.exists(input_file): + raise FileNotFoundError('Input file not found: {}'.format(input_file)) + + if os.path.exists(output_csv_file): + raise FileExistsError('Output file already exists: {}'.format(output_csv_file)) + + isx._internal.c_api.isx_compute_spatial_overlap_cell_set( + input_cellset_file1.encode('utf-8'), + input_cellset_file2.encode('utf-8'), + output_csv_file.encode('utf-8'), + accepted_cells_only) + +def register_cellsets( + input_cellset_file1, + input_cellset_file2, + output_spatial_overlap_csv_file, + output_registration_matrix_csv_file, + output_directory='.', + lower_threshold=0.3, + upper_threshold=0.7, + accepted_cells_only=False, + primary_cellset_name='primary', + secondary_cellset_name='secondary', + primary_color=0x00FF00, + secondary_color=0xFF0000): + """ + Register two cellsets + Computes the pairwise spatial overlap of two cellsets in order to match cells + If the two footprints are binary then the spatial overlap is computed as the pairwise Jaccard index + If the two footprints are analog then the spatial overlap is computed as the pairwise normalized cross correlation + Throws an exception if the footprint types of the cellsets 
are incomptible (i.e., binary and analog) + + Arguments + --------- + input_cellset_file1 : str + Path to the first .isxd cellset file. + input_cellset_file2 : str + Path to the second .isxd cellset file. + output_spatial_overlap_csv_file : str + Path to the .csv file containing the pairwise spatial overlap scores. + output_registration_matrix_csv_file : str + Path to the .csv file containing the registration matrix. + output_directory : str + Path to the output directory. Cellmaps will be save to this directory. + lower_threshold : double + Maximum score between two cells that can be rejected as a match. + upper_threshold : double + Minimum score between two cells that can be accepted as a match. + accepted_cells_only : bool + Whether or not to use accepted cells from the input cell sets only, or to use both accepted and undecided cells. + primary_cellset_name : string + Name of the first cellset to use in .csv files + secondary_cellset_name : string + Name of the second cellset to use in .csv files + primary_color : int > 0 + Color of cells from the first cellset to use in cellmaps + secondary_color : int > 0 + Color of cells from the second cellset to use in cellmaps + """ + for input_file in [input_cellset_file1, input_cellset_file2]: + if not os.path.exists(input_file): + raise FileNotFoundError('Input file not found: {}'.format(input_file)) + + for output_file in [output_spatial_overlap_csv_file, output_registration_matrix_csv_file]: + if os.path.exists(output_file): + raise FileExistsError('Output file already exists: {}'.format(output_file)) + + if not os.path.exists(output_directory): + os.makedirs(output_directory) + + if not 0 <= lower_threshold <= 1: + raise TypeError("Lower threshold must be between 0 and 1.") + if not 0 <= upper_threshold <= 1: + raise TypeError("Upper threshold must be between 0 and 1.") + + isx._internal.c_api.isx_register_cellsets( + input_cellset_file1.encode('utf-8'), + input_cellset_file2.encode('utf-8'), + 
def deconvolve_cellset(
        input_raw_cellset_files,
        output_denoised_cellset_files=None,
        output_spike_eventset_files=None,
        accepted_only=False,
        spike_snr_threshold=3.0,
        noise_range=(0.25, 0.5),
        noise_method='mean',
        first_order_ar=True,
        lags=5,
        fudge_factor=0.96,
        deconvolution_method='oasis'):
    """
    Deconvolve temporal traces of cellsets.

    Arguments
    ---------
    input_raw_cellset_files : list<str>
        The file paths of the cellsets to perform deconvolution on.
    output_denoised_cellset_files : list<str>
        The file paths of the output denoised cellsets. If None, then not created.
    output_spike_eventset_files : list<str>
        The file paths of the output spike eventsets. If None, then not created.
    accepted_only : bool
        If True, only deconvolve for accepted cells, otherwise accepted and undecided.
    spike_snr_threshold : float > 0
        SNR threshold for spike outputs. This is in units of noise which is estimated from the raw temporal traces.
    noise_range : 0 <= 2-tuple <= 1
        Range of frequencies to average for estimating pixel noise.
        Maximum frequency must be greater than or equal to minimum frequency.
    noise_method : str
        Specifies averaging method for noise. Must be one of ('mean', 'median', 'logmexp').
    first_order_ar : bool
        If True, use an AR(1) model, otherwise use AR(2).
    lags : int > 0
        Number of lags for estimating time constant.
    fudge_factor : float > 0
        Fudge factor for reducing time constant bias.
    deconvolution_method : str
        Deconvolution method for calcium dynamics. Must be one of ('oasis', 'scs').
        Note: SCS is significantly slower than OASIS but AR(2) models are currently only supported with SCS.
    """
    if deconvolution_method == 'oasis' and not first_order_ar:
        raise ValueError("Deconvolution with OASIS only works for an AR(1) model")

    if noise_range[1] < noise_range[0]:
        raise ValueError("Maximum must be greater than or equal to minimum for noise range")

    if not output_denoised_cellset_files and not output_spike_eventset_files:
        raise ValueError("Must specify at least one type of deconvolution output")

    # Map user-facing names to the enum values expected by the C API.
    noise_method_map = {'mean': 0, 'median': 1, 'logmexp': 2}
    deconvolution_method_map = {'oasis': 0, 'scs': 1}

    # Robustness: fail with a clear message instead of a bare KeyError for bad values.
    if noise_method not in noise_method_map:
        raise ValueError("noise_method must be one of ('mean', 'median', 'logmexp')")
    if deconvolution_method not in deconvolution_method_map:
        raise ValueError("deconvolution_method must be one of ('oasis', 'scs')")

    noise_method = noise_method_map[noise_method]
    deconvolution_method = deconvolution_method_map[deconvolution_method]

    # At least one of the two output branches runs (checked above), so
    # num_files/in_raw_arr are always bound before the C call.
    out_denoised_arr = None
    if output_denoised_cellset_files:
        num_files, in_raw_arr, out_denoised_arr = isx._internal.check_input_and_output_files(
            input_raw_cellset_files, output_denoised_cellset_files)

    out_spike_arr = None
    if output_spike_eventset_files:
        num_files, in_raw_arr, out_spike_arr = isx._internal.check_input_and_output_files(
            input_raw_cellset_files, output_spike_eventset_files)

    isx._internal.c_api.isx_cellset_deconvolve(
        num_files,
        in_raw_arr,
        out_denoised_arr,
        out_spike_arr,
        accepted_only,
        spike_snr_threshold,
        noise_range[0],
        noise_range[1],
        noise_method,
        first_order_ar,
        lags,
        fudge_factor,
        deconvolution_method)
+ lines : Union[list>, np.ndarray] + The pairs of points to perform analysis on. + This can be represented as a list of points, or a numpy.ndarray object + e.g. [ [ [1, 1], [2, 2] ], [ [2, 2], [3, 3] ], [ [3, 3], [4, 4] ]] + time_window : float + This specifies the duration in seconds of the time window to use for every measurement. + time_increment : float + This specifies the time shift in seconds between consecutive measurements. + When the time increment is smaller than the time window, consecutive windows will overlap. + The time increment must be greater than or equal to the time window. + output_units : string in {'pixels', 'microns'} + Output units for vessel diameter estimation. + estimation_method : string in {'Non-Parametric FWHM', 'Parametric FWHM'} + The type of method to use for vessel diameter estimation. + Both methods estimate diameter from a line profile extracted from the input movie using the input contours. + Parametric FWHM fits the line profile to a Lorentzian curve. + Non-Parametric FWHM measures the distance between the midpoints of the line profile peak. + height_estimate_rule: string in {'independent', 'global', 'local'} + Used in Non-Parametric FWHM estimation method. + Describes the method to use for determing the midpoint height on each side of the line profile peak. + Can be one of the following values: + * global: Take the halfway point between the max and the global min. + * local: Take the largest of the two halfway points between min/max. + * independent: The height estimate will be independent on both sides of the peak. + auto_accept_reject: bool + Flag indicating whether the vessels should be auto accepted/rejected. + Rejected vessels are identified as those with derivatives greater than a particular fraction of the mean. + rejection_threshold_fraction: float + Parameter for auto accept/reject functionality. + The max fraction of the mean diameter allowed for a derivative in a particular vessel diameter trace. 
+ rejection_threshold_count: int + Parameter for auto accept/reject functionality. + The number of threshold crossings allowed in a particular vessel diameter trace. + """ + + # File checks + # - Input files must exist + # - Number of input files must match number of output files + num_files, movie_files, vessel_set_files = isx._internal.check_input_and_output_files(input_movie_files, output_vessel_set_files) + + output_units_map = {'pixels' : 0, 'microns' : 1} + + # Points check + num_lines = len(lines) + if num_lines <= 0: + raise ValueError('At least one line needs to be specified') + + for pair in lines: + if(len(pair) != 2): + raise ValueError('All pairs must have two points.') + for point in pair: + if(len(point) != 2): + raise ValueError('All points must only have two coordinates (x and y-coordinates, respectively)') + + points = isx._internal.ndarray_as_type(np.array(lines), np.dtype(np.int64)) + points_p = points.ctypes.data_as(isx._internal.Int64Ptr) + + dim_points = points.ndim + if dim_points != 3: + raise ValueError('Input points must be a 3-D numpy array') + + # Time window and Time increment check + if time_increment <= 0 or time_window <= 0: + raise ValueError('Time increment and time window must be greater than 0') + + if not output_units in output_units_map.keys(): + raise ValueError('Invalid units. 
def estimate_rbc_velocity(
        input_movie_files,
        output_vessel_set_files,
        rois,
        time_window=10,
        time_increment=2,
        output_units="pixels",
        save_correlation_heatmaps=True):
    """
    Estimates red blood cell (rbc) velocity within each region of interest over time.

    Arguments
    ---------
    input_movie_files : list<str>
        The file paths of the movies to analyse.
    output_vessel_set_files : list<str>
        The file paths of the output vessel set files.
    rois : Union[list<list<list<int>>>, np.ndarray]
        The groups of points to perform analysis on.
        This can be represented as a list of points, or a numpy.ndarray object
        E.g. [ [ [0, 0], [0, 1], [1, 0], [1, 1] ], [ [2, 0], [2, 1], [3, 0], [3, 1] ] ]
    time_window : float
        This specifies the duration in seconds of the time window to use for every measurement.
    time_increment : float
        This specifies the time shift in seconds between consecutive measurements.
        When the time increment is smaller than the time window, consecutive windows will overlap.
        The time increment must be greater than or equal to the time window.
    output_units : string in {'pixels', 'microns'}
        Output units for vessel velocity estimation.
    save_correlation_heatmaps : bool
        This specifies whether to save the correlation heatmaps to the vessel set or not
    """
    # File checks:
    # - Input files must exist
    # - Number of input files must match number of output files
    num_files, movie_files, vessel_set_files = isx._internal.check_input_and_output_files(
        input_movie_files, output_vessel_set_files)

    # NOTE(review): these enum values appear offset by 2 relative to the diameter
    # estimation units map — presumably matching a shared C enum; confirm in isxcore.
    output_units_map = {'pixels': 2, 'microns': 3}

    # ROI checks: each ROI is 4 points, each point is an (x, y) pair.
    num_rois = len(rois)
    if num_rois <= 0:
        raise ValueError('At least one ROI needs to be specified')

    for vertices in rois:
        if len(vertices) != 4:
            raise ValueError('All rois must have 4 points.')
        for point in vertices:
            if len(point) != 2:
                raise ValueError('All points must only have two coordinates (x and y-coordinates, respectively)')

    points = isx._internal.ndarray_as_type(np.array(rois), np.dtype(np.int64))
    points_p = points.ctypes.data_as(isx._internal.Int64Ptr)

    if points.ndim != 3:
        raise ValueError('Input points must be a 3-D numpy array')

    if time_increment <= 0 or time_window <= 0:
        raise ValueError('Time increment and time window must be greater than 0')

    if output_units not in output_units_map:
        # Bug fix: the old message used format(*keys), which bound only the first
        # key to the single placeholder and silently dropped the rest.
        raise ValueError('Invalid units. Valid units include: {}'.format(', '.join(output_units_map.keys())))

    isx._internal.c_api.isx_estimate_rbc_velocity(
        num_files, movie_files, vessel_set_files, num_rois, points_p,
        time_window, time_increment, output_units_map[output_units],
        save_correlation_heatmaps)
+ accepted_cells_only : bool + If True, output movies will only included the activity from accepted cells. + """ + num_files, movie_files, neural_movie_files = isx._internal.check_input_and_output_files(input_cell_set_files, output_neural_movie_files) + + isx._internal.c_api.isx_create_neural_activity_movie( + num_files, + movie_files, + neural_movie_files, + accepted_cells_only + ) + + +def interpolate_movie( + input_movie_files, + output_interpolated_movie_files, + interpolate_dropped=True, + interpolate_blank=True, + max_consecutive_invalid_frames=1 +): + """ + Replace invalid movie time samples with interpolated data. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to process. + output_interpolated_movie_files : list + The file paths of the output interpolated movie files. + interpolate_dropped : bool + If True, dropped frames will be interpolated. + interpolate_blank : bool + If True, blank frames will interpolated. + max_consecutive_invalid_frames : int > 0 + The maximum number of consecutive invalid frames that can be interpolated over. + """ + num_files, movie_files, interpolated_movie_files = isx._internal.check_input_and_output_files(input_movie_files, output_interpolated_movie_files) + + isx._internal.c_api.isx_interpolate_movie( + num_files, + movie_files, + interpolated_movie_files, + interpolate_dropped, + interpolate_blank, + max_consecutive_invalid_frames + ) + +def estimate_vessel_diameter_single_vessel( + input_movie_file, + line, + start_frame, + end_frame, + output_coordinates=False +): + """ + Estimates blood vessel diameter for a single vessel. + This function exposes an internal part of the function ``isx.estimate_vessel_diameter``, + where a single measurement of diameter is estimated by fitting a Lorentzian curve to a line profile. + The purpose of this function is help troubleshoot performance of the vessel diameter algorithm + when it does not work as expected. 
+ + Arguments + --------- + input_movie_file : str + The file path of the movie to analyse. + line : Union[list>, tuple, tuple>, np.ndarray] + A line to measure diameter with. + This can be represented as a list of two points, a tuple of two points, or a ``np.ndarray`` object. + E.g. ((1, 1), (2, 2)) + start_frame : int >= 0 + Start frame of the window to measure diameter from in the input movie. + end_frame : int >= 0 + End frame of the window to measure diameter from in the input movie. + output_coordinates : bool, optional + If true, output (x, y) coordinates of the pixels sampled along the user-defined line + This is an optional parameter that is set to false by default. + + Returns + ------- + line_profile : np.ndarray + 1D array containing the line profile extracted from the input movie. + The values in the line profile are in units of pixel intensity relative to the input movie. + The values are extracted from a mean projection image of the input movie for the specified window range. + The line profile is calculated by averaging three parallel line profiles of equal length including the input line + and two additional lines, each one pixel apart on either side of the input line. + The line profile is then background subtracted using the minimum value in the pixel values. + **Note**: The line profile is extracted from the input movie by representing the input line as a raster line segment. + The raster line segment is computed using the following OpenCV function: `cv::LineIterator `_. + The raster scan results in a line profile that does not neccessarily have the same number of pixels as the length of the input line. + This has implications for how the Lorentzian curve fit results should be interpreted. + Two dictionaries are returned by this function. + The first, ``model_fit``, is relative the raster line segment. + The second, ``diameter_estimate``, is relative to the user-defined contour line in image space. 
+ model_fit : dict + Dictionary containing the Lorentzian curve fit parameters. Includes the following keys. + ``amplitude``: the peak amplitude of the Lorentzian curve. + ``fwhm``: the full width half max of the Lorentzian curve. + This measurement is in pixels and is relative to the number of pixels in the line profile. + ``peak_center``: the peak center of Lorentzian curve. + This measurement is in pixels and is relative to the number of pixels in the line profile. + Using these curve fit parameters, the Lorentzian function can be charecterized as the following function: + ``L(x) = (amplitude * 0.5 * fwhm / PI) / ((x - peak_center)^2 + (0.5 * fwhm)^2))`` + diameter_estimate : dict + Dictionary containing the estimated diameter results. + These values are obtained by scaling the ``fwhm`` and ``peak_center`` values from the ``model_fit`` dictionary by the relative length of the full contour line (in image space) to the length of the line profile. + Includes the following keys. + ``length``: the length of the diameter estimate. + ``center``: the center point of the diameter estimate on the user line, relative to the start point of the input line. + line_coords : np.ndarray, optional + 2D array containing (x, y) coordinates of the pixels values in image space for the raster line segment representing the input line. 
+ """ + def get_vessel_line_num_pixels(input_movie_file, line): + movie = isx.Movie.read(input_movie_file) + image_width, image_height = movie.spacing.num_pixels + points = isx._internal.ndarray_as_type(np.array(line), np.dtype(np.int64)) + points_p = points.ctypes.data_as(isx._internal.Int64Ptr) + + num_pixels = ctypes.c_int() + num_pixels_p = ctypes.pointer(num_pixels) + isx._internal.c_api.isx_get_vessel_line_num_pixels(points_p, image_width, image_height, num_pixels_p) + return num_pixels.value + + def get_vessel_line_coordinates(input_movie_file, line, num_pixels): + movie = isx.Movie.read(input_movie_file) + image_width, image_height = movie.spacing.num_pixels + + line_x = np.zeros((num_pixels,), dtype=np.int32) + line_x_p = line_x.ctypes.data_as(isx._internal.IntPtr) + + line_y = np.zeros((num_pixels,), dtype=np.int32) + line_y_p = line_y.ctypes.data_as(isx._internal.IntPtr) + + isx._internal.c_api.isx_get_vessel_line_coordinates(points_p, image_width, image_height, line_x_p, line_y_p) + line_coords = np.column_stack((line_x, line_y)) + + return line_coords + + num_pixels = get_vessel_line_num_pixels(input_movie_file, line) + + line_profile = np.zeros((num_pixels,), dtype=np.float64) + fit_amplitude = ctypes.c_double() + fit_fwhm = ctypes.c_double() + fit_peak_center = ctypes.c_double() + estimate_length = ctypes.c_double() + estimate_center = ctypes.c_double() + + line_profile_p = line_profile.ctypes.data_as(isx._internal.DoublePtr) + fit_amplitude_p = ctypes.pointer(fit_amplitude) + fit_fwhm_p = ctypes.pointer(fit_fwhm) + fit_peak_center_p = ctypes.pointer(fit_peak_center) + estimate_length_p = ctypes.pointer(estimate_length) + estimate_center_p = ctypes.pointer(estimate_center) + + points = isx._internal.ndarray_as_type(np.array(line), np.dtype(np.int64)) + points_p = points.ctypes.data_as(isx._internal.Int64Ptr) + + isx._internal.c_api.isx_estimate_vessel_diameter_single_vessel( + input_movie_file.encode('utf-8'), + points_p, + start_frame, + end_frame, 
+ line_profile_p, + fit_amplitude_p, + fit_fwhm_p, + fit_peak_center_p, + estimate_length_p, + estimate_center_p) + + model_fit = {"amplitude" : fit_amplitude.value, "fwhm" : fit_fwhm.value, "peak_center" : fit_peak_center.value} + diameter_estimate = {"length" : estimate_length.value, "center" : estimate_center.value} + + if output_coordinates: + line_coords = get_vessel_line_coordinates(input_movie_file, line, num_pixels) + return line_profile, model_fit, diameter_estimate, line_coords + else: + return line_profile, model_fit, diameter_estimate + + +def decompress(input_isxc_file, output_dir): + """ + Decompress an isxc file to the corresponding isxd file. + + Output file will have the name .isxd + + Arguments + --------- + input_isxc_file :str + The file path of the isxc file to decompress. + output_dir : str + The path of the directory to write the isxd file. + """ + isx._internal.c_api.isx_decompress(input_isxc_file.encode('utf-8'), output_dir.encode('utf-8')) diff --git a/isx/core.py b/isx/core.py new file mode 100644 index 0000000..c5c05a2 --- /dev/null +++ b/isx/core.py @@ -0,0 +1,351 @@ +""" +The core module contains functionality that is used by most other +modules frequently. +""" + +import ctypes +import datetime +from fractions import Fraction + +import numpy as np + +import isx._internal + +__version__ = isx._internal.get_version_string() + +class Duration(object): + """ + A duration of time. + + Examples + -------- + Make a period of 50 milliseconds + + >>> period = isx.Duration.from_msecs(50) + + Attributes + ---------- + secs_float : float + The duration in seconds as a floating point number. + """ + + def __init__(self): + self._impl = isx._internal.IsxRatio(0, 1) + + @property + def secs_float(self): + return float(self._impl.num) / float(self._impl.den) + + def to_secs(self): + """ Convert to an integer number of whole seconds. + """ + return int(self.secs_float) + + def to_msecs(self): + """ Convert to an integer number of whole milliseconds. 
+ """ + return int(self.secs_float * 1e3) + + def to_usecs(self): + """ Convert to an integer number of whole microseconds. + """ + return round(self.secs_float * 1e6) + + @classmethod + def from_secs(cls, secs): + """ Make a duration from a number of seconds. + """ + return cls._from_num_den(int(secs), int(1)) + + @classmethod + def from_msecs(cls, msecs): + """ Make a duration from a number of milliseconds. + """ + return cls._from_num_den(int(msecs), int(1e3)) + + @classmethod + def from_usecs(cls, usecs): + """ Make a duration from a number of microseconds. + """ + return cls._from_num_den(int(usecs), int(1e6)) + + @classmethod + def _from_num_den(cls, num, den): + return cls._from_impl(isx._internal.IsxRatio(num, den)) + + @classmethod + def _from_secs_float(cls, flt, max_denominator=1000000000): + frac = Fraction(flt).limit_denominator(max_denominator) + return cls._from_impl(isx._internal.IsxRatio(frac.numerator, frac.denominator)) + + @classmethod + def _from_impl(cls, impl): + self = cls() + self._impl = impl + return self + + def __eq__(self, other): + return self._impl == other._impl + + def __str__(self): + return '{}s'.format(self.secs_float) + + +class Time(object): + """ + A time stamp that defines a calendar date and wall time. + """ + + def __init__(self): + self._impl = isx._internal.IsxTime(isx._internal.IsxRatio(0, 1), 0) + + def to_datetime(self): + """ + Returns + ------- + :class:`datetime.datetime` + The nearest Python datetime. 
+ """ + secs_since_epoch = self._to_secs_since_epoch().secs_float + float(self._impl.utc_offset) + return datetime.datetime.utcfromtimestamp(secs_since_epoch) + + def _to_secs_since_epoch(self): + return isx.Duration._from_impl(self._impl.secs_since_epoch) + + @classmethod + def _from_secs_since_epoch(cls, secs_since_epoch, utc_offset=0): + self = cls() + self._impl.secs_since_epoch = secs_since_epoch._impl + self._impl.utc_offset = utc_offset + return self + + @classmethod + def _from_impl(cls, impl): + self = cls() + self._impl = impl + return self + + def __eq__(self, other): + return self._impl == other._impl + + def __str__(self): + return str(self.to_datetime()) + + +class Timing(object): + """ + The timing associated with a set of samples, such as the frames of a movie or the + values of a trace. + + Some samples are described as invalid, meaning that the sample is missing. + These include dropped samples, which could arise due to an error at acquisition time, + and cropped samples, which are likely due to processing. + + Examples + -------- + Make timing for 9 samples, every 10 milliseconds. + + >>> timing = isx.Timing(num_samples=9, period=isx.Duration.from_msecs(10)) + + Attributes + ---------- + num_samples : int >= 0 + The number of samples, including invalid (dropped, cropped, or blank) ones. + period : :class:`isx.Duration` + The period or duration of one sample. + start : :class:`isx.Time` + The time stamp associated with the start of the first sample. + dropped : list + The indices of the dropped samples. + cropped : list<2-tuple> + The index ranges of the cropped samples. + Each element specifies the inclusive lower and upper bounds of a range + of indices. + blank : list + The indices of the blank samples. 
+ """ + + def __init__(self, num_samples=0, period=Duration.from_msecs(50), start=Time(), dropped=[], cropped=[], blank=[]): + """ + __init__(self, num_samples=0, period=``isx.Duration.from_msecs(50)``, start=``isx.Time()``, dropped=[], cropped=[], blank=[]): + + Make a timing object. + + Arguments + --------- + num_samples : int >= 0 + The number of samples, including invalid (dropped, cropped, or blank) ones. + period : :class:`isx.Duration` + The period or duration of one sample. + start : :class:`isx.Time` + The time stamp associated with the start of the first sample. + dropped : 1D array-like + The indices of the dropped samples. + cropped : Nx2 array-like + The index ranges of the cropped samples. + Each 2-tuple or row specifies the inclusive lower and upper bounds of a + range of indices. + blank : list + The indices of the blank samples. + """ + + if num_samples < 0: + raise ValueError('num_samples must be non-negative') + + if not isinstance(period, Duration): + raise TypeError('period must be an isx.Duration object') + + if not isinstance(start, Time): + raise ValueError('start_time must be an isx.Time object') + + self._impl = isx._internal.IsxTimingInfo() + self._impl.num_samples = num_samples + self._impl.step = period._impl + self._impl.start = start._impl + + dropped = isx._internal.convert_to_1d_numpy_array(dropped, np.uint64, 'dropped') + self._impl.num_dropped = dropped.size + self._impl.dropped = isx._internal.numpy_array_to_ctypes_array(dropped, ctypes.c_uint64) + + cropped = isx._internal.convert_to_nx2_numpy_array(cropped, np.uint64, 'cropped') + self._impl.num_cropped = cropped.shape[0] + self._impl.cropped_first = isx._internal.numpy_array_to_ctypes_array(cropped[:, 0], ctypes.c_uint64) + self._impl.cropped_last = isx._internal.numpy_array_to_ctypes_array(cropped[:, 1], ctypes.c_uint64) + + blank = isx._internal.convert_to_1d_numpy_array(blank, np.uint64, 'blank') + self._impl.num_blank = blank.size + self._impl.blank = 
isx._internal.numpy_array_to_ctypes_array(blank, ctypes.c_uint64) + + @property + def start(self): + return isx.Time._from_impl(self._impl.start) + + @property + def period(self): + return isx.Duration._from_impl(self._impl.step) + + @property + def num_samples(self): + return self._impl.num_samples + + @property + def dropped(self): + return isx._internal.ctypes_ptr_to_list(self._impl.dropped, self._impl.num_dropped) + + @property + def cropped(self): + cropped_first = isx._internal.ctypes_ptr_to_list(self._impl.cropped_first, self._impl.num_cropped) + cropped_last = isx._internal.ctypes_ptr_to_list(self._impl.cropped_last, self._impl.num_cropped) + return [(first, last) for first, last in zip(cropped_first, cropped_last)] + + @property + def blank(self): + return isx._internal.ctypes_ptr_to_list(self._impl.blank, self._impl.num_blank) + + def get_offsets_since_start(self): + """ + Get the offsets from the start of the timing. + + Returns + ------- + list<:class:`isx.Duration`> + Each element is the offset from the start to a sample. + """ + OffsetsType = ctypes.c_int64 * self.num_samples + offsets_num = OffsetsType() + offsets_den = OffsetsType() + isx._internal.c_api.isx_timing_info_get_secs_since_start(ctypes.byref(self._impl), offsets_num, offsets_den) + durations = [] + for i in range(self.num_samples): + durations.append(isx.Duration._from_num_den(offsets_num[i], offsets_den[i])) + return durations + + def get_valid_samples_mask(self): + """ + Get a 1D array mask indicating whether each sample is valid. + + Returns + ------- + :class:`numpy.ndarray` + Each element indicates whether the corresponding sample is valid. + """ + mask = (ctypes.c_uint8 * self.num_samples)() + isx._internal.c_api.isx_timing_info_get_valid_sample_mask(ctypes.byref(self._impl), mask) + return np.array(mask, dtype=bool) + + def get_valid_samples(self): + """ + Returns + ------- + :class:`numpy.ndarray` + The valid sample indices. 
+        """
+        return np.flatnonzero(self.get_valid_samples_mask())
+
+    @classmethod
+    def _from_impl(cls, impl):
+        self = cls()
+        self._impl = impl
+        return self
+
+    def __eq__(self, other):
+        # Two timings are equal only if every field matches, including the
+        # cropped ranges of *other* (previously compared self against itself,
+        # which made the cropped field irrelevant to equality).
+        return ((self.start == other.start) and
+                (self.period == other.period) and
+                (self.num_samples == other.num_samples) and
+                (self.dropped == other.dropped) and
+                (self.cropped == other.cropped) and
+                (self.blank == other.blank))
+
+
+    def __str__(self):
+        return 'Timing(num_samples={}, period={}, start={}, dropped={}, cropped={}, blank={})'.format(
+            self.num_samples, self.period, self.start, self.dropped, self.cropped, self.blank)
+
+
+class Spacing(object):
+    """
+    The spacing associated with a set of pixels.
+
+    Examples
+    --------
+    Make spacing for a 1440x1080 image.
+
+    >>> spacing = isx.Spacing(num_pixels=(1080, 1440))
+
+    Attributes
+    ----------
+    num_pixels : 2-tuple
+        The number of pixels as (num_rows, num_cols).
+    """
+
+    def __init__(self, num_pixels=(0, 0)):
+        if len(num_pixels) != 2:
+            raise ValueError('num_pixels must be specified as a two element list/tuple/array (num_rows, num_cols)')
+        self._impl = isx._internal.IsxSpacingInfo.from_num_pixels(num_pixels)
+
+    @property
+    def num_pixels(self):
+        return (self._impl.num_rows, self._impl.num_cols)
+
+    @property
+    def _pixel_coordinates(self):
+        pixel_width = self._impl.pixel_width._as_float()
+        pixel_height = self._impl.pixel_height._as_float()
+        left = self._impl.left._as_float() / pixel_width  # convert micron to pixel
+        top = self._impl.top._as_float() / pixel_height
+        numX = self._impl.num_cols
+        numY = self._impl.num_rows
+
+        return np.array([round(left), round(top), numX, numY]).astype(int)
+
+    @classmethod
+    def _from_impl(cls, impl):
+        self = cls()
+        self._impl = impl
+        return self
+
+    def __eq__(self, other):
+        return self._impl == other._impl
+
+    def __str__(self):
+        return 'Spacing(num_pixels={})'.format(self.num_pixels)
diff --git a/isx/io.py b/isx/io.py
new file mode 100644
index 0000000..fb62dfa
---
/dev/null +++ b/isx/io.py @@ -0,0 +1,2682 @@ +""" +The io module deals with data input and output. + +This includes reading from and writing to supported file formats for +movies, images, cell sets and event sets. +""" + +import os +import ctypes +import textwrap +import tifffile +from enum import Enum +import warnings + +import numpy as np +import pandas as pd + +import PIL.Image + +import isx._internal +import isx.core + + +class Movie(object): + """ + A movie contains a number of frames with timing and spacing information. + + It is always backed by a file, which can be read or written using this class. + See :ref:`importMovie` for details on what formats are supported for read. + Only the native `.isxd` format is supported for write. + + Examples + -------- + Read an existing movie and get its first frame as a numpy array. + + >>> movie = isx.Movie.read('recording_20160613_105808-PP-PP.isxd') + >>> frame_data = movie.get_frame_data(0) + + Write a 400x300 movie with 200 random frames of float32 values. + + >>> timing = isx.Timing(num_samples=200) + >>> spacing = isx.Spacing(num_pixels=(300, 400)) + >>> movie = isx.Movie.write('movie-400x300x200.isxd', timing, spacing, numpy.float32) + >>> for i in range(timing.num_samples): + >>> movie.set_frame_data(i, numpy.random.random(spacing.num_pixels).astype(numpy.float32)) + >>> movie.flush() + + Attributes + ---------- + file_path : str + The path of the file that stores this. + mode : {'r', 'w'} + The mode the file was opened with. + timing : :class:`isx.Timing` + The timing of the frames. + spacing : :class:`isx.Spacing` + The spacing of the pixels in each frame. + data_type : {numpy.uint16, numpy.float32} + The data type of each pixel. 
+ """ + + def __init__(self): + self._ptr = isx._internal.IsxMoviePtr() + + @property + def file_path(self): + return self._ptr.contents.file_path.decode() if self._ptr else None + + @property + def mode(self): + return isx._internal.get_mode_from_read_only(self._ptr.contents.read_only) if self._ptr else None + + @property + def timing(self): + return isx.core.Timing._from_impl(self._ptr.contents.timing) if self._ptr else None + + @property + def spacing(self): + return isx.core.Spacing._from_impl(self._ptr.contents.spacing) if self._ptr else None + + @property + def data_type(self): + return isx._internal.DATA_TYPE_TO_NUMPY[self._ptr.contents.data_type] if self._ptr else None + + @classmethod + def read(cls, file_path): + """ + Open an existing movie from a file for reading. + + This is a light weight operation that simply reads the meta-data from the movie, + and does not read any frame data. + + Arguments + --------- + file_path : str + The path of the file to read. + + Returns + ------- + :class:`isx.Movie` + The movie that was read. Meta-data is immediately available. + Frames must be read using :func:`isx.Movie.get_frame`. + """ + movie = cls() + isx._internal.c_api.isx_read_movie(file_path.encode('utf-8'), ctypes.byref(movie._ptr)) + return movie + + @classmethod + def write(cls, file_path, timing, spacing, data_type): + """ + Open a new movie to a file for writing. + + This is a light weight operation. It does not write any frame data immediately. + + Arguments + --------- + file_path : str + The path of the file to write. If it already exists, this will error. + timing : :class:`isx.Timing` + The timing of the movie to write. + spacing : :class:`isx.Spacing` + The spacing of the movie to write. + data_type : {numpy.uint16, numpy.float32} + The data type of each pixel. + + Returns + ------- + :class:`isx.Movie` + The empty movie that was written. + Frame data must be written with :func:`isx.Movie.set_frame_data`. 
+ """ + movie = cls() + data_type_int = isx._internal.lookup_enum('data_type', isx._internal.DATA_TYPE_FROM_NUMPY, data_type) + isx._internal.c_api.isx_write_movie(file_path.encode('utf-8'), timing._impl, spacing._impl, data_type_int, False, ctypes.byref(movie._ptr)) + return movie + + def get_frame_data(self, index): + """ + Get a frame from the movie by index. + + Arguments + --------- + index : int >= 0 + The index of the frame. If this is out of range, this should error. + + Returns + ------- + :class:`numpy.ndarray` + The retrieved frame data. + """ + isx._internal.validate_ptr(self._ptr) + + shape = self.spacing.num_pixels + f = np.zeros([np.prod(shape)], dtype=self.data_type) + + if self.data_type == np.uint16: + f_p = f.ctypes.data_as(isx._internal.UInt16Ptr) + isx._internal.c_api.isx_movie_get_frame_data_u16(self._ptr, index, f_p) + elif self.data_type == np.float32: + f_p = f.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_movie_get_frame_data_f32(self._ptr, index, f_p) + elif self.data_type == np.uint8: + f_p = f.ctypes.data_as(isx._internal.UInt8Ptr) + isx._internal.c_api.isx_movie_get_frame_data_u8(self._ptr, index, f_p) + else: + raise RuntimeError('Cannot read from movie with datatype: {}'.format(str(self.data_type))) + + return f.reshape(shape) + + def get_frame_timestamp(self, index): + """ + Get a frame timestamp from the movie by index. + + The timestamps are in units of microseconds. + This is a TSC (time stamp counter) value which is saved during acquisition. + These values come from a hardware counter on a particular acquisition box. + As a result, they can only be used to compare to other values that originate from the same hardware counter (e.g., paired recordings). + + To get timestamps relative to the start of the movie, + simply subtract each timestamp with the timestamp of the first frame in the movie. 
+ To get timestamps relative to Unix epoch time, add the timestamps computed relative + to the start of the movie with the Unix epoch start timestamp of the movie, accessible through the `timing` member of this class. + Alternatively, the timestamps of a movie can be exported relative to the start of the movie, + or relative to the Unix epoch time, using the function `isx.export_movie_timestamps_to_csv`. + + Arguments + --------- + index : int >= 0 + The index of the frame. If this is out of range, this should error. + + Returns + ------- + int + The retreived frame timestamp. + If the movie has no frame timestamps, the function will throw an error, + """ + isx._internal.validate_ptr(self._ptr) + timestamp = ctypes.c_uint64(0) + timestamp_ptr = ctypes.pointer(timestamp) + isx._internal.c_api.isx_movie_get_frame_timestamp(self._ptr, index, timestamp_ptr) + return timestamp.value + + def set_frame_data(self, index, frame): + """ + Set frame data in a writable movie. + + Frames must be set in increasing order, otherwise this will error. + + Arguments + --------- + index : int >= 0 + The index of the frame. + frame : :class:`numpy.ndarray` + The frame data. 
+ """ + isx._internal.validate_ptr(self._ptr) + + if self.mode != 'w': + raise ValueError('Cannot set frame data if movie is read-only.') + + if not isinstance(frame, np.ndarray): + raise TypeError('Frame must be a numpy array') + + if frame.shape != self.spacing.num_pixels: + raise ValueError('Cannot set frame with different shape than movie') + + f_flat = isx._internal.ndarray_as_type(frame, np.dtype(self.data_type)).ravel() + + if self.data_type == np.uint16: + FrameType = ctypes.c_uint16 * np.prod(frame.shape) + c_frame = FrameType(*f_flat) + isx._internal.c_api.isx_movie_write_frame_u16(self._ptr, index, c_frame) + elif self.data_type == np.float32: + FrameType = ctypes.c_float * np.prod(frame.shape) + c_frame = FrameType(*f_flat) + isx._internal.c_api.isx_movie_write_frame_f32(self._ptr, index, c_frame) + else: + raise RuntimeError('Cannot write frames for movie with datatype: {}'.format(str(self.data_type))) + + def flush(self): + """ + Flush all meta-data and frame data to the file. + + This should be called after setting all frames of a movie opened with :func:`isx.Movie.write`. + """ + isx._internal.validate_ptr(self._ptr) + isx._internal.c_api.isx_movie_flush(self._ptr) + + def get_acquisition_info(self): + """ + Get information about acquisition that may be stored in some files, + such as nVista 3 movies and data derived from those. + + Returns + ------- + dict + A dictionary likely parsed from JSON that maps from string keys to variant values. 
+ """ + return isx._internal.get_acquisition_info( + self._ptr, + isx._internal.c_api.isx_movie_get_acquisition_info, + isx._internal.c_api.isx_movie_get_acquisition_info_size); + + def __del__(self): + if self._ptr: + isx._internal.c_api.isx_movie_delete(self._ptr) + + def __str__(self): + return textwrap.dedent("""\ + Movie + file_path: {} + mode: {} + timing: {} + spacing: {} + data_type: {}\ + """.format(self.file_path, self.mode, self.timing, self.spacing, self.data_type)) + + +class Image(object): + """ + An image is effectively a movie with one frame and no timing. + + It is always backed by a file, which can be read or written using this class. + See :ref:`importMovie` for details on what formats are supported for read. + Only the native `.isxd` format is supported for write. + + Examples + -------- + Read an existing image and get its data. + + >>> image = isx.Image.read('recording_20160613_105808-PP-PP-BP-Mean Image.isxd') + >>> image_data = image.get_data() + + Calculate the minimum image from an existing movie and write it. + + >>> movie = isx.Movie.read('recording_20160613_105808-PP-PP.isxd') + >>> min_image = 4095 * numpy.ones(movie.spacing.num_pixels, dtype=movie.data_type) + >>> for i in range(movie.timing.num_samples): + >>> min_image = numpy.minimum(min_image, movie.get_frame_data(i)) + >>> isx.Image.write('recording_20160613_105808-PP-PP-min.isxd', movie.spacing, movie.data_type, min_image) + + Attributes + ---------- + file_path : str + The path of the file that stores this. + mode : {'r', 'w'} + The mode the file was opened with. + spacing : :class:`isx.Spacing` + The spacing of the pixels in the image. + data_type : {numpy.uint16, numpy.float32} + The data type of each pixel. 
+ """ + + def __init__(self): + self._impl = isx.Movie() + self._data = None + + @property + def file_path(self): + return self._impl.file_path + + @property + def mode(self): + return self._impl.mode + + @property + def spacing(self): + return self._impl.spacing + + @property + def data_type(self): + return self._impl.data_type + + @classmethod + def read(cls, file_path): + """ + Read an existing image from a file. + + Arguments + --------- + file_path : str + The path of the image file to read. + + Returns + ------- + :class:`isx.Image` + The image that was read. + """ + self = cls() + self._impl = isx.Movie.read(file_path) + if self._impl.timing.num_samples > 1: + raise AttributeError('File has more than one frame. Use isx.Movie.read instead.') + self._data = self._impl.get_frame_data(0) + return self + + @classmethod + def write(cls, file_path, spacing, data_type, data): + """ + Write an image to a file. + + Arguments + --------- + file_path : str + The path of the file to write. If it already exists, this will error. + spacing : :class:`isx.Spacing` + The spacing of the image to write. + data_type : {numpy.uint16, numpy.float32} + The data type of each pixel. + data : :class:`numpy.array` + The 2D array of data to write. + + Returns + ------- + :class:`isx.Image` + The image that was written. + """ + self = cls() + self._impl = isx.Movie.write(file_path, isx.Timing(num_samples=1), spacing, data_type) + self._data = isx._internal.ndarray_as_type(data, np.dtype(data_type)) + self._impl.set_frame_data(0, self._data) + self._impl.flush() + return self + + def get_data(self): + """ + Get the data stored in the image. + + Returns + ------- + :class:`numpy.ndarray` + The image data. 
+ """ + return self._data + + def __str__(self): + return textwrap.dedent("""\ + Image + file_path: {} + mode: {} + spacing: {} + data_type: {}\ + """.format(self.file_path, self.mode, self.spacing, self.data_type)) + + +class CellSet(object): + """ + A cell set contains the image and trace data associated with components in + a movie, such as cells or regions of interest. + + It is always backed by a file in the native `.isxd` format. + + Examples + -------- + Read an existing cell set from a file and get the image and trace data of + the first cell. + + >>> cell_set = isx.CellSet.read('recording_20160613_105808-PP-PP-BP-MC-DFF-PCA-ICA.isxd') + >>> image_0 = cell_set.get_cell_image_data(0) + >>> trace_0 = cell_set.get_cell_trace_data(0) + + Write a new cell set to a file with the same timing and spacing as an + existing movie, with 3 random cell images and traces. + + >>> movie = isx.Movie.read('recording_20160613_105808-PP-PP.isxd') + >>> cell_set = isx.CellSet.write('cell_set.isxd', movie.timing, movie.spacing) + >>> for i in range(3): + >>> image = numpy.random.random(cell_set.spacing.num_pixels).astype(numpy.float32) + >>> trace = numpy.random.random(cell_set.timing.num_samples).astype(numpy.float32) + >>> cell_set.set_cell_data(i, image, trace, 'C{}'.format(i)) + >>> cell_set.flush() + + Attributes + ---------- + file_path : str + The path of the file that stores this. + mode : {'r', 'w'} + The mode the file was opened with. + timing : :class:`isx.Timing` + The timing of the samples in each cell trace. + spacing : :class:`isx.Spacing` + The spacing of the pixels in each cell image. + num_cells : int + The number of cells or components. 
+ """ + + _MAX_CELL_NAME_SIZE = 256 + + def __init__(self): + self._ptr = isx._internal.IsxCellSetPtr() + + @property + def file_path(self): + return self._ptr.contents.file_path.decode() if self._ptr else None + + @property + def mode(self): + return isx._internal.get_mode_from_read_only(self._ptr.contents.read_only) if self._ptr else None + + @property + def timing(self): + return isx.core.Timing._from_impl(self._ptr.contents.timing) if self._ptr else None + + @property + def spacing(self): + return isx.core.Spacing._from_impl(self._ptr.contents.spacing) if self._ptr else None + + @property + def num_cells(self): + return self._ptr.contents.num_cells if self._ptr else None + + @classmethod + def read(cls, file_path, read_only=True): + """ + Open an existing cell set from a file for reading. + + This is a light weight operation that simply reads the meta-data from the cell set, + and does not read any image or trace data. + + Arguments + --------- + file_path : str + The path of the file to read. + read_only : bool + If true, only allow meta-data and data to be read, otherwise allow some meta-data + to be written (e.g. cell status). + + Returns + ------- + :class:`isx.CellSet` + The cell set that was read. Meta-data is immediately available. + Image and trace data must be read using :func:`isx.CellSet.get_cell_image_data` + and :func:`isx.CellSet.get_cell_trace_data` respectively. + """ + cell_set = cls() + isx._internal.c_api.isx_read_cell_set(file_path.encode('utf-8'), read_only, ctypes.byref(cell_set._ptr)) + return cell_set + + @classmethod + def write(cls, file_path, timing, spacing): + """ + Open a new cell set to a file for writing. + + This is a light weight operation. It does not write any image or trace data immediately. + + Arguments + --------- + file_path : str + The path of the file to write. If it already exists, this will error. + timing : :class:`isx.Timing` + The timing of the cell set to write. 
Typically this comes from the movie this + is derived from. + spacing : :class:`isx.Spacing` + The spacing of the movie to write. Typically this comes from the movie this is + derived from. + + Returns + ------- + :class:`isx.CellSet` + The empty cell set that was written. + Image and trace data must be written with :func:`isx.CellSet.set_cell_data`. + """ + if not isinstance(timing, isx.core.Timing): + raise TypeError('timing must be a Timing object') + + if not isinstance(spacing, isx.core.Spacing): + raise ValueError('spacing must be a Spacing object') + + cell_set = cls() + isx._internal.c_api.isx_write_cell_set( + file_path.encode('utf-8'), timing._impl, spacing._impl, False, ctypes.byref(cell_set._ptr)) + return cell_set + + def get_cell_name(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + str + The name of the indexed cell. + """ + isx._internal.validate_ptr(self._ptr) + result = ctypes.create_string_buffer(CellSet._MAX_CELL_NAME_SIZE) + isx._internal.c_api.isx_cell_set_get_name(self._ptr, index, CellSet._MAX_CELL_NAME_SIZE, result) + return result.value.decode('utf-8') + + def get_cell_status(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + {'accepted', 'undecided', 'rejected'} + The status of the indexed cell as a string. + """ + isx._internal.validate_ptr(self._ptr) + status_int = ctypes.c_int(0) + isx._internal.c_api.isx_cell_set_get_status(self._ptr, index, ctypes.byref(status_int)) + return isx._internal.CELL_STATUS_TO_STRING[status_int.value] + + def set_cell_status(self, index, status): + """ + Set the status of cell. This will also flush the file. + + .. warning:: As this flushes the file, only use this after all cells have been + written using :func:`isx.CellSet.set_cell_data`. + + Arguments + --------- + index : int >= 0 + The index of a cell. 
+ status : {'accepted', 'undecided', 'rejected'} + The desired status of the indexed cell. + """ + isx._internal.validate_ptr(self._ptr) + if self.mode != 'w': + raise RuntimeError('Cannot set cell status in read-only mode') + status_int = isx._internal.lookup_enum('cell_status', isx._internal.CELL_STATUS_FROM_STRING, status) + isx._internal.c_api.isx_cell_set_set_status(self._ptr, index, status_int) + + def get_cell_trace_data(self, index): + """ + Get the trace data associated with a cell. + + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + :class:`numpy.ndarray` + The trace data in a 1D array. + """ + isx._internal.validate_ptr(self._ptr) + trace = np.zeros([self.timing.num_samples], dtype=np.float32) + trace_p = trace.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_cell_set_get_trace(self._ptr, index, trace_p) + return trace + + def get_cell_image_data(self, index): + """ + Get the image data associated with a cell. + + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + :class:`numpy.ndarray` + The image data in a 2D array. + """ + isx._internal.validate_ptr(self._ptr) + f = np.zeros([np.prod(self.spacing.num_pixels)], dtype=np.float32) + f_p = f.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_cell_set_get_image(self._ptr, index, f_p) + return f.reshape(self.spacing.num_pixels) + + def set_cell_data(self, index, image, trace, name): + """ + Set the image and trace data of a cell. + + Cells must be set in increasing order, otherwise this will error. + + Arguments + --------- + index : int >= 0 + The index of a cell. + image : :class:`numpy.ndarray` + The image data in a 2D array. + trace : :class:`numpy.ndarray` + The trace data in a 1D array. + name : str + The name of the cell. 
+ """ + isx._internal.validate_ptr(self._ptr) + + if self.mode != 'w': + raise RuntimeError('Cannot set cell data in read-only mode') + + if name is None: + name = 'C{}'.format(index) + + im = isx._internal.ndarray_as_type(image.reshape(np.prod(self.spacing.num_pixels)), np.dtype(np.float32)) + im_p = im.ctypes.data_as(isx._internal.FloatPtr) + tr = isx._internal.ndarray_as_type(trace, np.dtype(np.float32)) + tr_p = tr.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_cell_set_write_image_trace(self._ptr, index, im_p, tr_p, name.encode('utf-8')) + + def flush(self): + """ + Flush all meta-data and cell data to the file. + + This should be called after setting all cell data of a cell set opened with :func:`isx.CellSet.write`. + """ + isx._internal.validate_ptr(self._ptr) + isx._internal.c_api.isx_cell_set_flush(self._ptr) + + def get_acquisition_info(self): + """ + Get information about acquisition that may be stored in some files, + such as nVista 3 movies and data derived from those. + + Returns + ------- + dict + A dictionary likely parsed from JSON that maps from string keys to variant values. + """ + return isx._internal.get_acquisition_info( + self._ptr, + isx._internal.c_api.isx_cell_set_get_acquisition_info, + isx._internal.c_api.isx_cell_set_get_acquisition_info_size); + + def __del__(self): + if self._ptr: + isx._internal.c_api.isx_cell_set_delete(self._ptr) + + def __str__(self): + return textwrap.dedent("""\ + CellSet + file_path: {} + mode: {} + timing: {} + spacing: {} + num_cells: {}\ + """.format(self.file_path, self.mode, self.timing, self.spacing, self.num_cells)) + + +class EventSet(object): + """ + An event set contains the event data of a number of components or cells. + + It is typically derived from a cell set after applying an event detection + algorithm. + Each event of a cell is comprised of a time stamp offset and a value or amplitude. 
+ + Examples + -------- + Read an existing event set from a file and get the event data associated with the + first cell. + + >>> event_set = isx.EventSet.read('recording_20160613_105808-PP-PP-BP-MC-DFF-PCA-ICA-ED.isxd') + >>> [offsets, amplitudes] = event_set.get_cell_data(0) + + Write a new event set to a file by applying a threshold to the traces of an existing + cell set. + + >>> cell_set = isx.CellSet.read('recording_20160613_105808-PP-PP-BP-MC-DFF-PCA-ICA.isxd') + >>> cell_names = ['C{}'.format(c) for c in range(cell_set.num_cells)] + >>> event_set = isx.EventSet.write('recording_20160613_105808-PP-PP-BP-MC-DFF-PCA-ICA-custom_ED.isxd', cell_set.timing, cell_names) + >>> offsets = numpy.array([x.to_usecs() for x in cell_set.timing.get_offsets_since_start()], numpy.uint64) + >>> for c in range(cell_set.num_cells): + >>> trace = cell_set.get_cell_trace_data(c) + >>> above_thresh = trace > 500 + >>> event_set.set_cell_data(c, offsets[above_thresh], trace[above_thresh]) + >>> event_set.flush() + + Attributes + ---------- + file_path : str + The path of the file that stores this. + mode : {'r', 'w'} + The mode the file was opened with. + timing : :class:`isx.Timing` + The timing of the samples in each event trace. + num_cells : int + The number of cells or components. 
+ cell_dict : dict + Dictionary mapping cell names to cell indices + """ + + def __init__(self): + self._ptr = isx._internal.IsxEventsPtr() + self._cell_dict = dict() + + @property + def file_path(self): + return self._ptr.contents.file_path.decode() if self._ptr else None + + @property + def mode(self): + return isx._internal.get_mode_from_read_only(self._ptr.contents.read_only) if self._ptr else None + + @property + def timing(self): + return isx.core.Timing._from_impl(self._ptr.contents.timing) if self._ptr else None + + @property + def num_cells(self): + return self._ptr.contents.num_cells if self._ptr else None + + @property + def cell_dict(self): + return self._cell_dict + + @classmethod + def read(cls, file_path): + """ + Open an existing event set from a file for reading. + + This is a light weight operation that simply reads the meta-data from the event set, + and does not read any event data. + + Arguments + --------- + file_path : str + The path of the file to read. + + Returns + ------- + :class:`isx.EventSet` + The event set that was read. Meta-data is immediately available. + Event data must be read using :func:`isx.EventSet.get_cell_data`. + """ + event_set = cls() + isx._internal.c_api.isx_read_events(file_path.encode('utf-8'), ctypes.byref(event_set._ptr)) + + # Populate cell -> index dict + for i in range(event_set.num_cells): + event_set._cell_dict[event_set.get_cell_name(i)] = i + + return event_set + + @classmethod + def write(cls, file_path, timing, cell_names): + """ + Open a new event set to a file for writing. + + This is a light weight operation. It does not write any event data immediately. + + Arguments + --------- + file_path : str + The path of the file to write. If it already exists, this will error. + timing : isx.Timing + The timing of the event set to write. Typically this comes from the cell set this + is derived from. + cell_names : list + The names of the cells that will be written. 
Typically these come from the cell set + this is derived from. + + Returns + ------- + :class:`isx.EventSet` + The empty event set that was written. + Image and trace data must be written with :func:`isx.EventSet.set_cell_data`. + """ + if not isinstance(timing, isx.core.Timing): + raise TypeError('timing must be a Timing object') + + num_cells = len(cell_names) + if num_cells <= 0: + raise ValueError('cell_names must not be empty') + + cell_names_c = isx._internal.list_to_ctypes_array(cell_names, ctypes.c_char_p) + event_set = cls() + isx._internal.c_api.isx_write_events(file_path.encode('utf-8'), timing._impl, cell_names_c, num_cells, ctypes.byref(event_set._ptr)) + + # Populate cell -> index dict + event_set._cell_dict = { name : index for index, name in enumerate(cell_names) } + + return event_set + + def get_cell_name(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + str + The name of the indexed cell. + """ + isx._internal.validate_ptr(self._ptr) + result = ctypes.create_string_buffer(CellSet._MAX_CELL_NAME_SIZE) + isx._internal.c_api.isx_events_get_cell_name(self._ptr, index, CellSet._MAX_CELL_NAME_SIZE, result) + return result.value.decode('utf-8') + + def get_cell_index(self, name): + """ + Arguments + --------- + name : int >= 0 + The name of a cell. + + Returns + ------- + str + The index of the named cell. + """ + try: + return self._cell_dict[name] + except KeyError: + raise KeyError(f"Cell with name \"{name}\" does not exist.") + + def get_cell_data(self, index): + """ + Get the event data associated with a cell. + + Arguments + --------- + index : int >= 0 + The index of a cell. + + Returns + ------- + offsets : :class:`numpy.ndarray` + The 1D array of time stamps offsets from the start in microseconds. + amplitudes : :class:`numpy.ndarray` + The 1D array of event amplitudes. 
+ """ + isx._internal.validate_ptr(self._ptr) + + cell_name = self.get_cell_name(index) + + num_events = ctypes.c_size_t(0) + isx._internal.c_api.isx_events_get_cell_count(self._ptr, cell_name.encode('utf-8'), ctypes.byref(num_events)) + num_events = num_events.value + + f = np.zeros([np.prod(num_events)], dtype=np.float32) + f_p = f.ctypes.data_as(isx._internal.FloatPtr) + + usecs = np.zeros([np.prod(num_events)], dtype=np.uint64) + usecs_p = usecs.ctypes.data_as(isx._internal.UInt64Ptr) + + isx._internal.c_api.isx_events_get_cell(self._ptr, cell_name.encode('utf-8'), usecs_p, f_p) + + return usecs, f + + def set_cell_data(self, index, offsets, amplitudes): + """ + Set the event data of a cell. + + Arguments + --------- + index : int >= 0 + The index of a cell. + offsets : :class:`numpy.ndarray` + The 1D array of time stamps offsets from the start in microseconds. + amplitudes : :class:`numpy.ndarray` + The 1D array of event amplitudes. + """ + isx._internal.validate_ptr(self._ptr) + + if len(offsets) != len(amplitudes): + raise TypeError("Number of events must be the same as the number of timestamps.") + + amps = isx._internal.ndarray_as_type(amplitudes, np.dtype(np.float32)) + offs = isx._internal.ndarray_as_type(offsets, np.dtype(np.uint64)) + f_p = amps.ctypes.data_as(isx._internal.FloatPtr) + usecs_p = offs.ctypes.data_as(isx._internal.UInt64Ptr) + isx._internal.c_api.isx_events_write_cell(self._ptr, index, len(offs), usecs_p, f_p) + + def flush(self): + """ + Flush all meta-data and cell data to the file. + + This should be called after setting all cell data of an event set opened with :func:`isx.EventSet.write`. + """ + isx._internal.validate_ptr(self._ptr) + isx._internal.c_api.isx_events_flush(self._ptr) + + def get_acquisition_info(self): + """ + Get information about acquisition that may be stored in some files, + such as nVista 3 movies and data derived from those. 
+ + Returns + ------- + dict + A dictionary likely parsed from JSON that maps from string keys to variant values. + """ + return isx._internal.get_acquisition_info( + self._ptr, + isx._internal.c_api.isx_events_get_acquisition_info, + isx._internal.c_api.isx_events_get_acquisition_info_size); + + def __del__(self): + if self._ptr: + isx._internal.c_api.isx_events_delete(self._ptr) + + def __str__(self): + return textwrap.dedent("""\ + EventSet + file_path: {} + mode: {} + timing: {} + num_cells: {}\ + """.format(self.file_path, self.mode, self.timing, self.num_cells)) + + +class GpioSet(object): + """ + A GPIO set contains the data recorded across a number of channels. + + Each data point is comprised of a time stamp offset and a value or amplitude. + + Examples + -------- + Read an existing gpio set from a file and get the data associated with the first channel. + + >>> gpio_set = isx.GpioSet.read('2020-05-20-10-33-22_video.gpio') + >>> [offsets, amplitudes] = gpio_set.get_channel_data(0) + + Attributes + ---------- + file_path : str + The path of the file that stores this. + mode : {'r', 'w'} + The mode the file was opened with. + timing : :class:`isx.Timing` + The timing of the samples. + num_channels : int + The number of channels. 
+ channel_dict : dict + Dictionary mapping channel names to channel indices + """ + + _MAX_CHANNEL_NAME_SIZE = 256 + + def __init__(self): + self._ptr = isx._internal.IsxGpioPtr() + self._is_imu = False + self._channel_dict = dict() + + @property + def file_path(self): + return self._ptr.contents.file_path.decode() if self._ptr else None + + @property + def mode(self): + return isx._internal.get_mode_from_read_only(self._ptr.contents.read_only) if self._ptr else None + + @property + def timing(self): + return isx.core.Timing._from_impl(self._ptr.contents.timing) if self._ptr else None + + @property + def num_channels(self): + return self._ptr.contents.num_channels if self._ptr else None + + @property + def channel_dict(self): + return self._channel_dict + + @classmethod + def read(cls, file_path): + """ + Open an existing GPIO set from a file for reading. + + This is a light weight operation that simply reads the meta-data from the GPIO set, + and does not read any GPIO data. + + Arguments + --------- + file_path : str + The path of the file to read. + + Returns + ------- + :class:`isx.GpioSet` + The GPIO set that was read. Meta-data is immediately available. + GPIO data must be read using :func:`isx.GpioSet.get_channel_data`. + """ + gpio = cls() + isx._internal.c_api.isx_read_gpio(file_path.encode('utf-8'), ctypes.byref(gpio._ptr)) + + if file_path.lower().endswith('.imu'): + gpio._is_imu = True + + # Populate channel -> index dict + for i in range(gpio.num_channels): + gpio._channel_dict[gpio.get_channel_name(i)] = i + + return gpio + + def get_channel_name(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a channel. + + Returns + ------- + str + The name of the indexed channel. 
+ name : str + The name of a channel. + + Returns + ------- + int + The index of the named channel.
+ """ + return isx._internal.get_acquisition_info( + self._ptr, + isx._internal.c_api.isx_gpio_get_acquisition_info, + isx._internal.c_api.isx_gpio_get_acquisition_info_size) + + def __del__(self): + if self._ptr: + isx._internal.c_api.isx_gpio_delete(self._ptr) + + def __str__(self): + return textwrap.dedent("""\ + GPIO Set + file_path: {} + mode: {} + timing: {} + num_channels: {}\ + """.format(self.file_path, self.mode, self.timing, self.num_channels)) + + +class VesselSet(object): + """ + A vessel set contains the image, line and trace data associated with components in + a movie, such as vessels or regions of interest. + + It is always backed by a file in the native `.isxd` format. + + A vessel set can represent two types of data: vessel diameter and rbc velocity. + Depending on the vessel type, different information will be stored to disk. + + Note: Since blood flow algorithms apply a sliding window over input movies, + the timing of a vessel trace is different from the timing of its input movie. + Each frame of a vessel trace represents a measurement for a particular window sampled from its input movie. + Relative to the input movie, each frame maps to the start of the corresponding window sampled. + The duration of a frame is equal to the time increment of the sliding window. + + The following examples will demonstrate the types of data accessible for both types of vessel sets. + + Examples + -------- + **Vessel Diameter** + Read an existing vessel set from a file and get the image, line and diameter trace data of + the first vessel. 
+ >>> lines = numpy.random.randint(0, min(vessel_set.spacing.num_pixels), (2, 2))
+ >>> lines = numpy.random.randint(0, min(vessel_set.spacing.num_pixels), (4, 2))
+ """ + + _MAX_VESSEL_NAME_SIZE = 256 + + class VesselSetType(Enum): + VESSEL_DIAMETER = 0 + RBC_VELOCITY = 1 + + @classmethod + def from_str(cls, type_str): + if type_str == 'rbc velocity': + return cls.RBC_VELOCITY + else: + return cls.VESSEL_DIAMETER + + def __init__(self): + self._ptr = isx._internal.IsxVesselSetPtr() + + @property + def file_path(self): + return self._ptr.contents.file_path.decode() if self._ptr else None + + @property + def mode(self): + return isx._internal.get_mode_from_read_only(self._ptr.contents.read_only) if self._ptr else None + + @property + def timing(self): + return isx.core.Timing._from_impl(self._ptr.contents.timing) if self._ptr else None + + @property + def spacing(self): + return isx.core.Spacing._from_impl(self._ptr.contents.spacing) if self._ptr else None + + @property + def num_vessels(self): + return self._ptr.contents.num_vessels if self._ptr else None + + @classmethod + def read(cls, file_path, read_only=True): + """ + Open an existing vessel set from a file for reading. + + This is a light weight operation that simply reads the meta-data from the vessel set, + and does not read any image or trace data. + + Arguments + --------- + file_path : str + The path of the file to read. + read_only : bool + If true, only allow meta-data and data to be read, otherwise allow some meta-data + to be written (e.g. vessel status). + + Returns + ------- + :class:`isx.VesselSet` + The vessel set that was read. Meta-data is immediately available. + Image and trace data must be read using :func:`isx.VesselSet.get_vessel_image_data` + and :func:`isx.VesselSet.get_vessel_trace_data` respectively. + """ + vessel_set = cls() + isx._internal.c_api.isx_read_vessel_set(file_path.encode('utf-8'), read_only, ctypes.byref(vessel_set._ptr)) + return vessel_set + + @classmethod + def write(cls, file_path, timing, spacing, vessel_type): + """ + Open a new vessel set to a file for writing. + + This is a light weight operation. 
It does not write any image or trace data immediately. + + Arguments + --------- + file_path : str + The path of the file to write. If it already exists, this will error. + timing : :class:`isx.Timing` + The timing of the vessel set to write. Typically this comes from the movie this + is derived from. + spacing : :class:`isx.Spacing` + The spacing of the movie to write. Typically this comes from the movie this is + derived from. + vessel_type : str + The type of metric to store in the vessel set. Either 'vessel diameter' or 'rbc velocity'. + + Returns + ------- + :class:`isx.VesselSet` + The empty vessel set that was written. + Image and trace data must be written with :func:`isx.VesselSet.set_vessel_data`. + """ + if not isinstance(timing, isx.core.Timing): + raise TypeError('timing must be a Timing object') + + if not isinstance(spacing, isx.core.Spacing): + raise ValueError('spacing must be a Spacing object') + + vessel_set = cls() + isx._internal.c_api.isx_write_vessel_set( + file_path.encode('utf-8'), timing._impl, spacing._impl, cls.VesselSetType.from_str(vessel_type).value, ctypes.byref(vessel_set._ptr)) + return vessel_set + + def get_vessel_name(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a vessel. + + Returns + ------- + str + The name of the indexed vessel. + """ + isx._internal.validate_ptr(self._ptr) + result = ctypes.create_string_buffer(VesselSet._MAX_VESSEL_NAME_SIZE) + isx._internal.c_api.isx_vessel_set_get_name(self._ptr, index, VesselSet._MAX_VESSEL_NAME_SIZE, result) + return result.value.decode('utf-8') + + def get_vessel_status(self, index): + """ + Arguments + --------- + index : int >= 0 + The index of a vessel. + + Returns + ------- + {'accepted', 'undecided', 'rejected'} + The status of the indexed vessel as a string. 
+ """ + isx._internal.validate_ptr(self._ptr) + status_int = ctypes.c_int(0) + isx._internal.c_api.isx_vessel_set_get_status(self._ptr, index, ctypes.byref(status_int)) + return isx._internal.VESSEL_STATUS_TO_STRING[status_int.value] + + def set_vessel_status(self, index, status): + """ + Set the status of vessel. This will also flush the file. + + .. warning:: As this flushes the file, only use this after all vessels have been + written using :func:`isx.VesselSet.set_vessel_data`. + + Arguments + --------- + index : int >= 0 + The index of a vessel. + status : {'accepted', 'undecided', 'rejected'} + The desired status of the indexed vessel. + """ + isx._internal.validate_ptr(self._ptr) + if self.mode != 'w': + raise RuntimeError('Cannot set vessel status in read-only mode') + status_int = isx._internal.lookup_enum('vessel_status', isx._internal.VESSEL_STATUS_FROM_STRING, status) + isx._internal.c_api.isx_vessel_set_set_status(self._ptr, index, status_int) + + def get_vessel_trace_data(self, index): + """ + Get the trace data associated with a vessel. + + Arguments + --------- + index : int >= 0 + The index of a vessel. + + Returns + ------- + :class:`numpy.ndarray` + The trace data in a 1D array. + """ + isx._internal.validate_ptr(self._ptr) + trace = np.zeros([self.timing.num_samples], dtype=np.float32) + trace_p = trace.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_vessel_set_get_trace(self._ptr, index, trace_p) + return trace + + def get_vessel_image_data(self, index): + """ + Get the image data associated with a vessel. + + Arguments + --------- + index : int >= 0 + The index of a vessel. + + Returns + ------- + :class:`numpy.ndarray` + The image data in a 2D array. 
+ This represents an index on the user-drawn line where the center of the diameter was estimated to be.
+ + Returns + ------- + :class:`numpy.ndarray` + The center trace data in a 1D array. + If no center traces are stored in the file, the function will throw an error + """ + isx._internal.validate_ptr(self._ptr) + trace = np.zeros([self.timing.num_samples], dtype=np.float32) + trace_p = trace.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_vessel_set_get_center_trace(self._ptr, index, trace_p) + return trace + + def get_vessel_direction_trace_data(self, index): + """ + Get the direction trace data associated with a vessel + This is the direction component of each velocity measurement reported in degrees relative to positive x-axis. + + Arguments + --------- + index : int >= 0 + The index of a vessel. + + Returns + ------- + :class:`numpy.ndarray` + The direction trace data in a 1D array. + If no direction traces are stored in the file, the function will throw an error + """ + isx._internal.validate_ptr(self._ptr) + trace = np.zeros([self.timing.num_samples], dtype=np.float32) + trace_p = trace.ctypes.data_as(isx._internal.FloatPtr) + isx._internal.c_api.isx_vessel_set_get_direction_trace(self._ptr, index, trace_p) + return trace + + def has_correlation_heatmaps(self): + """ + If true, cross-correlation heatmaps for rbc velocity measurements are stored in this file. + + Returns + ------- + bool + Flag indicating whether heatmaps were saved. + """ + isx._internal.validate_ptr(self._ptr) + saved_ptr = ctypes.c_int(0) + isx._internal.c_api.isx_vessel_set_is_correlation_saved(self._ptr, ctypes.byref(saved_ptr)) + return bool(saved_ptr.value) + + def get_vessel_correlations_data(self, index, frame): + """ + Get the correlation trace data associated with a vessel at a certain frame. + + Arguments + --------- + index : int >= 0 + The index of a vessel. + frame : int >= 0 + The frame index of the time-series trace. 
+ The mapping of temporal offsets is (slice 0 -> t = -1, slice 1 -> t = 0, slice 2 -> t = 1)
+ W is the width of the correlation heatmap + H is the height of the correlation heatmap
+ """ + isx._internal.validate_ptr(self._ptr) + + if self.mode != 'w': + raise RuntimeError('Cannot set vessel data in read-only mode') + + if name is None: + name = 'V{}'.format(index) + + im = isx._internal.ndarray_as_type(image.reshape(np.prod(self.spacing.num_pixels)), np.dtype(np.float32)) + im_p = im.ctypes.data_as(isx._internal.FloatPtr) + + ln = isx._internal.ndarray_as_type(line, np.dtype(np.int64)) + ln_p = ln.ctypes.data_as(isx._internal.Int64Ptr) + + tr = isx._internal.ndarray_as_type(trace, np.dtype(np.float32)) + tr_p = tr.ctypes.data_as(isx._internal.FloatPtr) + + dir_tr = isx._internal.ndarray_as_type(direction_trace, np.dtype(np.float32)) + dir_tr_p = dir_tr.ctypes.data_as(isx._internal.FloatPtr) + + corr_tr, corr_tr_p = None, None + corr_size = [0, 0] + if correlations_trace is not None: + corr_tr = isx._internal.ndarray_as_type(correlations_trace, np.dtype(np.float32)) + corr_tr_p = corr_tr.ctypes.data_as(isx._internal.FloatPtr) + corr_size = [correlations_trace.shape[3], correlations_trace.shape[2]] + + isx._internal.c_api.isx_vessel_set_write_rbc_velocity_data(self._ptr, index, im_p, ln_p, tr_p, dir_tr_p, corr_size[0], corr_size[1], corr_tr_p, name.encode('utf-8')) + + def flush(self): + """ + Flush all meta-data and vessel data to the file. + + This should be called after setting all vessel data of a vessel set opened with :func:`isx.VesselSet.write`. + """ + isx._internal.validate_ptr(self._ptr) + isx._internal.c_api.isx_vessel_set_flush(self._ptr) + + def get_acquisition_info(self): + """ + Get information about acquisition that may be stored in some files, + such as nVista 3 movies and data derived from those. + + Returns + ------- + dict + A dictionary likely parsed from JSON that maps from string keys to variant values. 
+ """ + return isx._internal.get_acquisition_info( + self._ptr, + isx._internal.c_api.isx_vessel_set_get_acquisition_info, + isx._internal.c_api.isx_vessel_set_get_acquisition_info_size); + + def __del__(self): + if self._ptr: + isx._internal.c_api.isx_vessel_set_delete(self._ptr) + + def __str__(self): + return textwrap.dedent("""\ + VesselSet + file_path: {} + mode: {} + timing: {} + spacing: {} + num_vessels: {}\ + """.format(self.file_path, self.mode, self.timing, self.spacing, self.num_vessels)) + +def convert_type_numpy_array(array, dtype=np.uint16, keep_0_to_1=False): + """ Convert a numpy array to a different data type by normalizing and mapping.""" + + if np.nanmax(array) - np.nanmin(array) != 0: + scaled_array = (array - np.nanmin(array)) / (np.nanmax(array) - np.nanmin(array)) + else: + scaled_array = array.copy() + + if np.issubdtype(dtype, np.integer): + scaled_array *= np.iinfo(dtype).max + elif not keep_0_to_1: + scaled_array *= np.finfo(dtype).max + + # if converting from complex to real type, drop imaginary component + if np.iscomplexobj(array) and not np.issubdtype(dtype, np.dtype(complex)): + scaled_array = np.real(scaled_array) + + return scaled_array.astype(dtype) + + +def export_image_to_tiff(image, tiff_file, write_rgb=False): + """Save an image as a uint16 tiff.""" + if write_rgb: + image_data = convert_type_numpy_array(image, np.uint8) + image_out = PIL.Image.fromarray(image_data) + else: + image_data = convert_type_numpy_array(image, np.uint16) + image_out = PIL.Image.fromarray(image_data) + image_out.save(tiff_file) + + +def _normalize_image(cell_image): + """Map an image to the scale [0, 1]. 
""" + + lower_limit = np.nanpercentile(cell_image, 0) + higher_limit = np.nanpercentile(cell_image, 100) + + cell_image_f32 = cell_image.astype('float32') + limits = np.array((lower_limit, higher_limit)).astype('float32') + + norm_image = (cell_image_f32 - limits[0]) / np.diff(limits) + #norm_image[norm_image < 0] = 0 + #norm_image[norm_image > 1] = 1 + norm_image[np.less(norm_image, 0, where=~np.isnan(norm_image))] = 0 + norm_image[np.greater(norm_image, 1, where=~np.isnan(norm_image))] = 1 + + return norm_image + + +def _get_footprint_list(cellset_file, selected_cell_statuses=['accepted']): + """ Get a list of valid footprint images from a cellset.""" + cellset = isx.io.CellSet.read(cellset_file) + + # cells with non-NaN values are valid + valid_indices = [index for index in range(cellset.num_cells) if not all(np.isnan(cellset.get_cell_trace_data(index)))] + # select cells that have the selected status + selected_indices = [index for index in valid_indices if cellset.get_cell_status(index) in selected_cell_statuses] + + cell_images = [] + n_cells = len(selected_indices) + for i in range(n_cells): + cell_index = selected_indices[i] + cell_image = cellset.get_cell_image_data(cell_index) + cell_name = cellset.get_cell_name(cell_index) + cell_images.append(cell_image) + + return cell_images + + +def _isxd_cell_set_to_cell_map(cell_set, selected_cell_statuses=['accepted'], + cell_normalization=True, cell_thresh=None, binary=False, + footprint_min_var=1e-4): + """Generate cell map as a max projection of the cell footprints of an isxd cell set. + + Arguments + --------- + cell_set : str + Path to .isxd cell set file. + selected_cell_statuses : list + a list of cell statuses for decision criteria to keep cell footprints. + Possible list values are 'accepted', 'undecided', 'rejected'. + cell_normalization : Bool + If true, each cell will be normalized to [0, 1]. + cell_thresh : float [0, 1] + Pixels with values lower than cell_thresh will be set to 0. 
+ binary : Bool + If true, pixels with values above cell_thresh are set to 1. (Pixels with values + below cell_thresh are set to 0.) + footprint_min_var: float + Minimum variance of an individual cell footprint. Footprints with variance + below this threshold are not included in the cell map. + + Return Type + ----------- + numpy.ndarray + cell map as a max projection of the cell footprints. + """ + footprint_list = _get_footprint_list(cell_set, selected_cell_statuses=selected_cell_statuses) + + if not footprint_list: + raise ValueError('There are no cells to create a cell map with! Only selected cells will be used for the cell map.') + + cell_footprints = np.stack(footprint_list, axis=2) + cell_footprints = np.moveaxis(cell_footprints, 2, 0) + + n_cells = cell_footprints.shape[0] + cell_map = None + for index in range(n_cells): + cell_image = cell_footprints[index, :, :].copy() + + if cell_normalization: + cell_image = _normalize_image(cell_image) + + # skip if footprint pixel values lack variability - indicates absence of cell in image + if np.var(cell_image) < footprint_min_var: + continue + + if cell_thresh is not None: + cell_image_thresh = cell_thresh * np.nanmax(cell_image) + cell_image[cell_image < cell_image_thresh] = 0. + + if binary: + cell_image = (cell_image >= cell_image_thresh).astype(np.float32) + + if cell_map is None: # first index + cell_map = cell_image + else: + np.fmax(cell_map, cell_image, out=cell_map) + + return cell_map + + +def export_image_to_isxd_tiff(image, isxd_file, tiff_file, rgb=None): + """Save an image as isxd and tiff. + If rgb is one of 'red', 'green', 'blue', the images will be colored. 
+ """ Convert a tiff image file to an ISXD image.
+ output_isxd_file : str + Path to the output ISXD image file. + + """ + if not os.path.exists(input_tiff_file): + raise FileNotFoundError('TIFF file not found: {}'.format(input_tiff_file)) + + tiff_data = PIL.Image.open(input_tiff_file) + image = np.array(tiff_data) + spacing = isx.core.Spacing(num_pixels=image.shape) + + if image.dtype == np.float32: + dtype = np.float32 + elif image.dtype == np.uint16: + dtype = np.uint16 + elif image.dtype == np.float64: + image = isx.convert_type_numpy_array(image, np.float32) + dtype = np.float32 + elif image.dtype == bool: + image = image.astype(np.uint16) + dtype = np.uint16 + else: + image = isx.convert_type_numpy_array(image, np.uint16) + dtype = np.uint16 + + isx.Image.write(output_isxd_file, spacing, dtype, image) + + +def _overlay_isxd_images(*isxd_images): + """Max project the isxd images and return a numpy array.""" + max_image = None + for isxd_image in isxd_images: + image = Image.read(isxd_image).get_data() + if max_image is None: + max_image = image + else: + max_image = np.maximum(max_image, image) + + return max_image + + +def create_cell_map(isxd_cellset_file, selected_cell_statuses=['accepted'], + cell_thresh=0.3, binary=False, rgb=None, + output_isxd_cell_map_file=None, output_tiff_cell_map_file=None): + """Generate cell map from an .isxd cellset file, saving the image as isxd and tiff. + + Arguments + --------- + isxd_cellset_file : str + Path to an .isxd cellset file. + selected_cell_statuses : list + a list of cell statuses for decision criteria to keep cell footprints. + Possible list values are 'accepted', 'undecided', 'rejected'. + cell_thresh : float [0, 1] + Pixels with values lower than cell_thresh will be set to 0. + binary : Bool + If true, pixels with values above cell_thresh are set to 1. (Pixels with values + below cell_thresh are set to 0.) + rgb : one of "red", "blue", or "green" + Color for the cell map. + output_isxd_cell_map_file : str + Path to the output isxd cell map image. 
If not given, will not be generated. + output_tiff_cell_map_file : str + Path to the output tiff cell map image. If not given, will not be generated. + """ + if not os.path.exists(isxd_cellset_file): + raise FileNotFoundError('ISXD cellset not found: {}'.format(isxd_cellset_file)) + + for file_name in [output_isxd_cell_map_file, output_tiff_cell_map_file]: + if file_name and os.path.exists(file_name): + raise FileExistsError('Output file already exists: {}'.format(file_name)) + + cell_map = _isxd_cell_set_to_cell_map(isxd_cellset_file, + selected_cell_statuses=selected_cell_statuses, + cell_thresh=cell_thresh, binary=binary) + cell_map = convert_type_numpy_array(cell_map, np.uint16) + export_image_to_isxd_tiff(cell_map, output_isxd_cell_map_file, output_tiff_cell_map_file, rgb=rgb) + + +def overlay_cellmaps(first_tiff_cellmap_file, second_tiff_cellmap_file, overlayed_tiff_cellmap_file, + background_color='#000000', first_color='#00ff00', second_color='#ff00ff', cell_thresh=0.5): + """ Overlay two cellmaps using different colors to show overlap. + + Arguments + --------- + first_tiff_cellmap_file : str + Path to the first tiff cellmap image. + second_tiff_cellmap_file : str + Path to the second tiff cellmap image. + overlayed_tiff_cellmap_file : str + Path to the output tiff cellmap. + background_color : str + Hex color code for background. Format: #RRGGBB + first_color : str + Hex color code for cells in first cellmap. Format: #RRGGBB + second_color : str + Hex color code for cells in second cellmap. Format: #RRGGBB + cell_thresh : float [0, 1] + Pixel values less than cell_thresh will be considered as the background. 
+ """ + for input_file in [first_tiff_cellmap_file, second_tiff_cellmap_file]: + if not os.path.exists(input_file): + raise FileNotFoundError("Input file not found: {}".format(input_file)) + + first_image = PIL.Image.open(first_tiff_cellmap_file) + second_image = PIL.Image.open(second_tiff_cellmap_file) + + # convert images to 8 bit grayscale + first_image = PIL.Image.fromarray(isx.convert_type_numpy_array(first_image, dtype=np.uint8)).convert('L') + second_image = PIL.Image.fromarray(isx.convert_type_numpy_array(second_image, dtype=np.uint8)).convert('L') + + if first_image.size != second_image.size: + raise ValueError('The two images do not have the same size: {} vs {}'.format(first_image.size, second_image.size)) + + # get rgb tuples of selected colors + pil_bg_color = PIL.ImageColor.getcolor(background_color, "RGB") + pil_first_color = PIL.ImageColor.getcolor(first_color, "RGB") + pil_second_color = PIL.ImageColor.getcolor(second_color, "RGB") + + # create boolean mask - True for lighter colors (cells), False for darker colors (no cell) + first_mask = np.array(first_image) >= 255 * cell_thresh + second_mask = np.array(second_image) >= 255 * cell_thresh + + overlayed_arr = np.empty(first_mask.shape + (3,), dtype=np.uint8) + + # assign appropriate colors depending on where cells are and intersect + overlayed_arr[np.logical_and(np.logical_not(first_mask), np.logical_not(second_mask))] = pil_bg_color # no cell + overlayed_arr[np.logical_and(first_mask, np.logical_not(second_mask))] = pil_first_color # first cell map only + overlayed_arr[np.logical_and(np.logical_not(first_mask), second_mask)] = pil_second_color # second cell map only + overlayed_arr[np.logical_and(first_mask, second_mask)] = np.maximum(pil_first_color, pil_second_color) # both cell maps + + isx.export_image_to_tiff(overlayed_arr, overlayed_tiff_cellmap_file, write_rgb=True) + + +def overlay_cell_map_on_image(input_isxd_cell_map_file, input_isxd_image_file, output_tiff_image_file): + """Overlay a 
cellmap onto an image, and save the result as a tiff image. + + Arguments + --------- + input_isxd_cell_map_file : str + Path to an .isxd cell map image file. + input_isxd_image_file : str + Path to an .isxd image file to overlay on. + output_tiff_image_file : str + Path to the output tiff image. + """ + if not os.path.exists(input_isxd_cell_map_file): + raise FileNotFoundError('ISXD cell map not found: {}'.format(input_isxd_cell_map_file)) + if not os.path.exists(input_isxd_image_file): + raise FileNotFoundError('ISXD image not found: {}'.format(input_isxd_image_file)) + + if os.path.exists(output_tiff_image_file): + raise FileExistsError('Output file already exists: {}'.format(output_tiff_image_file)) + + overlayed_image = _overlay_isxd_images(input_isxd_image_file, input_isxd_cell_map_file) + export_image_to_isxd_tiff(overlayed_image, None, output_tiff_image_file) + + +def export_movie_to_tiff(input_movie_files, output_tiff_file, write_invalid_frames=False): + """ + Export movies to a TIFF file. + + For more details see :ref:`exportMovie`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to be exported. + output_tiff_file : str + The path of the TIFF file to be written. + write_invalid_frames : bool + If True, write invalid (dropped, cropped, and blank) frames as zero, + otherwise, do not write them at all. + """ + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + isx._internal.c_api.isx_export_movie_tiff(num_movies, in_movie_arr, output_tiff_file.encode('utf-8'), write_invalid_frames) + + +def export_movie_to_nwb( + input_movie_files, output_nwb_file, + identifier='', session_description='', comments='', + description='', experiment_description='', experimenter='', + institution='', lab='', session_id=''): + """ + Export movies to an HDF5-based neurodata without borders (NWB) file. + + For more details see :ref:`exportMovie`. 
+ + Arguments + --------- + input_movie_files : list + The file paths of the movies to be exported. + output_nwb_file : str + The path of the NWB file to be written. + identifier : str + An identifier for the file according to the NWB spec. + session_description : str + A session description for the file according to the NWB spec. + comments : str + Comments on the recording session. + description : str + Description for the file according to the NWB spec. + experiment_description : str + Details about the experiment. + experimenter : str + The person who recorded the data. + institution : str + The place where the recording was performed. + lab : str + The lab where the recording was performed. + session_id : str + A unique session identifier for the recording. + """ + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + isx._internal.c_api.isx_export_movie_nwb( + num_movies, in_movie_arr, output_nwb_file.encode('utf-8'), + identifier.encode('utf-8'), session_description.encode('utf-8'), + comments.encode('utf-8'), description.encode('utf-8'), + experiment_description.encode('utf-8'), experimenter.encode('utf-8'), + institution.encode('utf-8'), lab.encode('utf-8'), session_id.encode('utf-8')) + + +def export_movie_to_mp4( + input_movie_files, + output_mp4_file, + compression_quality=0.1, + write_invalid_frames=False, + frame_rate_format="float", + draw_bounding_box=True, + draw_bounding_box_center=True, + draw_zones=True +): + """ + Export movies to an MP4 file. + + For more details see :ref:`exportMovie`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to be exported. + output_mp4_file : str + The path of the MP4 file to be written. + compression_quality : float + A value between 0.001 and 1 that controls the quality and size of the output file for the MP4 format. + The larger the value, the better the quality of the movie, but the larger the size of the file. 
+ The default value of 0.1 typically produces a good quality movie where the file is at most 10% of the uncompressed original file size, + but may be smaller if the movie content can be efficiently encoded. + More formally, this represents a rough maximum on the output file size as a fraction of the original file size. + write_invalid_frames : bool + If True, write invalid (dropped, cropped, and blank) frames as zero, + otherwise, do not write them at all. + frame_rate_format : {"float", "int"} + Format to encode the frame rate in the output mp4 file. + If float, the frame rate will be exported as a precise estimate of the input movie sampling rate. + If int, the frame rate will be rounded to the nearest integer. + draw_bounding_box : bool + Only used for nVision `.isxb` movies. + If there is nVision tracking data in the `.isxb` movie, and this flag is enabled, + then draw the bounding box estimate on each frame of the exported mp4 movie. + draw_bounding_box_center : bool + Only used for nVision `.isxb` movies. + If there is nVision tracking data in the `.isxb` movie, and this flag is enabled, + then draw the center of the bounding box estimate on each frame of the exported mp4 movie. + draw_zones : bool + Only used for nVision `.isxb` movies. + If there is nVision tracking data in the `.isxb` movie, and this flag is enabled, + then draw the zones of the tracking area on each frame of the exported mp4 movie. + """ + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + + frame_rate_format_map = {'float' : 0, 'int' : 1} + if not frame_rate_format in frame_rate_format_map.keys(): + raise ValueError('Invalid frame rate format. 
Valid frame rate formats include: {}'.format(*frame_rate_format_map.keys())) + + isx._internal.c_api.isx_export_movie_mp4( + num_movies, + in_movie_arr, + output_mp4_file.encode('utf-8'), + compression_quality, + write_invalid_frames, + frame_rate_format_map[frame_rate_format], + draw_bounding_box, + draw_bounding_box_center, + draw_zones + ) + +def export_movie_timestamps_to_csv(input_movie_files, output_csv_file, time_ref='start'): + """ + Export movie frame timestamps to a csv file. + This operation is supported for .isxd and .isxb movies. + + For more details see :ref:`exportMovie`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to export frame timestamps from. + output_csv_file : str + The path of the csv file to be written. + time_ref : {'start', 'unix', 'tsc'} + The time reference for the CSV time stamps. + If 'start' is used, the timestamps represent the seconds since the start of the movie. + If 'unix' is used, the timestamps represent the seconds since the Unix epoch. + If 'tsc' is used, the timestamps represent the hardware counter value on the acquisition box when each frame was captured. + """ + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_movie_timestamps_to_csv( + num_movies, in_movie_arr, output_csv_file.encode('utf-8'), time_ref_int) + +def export_nvision_movie_tracking_frame_data_to_csv(input_movie_files, output_csv_file, time_ref='start'): + """ + Export frame tracking metadata from an nVision movie. + This operation is supported for .isxb movies. + + + The frame tracking metadata is generated by the nVision tracking model, and includes the following columns: + * Global Frame Number: The frame number in the input movie series. + * Movie Number: The movie number in the series. + * Local Frame Number: The frame number in the individual movie. 
+ * Frame Timestamp: The frame timestamp. In units of seconds or microseconds based on the input `time_ref` parameter. + * Bounding Box Left: X coordinate of the top left corner of the bounding box. In units of pixels. + * Bounding Box Top: Y coordinate of the top left corner of the bounding box. In units of pixels. + * Bounding Box Right: X coordinate of the bottom right corner of the bounding box. In units of pixels. + * Bounding Box Bottom: Y coordinate of the bottom right corner of the bounding box. In units of pixels. + * Bounding Box Center X: X coordinate of the center point of the bounding box. In units of pixels. + * Bounding Box Center Y: Y coordinate of the center point of the bounding box. In units of pixels. + * Confidence: The nVision tracking model confidence of the bounding box estimate. In units of %. + * Zone ID: If the nVision tracking model detected the mouse was inside a zone, this column contains the id of that zone. + * Zone Name: If the nVision tracking model detected the mouse was inside a zone, this column contains the name of that zone. + + For more details see :ref:`exportMovie`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to export frame tracking metadata from. + output_csv_file : str + The path of the csv file to be written. + time_ref : {'start', 'unix', 'tsc'} + The time reference for the CSV time stamps. + If 'start' is used, the timestamps represent the seconds since the start of the movie. + If 'unix' is used, the timestamps represent the seconds since the Unix epoch. + If 'tsc' is used, the timestamps represent the hardware counter value on the acquisition box when each frame was captured. 
+ """ + + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_nvision_movie_tracking_frame_data_to_csv( + num_movies, in_movie_arr, output_csv_file.encode('utf-8'), time_ref_int) + + +def export_nvision_movie_tracking_zone_data_to_csv(input_movie_files, output_csv_file): + """ + Export zone tracking metadata from an nVision movie. + This operation is supported for .isxb movies. + + The zone tracking metadata used by the nVision tracking model, and includes the following columns: + * ID: Unique identifier for the zone. + * Enabled: Flag indicating whether the zone is enabled for tracking. + * Name: User friendly name for the zone. + * Description: Optional description for the zone + * Type: The shape of the zone. Can be either rectangle, polygon, or ellipse. + * X `i`: The ith X coordinate of the zone. + Note: Since zones can have different shapes, they can have a different number of coordinates. + The csv output will contain `n` columns for the X coordinate, where `n` is the maxmimum + number of coordinates across all zones in the metadata. + * Y `i`: The ith Y coordinate of the zone. + Note: Since zones can have different shapes, they can have a different number of coordinates. + The csv output will contain `n` columns for the Y coordinate, where `n` is the maxmimum + number of coordinates across all zones in the metadata. + * Major Axis: only outputted for ellipse shaped zones. Length of the major axis. + * Minor Axis: only outputted for ellipse shaped zones. Length of the minor axis. + * Angle: only outputted for ellipse shaped zones. Ellipse rotation angle in degrees. + + For more details see :ref:`exportMovie`. + + Arguments + --------- + input_movie_files : list + The file paths of the movies to export frame tracking metadata from. + output_csv_file : str + The path of the csv file to be written. 
+ """ + + num_movies, in_movie_arr = isx._internal.check_input_files(input_movie_files) + isx._internal.c_api.isx_export_nvision_movie_tracking_zone_data_to_csv( + num_movies, in_movie_arr, output_csv_file.encode('utf-8')) + +def export_cell_set_to_csv_tiff(input_cell_set_files, output_csv_file, output_tiff_file, time_ref='start', output_props_file=''): + """ + Export cell sets to a CSV file with trace data and TIFF files with image data. + + For more details see :ref:`exportCellsAndStuff`. + + Unlike the desktop application, this will only produce a TIFF cell map image file + and not a PNG file too. + + Arguments + --------- + input_cell_set_files : list + The file paths of the cell sets to export. + output_csv_file : str + The path of the CSV file to write. + output_tiff_file : str + The base name of the TIFF files to write. + time_ref : {'start', 'unix'} + The time reference for the CSV time stamps. + If 'start' is used, the time stamps represent the seconds since the start of the cell set. + If 'unix' is used, the time stamps represents the second since the Unix epoch. + output_props_file : str + The path of the properties CSV file to write. + """ + num_cell_sets, in_cell_sets = isx._internal.check_input_files(input_cell_set_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_cell_set( + num_cell_sets, in_cell_sets, output_csv_file.encode('utf-8'), + output_tiff_file.encode('utf-8'), time_ref_int, False, output_props_file.encode('utf-8')) + + +def export_vessel_set_to_csv_tiff(input_vessel_set_files, output_trace_csv_file='', output_line_csv_file='', output_map_tiff_file='', output_heatmaps_tiff_dir='', time_ref='start'): + """ + Export vessel sets to a CSV file with trace data and TIFF files with image data. + + Arguments + --------- + input_vessel_set_files : list + The file paths of the vessel sets to export. 
+ output_trace_csv_file : str + The path of the trace CSV file to write. + output_line_csv_file : str + The path of the line CSV file to write. + output_map_tiff_file : str + The name of the vessel map TIFF file to write. + output_heatmaps_tiff_dir : str + The name of the directory to write correlation heatmaps as TIFF stacks. + time_ref : {'start', 'unix'} + The time reference for the CSV time stamps. + If 'start' is used, the time stamps represent the seconds since the start of the vessel set. + If 'unix' is used, the time stamps represents the second since the Unix epoch. + """ + num_vessel_sets, in_vessel_sets = isx._internal.check_input_files(input_vessel_set_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + + if all([not f for f in [output_trace_csv_file, output_line_csv_file, output_map_tiff_file, output_heatmaps_tiff_dir]]): + raise ValueError('Must provide at least one output file path.') + + isx._internal.c_api.isx_export_vessel_set( + num_vessel_sets, in_vessel_sets, + output_trace_csv_file.encode('utf-8'), output_line_csv_file.encode('utf-8'), output_map_tiff_file.encode('utf-8'), output_heatmaps_tiff_dir.encode('utf-8'), + time_ref_int) + + +def export_event_set_to_csv(input_event_set_files, output_csv_file, time_ref='start', output_props_file='', + sparse_output=True, write_amplitude=True): + """ + Export event sets to a CSV file. + + For more details see :ref:`exportCellsAndStuff`. + + Arguments + --------- + input_event_set_files : list + The file paths of the cell sets to export. + output_csv_file : str + The path of the CSV file to write. + time_ref : {'start', 'unix'} + The time reference for the CSV time stamps. + If 'start' is used, the time stamps represent the seconds since the start of the cell set. + If 'unix' is used, the time stamps represents the second since the Unix epoch. + output_props_file : str + The path of the properties CSV file to write. 
+ sparse_output: bool + If true, output events in sparse format showing all time points, + otherwise, output events in dense format showing only timepoints with events. + write_amplitude: bool + Only relevant when sparse_output is True. + If true, write amplitudes of each event, + otherwise, writes 1 where events occur and 0 elsewhere. + """ + num_event_sets, in_event_sets = isx._internal.check_input_files(input_event_set_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_event_set( + num_event_sets, in_event_sets, output_csv_file.encode('utf-8'), time_ref_int, + output_props_file.encode('utf-8'), sparse_output, write_amplitude) + + +def export_gpio_to_isxd(input_gpio_file, output_isxd_dir): + """ + Export GPIO file (.gpio, .raw, .hdf5, .imu) to an Inscopix Data File (.isxd). + + Output file will have the name _gpio.isxd + + Arguments + --------- + input_gpio_file : list + The file path of the gpio file to be exported. + output_isxd_dir : str + The path of the directory to write isxd file. + """ + isx._internal.c_api.isx_export_gpio_isxd(input_gpio_file.encode('utf-8'), output_isxd_dir.encode('utf-8')) + + +def export_gpio_set_to_csv(input_gpio_set_files, output_csv_file, inter_isxd_file_dir='/tmp', time_ref='start'): + """ + Export gpio sets to a CSV file. + + If exporting more than one file, for correct formatting, files should either be all non .imu files or all .imu files. + + For more details see :ref:`exportCellsAndStuff`. + + Arguments + --------- + input_gpio_set_files : list + The file paths of the cell sets to export. + output_csv_file : str + The path of the CSV file to write. + inter_isxd_file_dir : str + The path of the directory to put intermediate .isxd file + The default path for Mac & Linux is /tmp. The default path for Windows is the directory containing the input gpio file. + time_ref : {'start', 'unix'} + The time reference for the CSV time stamps. 
+ If 'start' is used, the time stamps represent the seconds since the start of the cell set. + If 'unix' is used, the time stamps represents the second since the Unix epoch. + """ + num_gpio_sets, in_gpio_sets = isx._internal.check_input_files(input_gpio_set_files) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_gpio_set( + num_gpio_sets, in_gpio_sets, output_csv_file.encode('utf-8'), inter_isxd_file_dir.encode('utf-8'), + time_ref_int) + +def align_start_times(input_ref_file, input_align_files): + """ + Align the epoch start times of files originating from the same paired and synchronized start-stop recording session. + The epoch start time stored in the input align files are modified in-place + so that they are aligned relative to the epoch start time of the input timing reference file + + For each input align file, the epoch start time is recomputed using the following formula: + align_epoch_start_ms = ref_epoch_start_ms + ((align_first_tsc_us - ref_first_tsc_us) / 1e3) + + In the event that the first sample of an input align file is dropped, the tsc value of the first sample is inferred using the following formula: + align_first_tsc_us = align_first_valid_tsc_us - (align_first_valid_idx * align_sample_period_us) + + Arguments + --------- + input_ref_file : str + The path of the file to use as the timing reference to align with the other input files. + This can be either a .gpio file, .isxd movie, or .isxb movie, otherwise the function will throw an error. + If the timing reference is a movie, the movie must contain frame timestamps, otherwise this function will throw an error. + input_align_files : list + The path of the files to align to the epoch start time of the input timing reference file. + These files can either be an .isxd movie or .isxb movie, otherwise the function will throw an error. 
+ The movies must contain frame timestamps, otherwise this function will throw an error. + """ + num_align_files, in_align_files = isx._internal.check_input_files(input_align_files) + isx._internal.c_api.isx_align_start_times( + input_ref_file.encode('utf-8'), + num_align_files, in_align_files + ) + +def export_aligned_timestamps(input_ref_file, input_align_files, input_ref_name, input_align_names, output_csv_file, time_ref='start'): + """ + Export timestamps from files which originate from the same paired and synced start-stop recording session to a .csv file. + Timestamps are aligned to a single start time which is defined as the start time of the specified timing reference file. + + Arguments + --------- + input_ref_file : str + The path of the file to use as the timing reference to align with the other input files. + Timestamps are exported relative to the start time of this file. + This can be either a .gpio file, .isxd movie, or .isxb movie, otherwise the function will throw an error. + If the timing reference is a movie, the movie must contain frame timestamps, otherwise this function will throw an error. + input_align_files : list + The path of the files to align to the epoch start time of the input timing reference file. + These files can either be a .gpio file, .isxd movie, or .isxb movie, otherwise the function will throw an error. + The movies must contain frame timestamps, otherwise this function will throw an error. + input_ref_name : str + The name of the reference data set to use in the output csv. + input_align_names : list + The names of the align data sets to use in the output csv. + output_csv_file : str + The path of the csv file to be written. + time_ref : {'start', 'unix', 'tsc'} + The time reference for the CSV time stamps. + If 'start' is used, the timestamps represent the seconds since the start of the movie. + If 'unix' is used, the timestamps represent the seconds since the Unix epoch. 
+ If 'tsc' is used, the timestamps represent the hardware counter value on the acquisition box when each frame was captured. + """ + num_align_files, in_align_files = isx._internal.check_input_files(input_align_files) + _, in_align_names = isx._internal.check_input_files(input_align_names) + time_ref_int = isx._internal.lookup_enum('time_ref', isx._internal.TIME_REF_FROM_STRING, time_ref) + isx._internal.c_api.isx_export_aligned_timestamps( + input_ref_file.encode('utf-8'), + num_align_files, in_align_files, + input_ref_name.encode('utf-8'), in_align_names, + output_csv_file.encode('utf-8'), + time_ref_int + ) + +def _get_ethovision_header_size(ethovision_file): + """ + Open an Ethovision file to get the header length. + + Arguments + --------- + ethovision_file: str + The path to the Ethovision file. + + Returns + ------- + int: + The number of rows in the header of the Ethovision file. + """ + # Check ethovision file format, use appropriate Pandas import method + if ethovision_file.lower().endswith('.csv'): + raw_ethovision_file = pd.read_csv(ethovision_file, header=None).set_index(0) + elif ethovision_file.lower().endswith('.xlsx'): + raw_ethovision_file = pd.read_excel(ethovision_file, header=None).set_index(0) + else: + raise ValueError("Only .csv or .xlsx file formats are accepted for Ethovision files.") + + # Extract the number of header lines from the ethovision file + number_of_header_lines = int(raw_ethovision_file.loc['Number of header lines:'][1]) + return number_of_header_lines + +def _load_ethovision_data(ethovision_file): + """ + Load the data from the Ethovision file, ignoring the header. + Header length will be read from the file to ensure that the correct number of rows are ignored. + Note that Ethovision convention is to encode NaN data with the '-' character. This function + replaces all '-' with NaN. + + Arguments + --------- + ethovision_file: str + The path to the Ethovision file. 
+ + Returns + ------- + Pandas.DataFrame: + Dataframe with all columns from Ethovision file returned. + """ + # get number of header lines + number_of_header_lines = _get_ethovision_header_size(ethovision_file) + + # load the data using either `pd.read_csv` or `pd.read_excel`, depending on extension + if ethovision_file.endswith('.csv'): + ethovision_data = pd.read_csv( + ethovision_file, + header=number_of_header_lines-2, + skiprows=[number_of_header_lines-1], + low_memory=False + ).replace('-', np.nan) + elif ethovision_file.endswith('.xlsx'): + ethovision_data = pd.read_excel( + ethovision_file, + header=number_of_header_lines-2, + skiprows=[number_of_header_lines-1] + ).replace('-', np.nan) + else: + raise ValueError("Only .csv or .xlsx file formats are accepted for Ethovision files.") + + return ethovision_data + +def export_ethovision_data_with_isxb_timestamps( + input_ethovision_file, + input_isxb_file, + output_csv_file, + input_ref_file=None, + time_ref='start' +): + """ + Given paths to an Ethovision file and an nVision (.isxb) movie file, + writes a csv file with Ethovision data and a dedicated column, + either `isxb Frame Timestamp (s)` or `isxb Frame Timestamp (us)` + depending on the format of the timestamps, + that contains aligned timestamps from the nVision movie. + + It is important to note that internal Inscopix testing has shown that the Ethovision output file + can have a length that is one less than the number of frames in the input movie. When this is true, + the missing data row appears to be at the beginning. + This function applies .isxb timestamps to the Ethovision table while ignoring the first timestamp + to compensate for the length mismatch if the mismatch exists. More recent versions of Ethovision + should prevent this from occurring. + + Arguments + --------- + input_ethovision_file: str + The path to the Ethovision file (.csv, .xlsx). + input_isxb_file: str + The path to the nVision (.isxb) movie file. 
+ output_csv_file: str + The path to the output csv file. + input_ref_file: str | None + The path to the reference file (.isxd, .gpio, .isxb). + Timestamps from the .isxb file are aligned to the start time of this file. + Generally, this reference is a file from a miniscope recording which was synchronized with this .isxb file. + This argument is required if the `time_ref` is selected as 'start' or 'unix'. + If the .isxb file is a standalone behaviour recording that is not synchronized to any miniscope file, + simply provide the .isxb file as the reference (if it's required). + time_ref : {'start', 'unix', 'tsc'} + The time reference for the nVision (.isxb) timestamps. + If 'tsc' is used, the timestamps represent the hardware counter value in microseconds on the acquisition box when each frame was captured. + If 'start' is used, the timestamps represent the seconds since the start of the experiment. + Note: `input_ref_file` must be specified if this option is used, otherwise an exception is thrown. + Timestamps are exported relative to the start time of the reference file. + If 'unix' is used, the timestamps represent the seconds since the Unix epoch. + Note: `input_ref_file` must be specified if this option is used, otherwise an exception is thrown. + The `input_ref_file` ensures that `isx.align_start_times` has been called + on the .isxb file with the corresponding, synchronized miniscope file. 
+ """ + if os.path.exists(output_csv_file): + raise FileExistsError('File already exists: {}'.format(output_csv_file)) + + if (time_ref == 'start' or time_ref == 'unix') and input_ref_file is None: + raise ValueError("An input reference file is required for time_ref = 'start' or time_ref = 'unix'.") + + # read timestamps for isxb movie + if time_ref == 'start': + isx.export_aligned_timestamps( + input_ref_file=input_ref_file, + input_align_files=[input_isxb_file], + input_ref_name='ref', + input_align_names=['isxb'], + output_csv_file=output_csv_file, + time_ref='start' + ) + assert os.path.exists(output_csv_file) + isxb_timestamps_df = pd.read_csv(output_csv_file) + isxb_timestamps = isxb_timestamps_df['isxb Timestamp (s)'].tolist() + timestamp_column_name = 'isxb Frame Timestamp (s)' + else: + if time_ref == 'unix': + isx.align_start_times( + input_ref_file=input_ref_file, + input_align_files=[input_isxb_file] + ) + + isx.export_movie_timestamps_to_csv( + input_movie_files=input_isxb_file, + output_csv_file=output_csv_file, + time_ref=time_ref + ) + assert os.path.exists(output_csv_file) + timestamp_column_index = 3 + isxb_timestamps_df = pd.read_csv(output_csv_file) + timestamp_column_name = isxb_timestamps_df.columns[timestamp_column_index] + isxb_timestamps = isxb_timestamps_df[timestamp_column_name].tolist() + timestamp_column_name = "isxb " + timestamp_column_name + + os.remove(output_csv_file) + + # get ethovision data + ethovision_data = _load_ethovision_data(input_ethovision_file) + if len(ethovision_data) == len(isxb_timestamps) - 1: + # deal with off-by-one issue + warning_text = ( + "Your Ethovision data file is one element shorter " + "than the number of frames in your movie. " + "This is a known issue in earlier releases of Ethovision. " + "To correct for it, the first timestamp in your array " + "of timestamps is being dropped. Please consider updating to " + "the newest version of Ethovision to avoid this behavior." 
+ ) + warnings.warn(warning_text) + + isxb_timestamps_to_use = isxb_timestamps[1:] + + elif len(ethovision_data) == len(isxb_timestamps): + # equal lengths are ideal + isxb_timestamps_to_use = isxb_timestamps + else: + # else, throw a ValueError + raise ValueError( + "Length of timestamps array " + f"({len(isxb_timestamps)}) " + "is not the same as (or within one) of the ethovision table " + f"({len(ethovision_data)})" + ) + + # get ethovision columns + ethovision_columns = ethovision_data.columns + + # add a column with `isxb Frame Timestamp (s)/(us)`` to the ethovision data, ignoring the first to deal with mismatch + ethovision_data[timestamp_column_name] = isxb_timestamps_to_use + + # return ethovision data with `isxb Frame Timestamp (s)/(us)` column in leftmost position + aligned_data_table = ethovision_data[[timestamp_column_name] + ethovision_columns.tolist()] + aligned_data_table.to_csv(output_csv_file, index=False) diff --git a/isx/test/__init__.py b/isx/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/isx/test/asserts.py b/isx/test/asserts.py new file mode 100644 index 0000000..4d1164e --- /dev/null +++ b/isx/test/asserts.py @@ -0,0 +1,281 @@ +import json + +import h5py +import numpy as np +import pandas +import tifffile as tf +import scipy.spatial.distance + +import isx + + +def compare_h5_attrs(actual, expected): + for k in expected.keys(): + if isinstance(expected[k], np.ndarray): + assert (actual[k] == expected[k]).all() + else: + if not str(expected[k]) == 'nan': + assert actual[k] == expected[k] + + +def compare_h5_datasets(actual, expected): + assert actual.shape == expected.shape + assert actual.size == expected.size + assert actual.dtype == expected.dtype + compare_h5_attrs(actual.attrs, expected.attrs) + if actual.shape == (): + assert expected.shape == () + else: + assert (actual[:] == expected[:]).all() + + +def compare_h5_groups(actual, expected, keys_to_skip): + for k in expected.keys(): + if k in keys_to_skip: + 
continue + else: + compare_h5_attrs(actual[k].attrs, expected[k].attrs) + if isinstance(expected[k], h5py.Dataset): + compare_h5_datasets(actual[k], expected[k]) + elif isinstance(expected[k], h5py.Group): + compare_h5_groups(actual[k], expected[k], keys_to_skip) + else: + assert False + + +def assert_isxd_movies_are_close(exp_movie, act_movie, relative_tolerance=1e-05): + assert exp_movie.data_type == act_movie.data_type + assert exp_movie.spacing == act_movie.spacing + assert exp_movie.timing == act_movie.timing + for f in range(act_movie.timing.num_samples): + exp_frame = exp_movie.get_frame_data(f) + act_frame = act_movie.get_frame_data(f) + np.testing.assert_allclose(exp_frame, act_frame, rtol=relative_tolerance) + + +def assert_isxd_movies_are_close_by_path(exp_movie_path, act_movie_path, relative_tolerance=1e-05): + exp_movie = isx.Movie.read(exp_movie_path) + assert exp_movie.file_path == exp_movie_path + act_movie = isx.Movie.read(act_movie_path) + assert act_movie.file_path == act_movie_path + + assert exp_movie.data_type == act_movie.data_type + assert exp_movie.spacing == act_movie.spacing + assert exp_movie.timing == act_movie.timing + for f in range(act_movie.timing.num_samples): + exp_frame = exp_movie.get_frame_data(f) + act_frame = act_movie.get_frame_data(f) + np.testing.assert_allclose(exp_frame, act_frame, rtol=relative_tolerance) + + +def assert_isxd_movies_are_close_range_by_path(exp_movie_path, act_movie_path, + x_range, y_range, relative_tolerance=1e-05): + exp_movie = isx.Movie.read(exp_movie_path) + assert exp_movie.file_path == exp_movie_path + act_movie = isx.Movie.read(act_movie_path) + assert act_movie.file_path == act_movie_path + + assert exp_movie.data_type == act_movie.data_type + assert exp_movie.spacing == act_movie.spacing + assert exp_movie.timing == act_movie.timing + for f in range(act_movie.timing.num_samples): + exp_frame = exp_movie.get_frame_data(f) + act_frame = act_movie.get_frame_data(f) + 
np.testing.assert_allclose(exp_frame[slice(*y_range), slice(*x_range)], + act_frame[slice(*y_range), slice(*x_range)], + rtol=relative_tolerance) + + +def assert_isxd_images_are_close_by_path_nan_zero(exp_image_path, act_image_path, relative_tolerance=1e-05): + exp_image = isx.Image.read(exp_image_path) + assert exp_image.file_path == exp_image_path + act_image = isx.Image.read(act_image_path) + assert act_image.file_path == act_image_path + + assert exp_image.data_type == act_image.data_type + assert exp_image.spacing == act_image.spacing + + exp_data = np.nan_to_num(exp_image.get_data()) + act_data = np.nan_to_num(act_image.get_data()) + np.testing.assert_allclose(exp_data, act_data, rtol=relative_tolerance) + + +def assert_isxd_cellsets_are_close_by_path(exp_cellset_path, act_cellset_path, relative_tolerance=1e-05, assert_spacing=True, assert_status=True, use_cosine=False): + exp_cellset = isx.CellSet.read(exp_cellset_path) + assert exp_cellset.file_path == exp_cellset_path + act_cellset = isx.CellSet.read(act_cellset_path) + assert act_cellset.file_path == act_cellset_path + + assert exp_cellset.num_cells == act_cellset.num_cells + if assert_spacing: + assert exp_cellset.spacing == act_cellset.spacing + assert exp_cellset.timing == act_cellset.timing + for c in range(exp_cellset.num_cells): + if assert_status: + assert exp_cellset.get_cell_status(c) == act_cellset.get_cell_status(c) + + if use_cosine: + if np.linalg.norm(exp_cellset.get_cell_image_data(c).flatten()) == 0 or np.linalg.norm(act_cellset.get_cell_image_data(c).flatten()) == 0: + assert np.linalg.norm(exp_cellset.get_cell_image_data(c).flatten()) == np.linalg.norm(act_cellset.get_cell_image_data(c).flatten()) + else: + assert (1.0 - scipy.spatial.distance.cosine(exp_cellset.get_cell_image_data(c).flatten(), act_cellset.get_cell_image_data(c).flatten())) >= relative_tolerance + if np.linalg.norm(exp_cellset.get_cell_trace_data(c)) == 0 or np.linalg.norm(act_cellset.get_cell_trace_data(c)) == 0: + 
assert np.linalg.norm(exp_cellset.get_cell_trace_data(c)) == np.linalg.norm(act_cellset.get_cell_trace_data(c)) + else: + assert (1.0 - scipy.spatial.distance.cosine(exp_cellset.get_cell_trace_data(c), act_cellset.get_cell_trace_data(c))) >= relative_tolerance + else: + np.testing.assert_allclose(exp_cellset.get_cell_image_data(c), act_cellset.get_cell_image_data(c), rtol=relative_tolerance) + np.testing.assert_allclose(exp_cellset.get_cell_trace_data(c), act_cellset.get_cell_trace_data(c), rtol=relative_tolerance) + +def assert_isxd_cellsets_trace_sums(output_cell_set_files, expected_trace_sums): + cell_sets = [isx.CellSet.read(f) for f in output_cell_set_files] + num_cells = cell_sets[0].num_cells + for i in range(num_cells): + trace_sum = 0 + for cell_set in cell_sets: + trace = cell_set.get_cell_trace_data(i) + trace_sum += np.sum(trace) + + assert round(trace_sum) == expected_trace_sums[i] + +def assert_isxd_cellsets_cell_names(output_cell_set_files, cell_names): + cell_sets = [isx.CellSet.read(f) for f in output_cell_set_files] + num_cells = cell_sets[0].num_cells + for i in range(num_cells): + for cell_set in cell_sets: + assert cell_set.get_cell_name(i) == cell_names[i] + +def assert_isxd_vesselsets_are_close_by_path(exp_vesselset_path, act_vesselset_path, relative_tolerance=1e-05, assert_spacing=True, assert_status=True): + exp_vesselset = isx.VesselSet.read(exp_vesselset_path) + assert exp_vesselset.file_path == exp_vesselset_path + act_vesselset = isx.VesselSet.read(act_vesselset_path) + assert act_vesselset.file_path == act_vesselset_path + + assert exp_vesselset.num_vessels == act_vesselset.num_vessels + if assert_spacing: + assert exp_vesselset.spacing == act_vesselset.spacing + + vessel_type = exp_vesselset.get_vessel_set_type() + assert vessel_type == act_vesselset.get_vessel_set_type() + assert exp_vesselset.has_correlation_heatmaps() == act_vesselset.has_correlation_heatmaps() + + assert exp_vesselset.timing == act_vesselset.timing + for c in 
range(exp_vesselset.num_vessels): + if assert_status: + assert exp_vesselset.get_vessel_status(c) == act_vesselset.get_vessel_status(c) + + np.testing.assert_allclose(exp_vesselset.get_vessel_image_data(c), act_vesselset.get_vessel_image_data(c), rtol=relative_tolerance) + np.testing.assert_allclose(exp_vesselset.get_vessel_trace_data(c), act_vesselset.get_vessel_trace_data(c), rtol=relative_tolerance) + + if vessel_type == isx.VesselSet.VesselSetType.VESSEL_DIAMETER: + np.testing.assert_allclose(exp_vesselset.get_vessel_center_trace_data(c), act_vesselset.get_vessel_center_trace_data(c), rtol=relative_tolerance) + else: + np.testing.assert_allclose(exp_vesselset.get_vessel_direction_trace_data(c), act_vesselset.get_vessel_direction_trace_data(c), rtol=relative_tolerance) + + if exp_vesselset.has_correlation_heatmaps(): + for t in range(exp_vesselset.timing.num_samples): + np.testing.assert_allclose(exp_vesselset.get_vessel_correlations_data(c, t), act_vesselset.get_vessel_correlations_data(c, t), rtol=relative_tolerance) + + +def assert_isxd_cellsets_are_close_range_by_path(exp_cellset_path, act_cellset_path, + x_range, y_range, relative_tolerance=1e-05, + absolute_tolerance=0): + exp_cellset = isx.CellSet.read(exp_cellset_path) + assert exp_cellset.file_path == exp_cellset_path + act_cellset = isx.CellSet.read(act_cellset_path) + assert act_cellset.file_path == act_cellset_path + + assert exp_cellset.num_cells == act_cellset.num_cells + assert exp_cellset.spacing == act_cellset.spacing + assert exp_cellset.timing == act_cellset.timing + for c in range(exp_cellset.num_cells): + assert exp_cellset.get_cell_status(c) == act_cellset.get_cell_status(c) + np.testing.assert_allclose(exp_cellset.get_cell_image_data(c)[slice(*y_range), slice(*x_range)], + act_cellset.get_cell_image_data(c)[slice(*y_range), slice(*x_range)], + rtol=relative_tolerance, atol=absolute_tolerance) + np.testing.assert_allclose(exp_cellset.get_cell_trace_data(c), 
act_cellset.get_cell_trace_data(c), + rtol=relative_tolerance) + + +def assert_isxd_event_sets_are_close_by_path(exp_set_path, act_set_path, relative_tolerance=1e-05, use_cosine=False): + exp_events = isx.EventSet.read(exp_set_path) + assert exp_events.file_path == exp_set_path + act_events = isx.EventSet.read(act_set_path) + assert act_events.file_path == act_set_path + + assert exp_events.num_cells == act_events.num_cells + assert exp_events.timing == act_events.timing + for c in range(exp_events.num_cells): + assert act_events.get_cell_name(c) == exp_events.get_cell_name(c) + exp_cell_time, exp_cell_data = exp_events.get_cell_data(c) + act_cell_time, act_cell_data = act_events.get_cell_data(c) + + if use_cosine: + assert (1.0 - scipy.spatial.distance.cosine(exp_cell_time, act_cell_time)) >= relative_tolerance + assert (1.0 - scipy.spatial.distance.cosine(exp_cell_data, act_cell_data)) >= relative_tolerance + else: + np.testing.assert_allclose(exp_cell_time, act_cell_time, rtol=relative_tolerance) + np.testing.assert_allclose(exp_cell_data, act_cell_data, rtol=relative_tolerance) + + +def assert_tiff_files_equal_by_path(expected_file_path, tiff_file_path): + exp_tiff = tf.imread(expected_file_path) + actual_tiff = tf.imread(tiff_file_path) + np.testing.assert_array_equal(exp_tiff, actual_tiff) + + +def assert_csv_traces_are_close_by_path(expected_csv_path, output_csv_path, relative_tolerance=1e-05): + with open(expected_csv_path) as expected_csv, open(output_csv_path) as output_csv: + expected_data = pandas.read_csv(expected_csv, header=[0, 1]) + output_data = pandas.read_csv(output_csv, header=[0, 1]) + np.testing.assert_allclose(expected_data, output_data, rtol=relative_tolerance) + + +def assert_csv_events_are_equal_by_path(expected_csv_path, output_csv_path): + assert_csv_files_are_equal_by_path(expected_csv_path, output_csv_path) + + +def assert_csv_files_are_equal_by_path(expected_csv_path, output_csv_path): + with open(expected_csv_path) as expected_csv, 
open(output_csv_path) as output_csv: + expected_data = pandas.read_csv(expected_csv) + output_data = pandas.read_csv(output_csv) + np.testing.assert_array_equal(expected_data, output_data) + assert expected_data.columns.tolist() == output_data.columns.tolist() + + +def assert_csv_files_are_close_by_path(expected_csv_path, output_csv_path, relative_tolerance=1e-05): + with open(expected_csv_path) as expected_csv, open(output_csv_path) as output_csv: + expected_data = pandas.read_csv(expected_csv) + output_data = pandas.read_csv(output_csv) + np.testing.assert_allclose(expected_data, output_data, rtol=relative_tolerance) + + +def assert_csv_cell_metrics_are_close_by_path(expected_csv_path, output_csv_path, relative_tolerance=1e-05): + with open(expected_csv_path) as expected_csv, open(output_csv_path) as output_csv: + expected_data = pandas.read_csv(expected_csv, header=0) + output_data = pandas.read_csv(output_csv, header=0) + np.testing.assert_array_equal(expected_data['cellName'], output_data['cellName']) + + exp_num_data, out_num_data = [df.drop('cellName', axis=1) for df in [expected_data, output_data]] + np.testing.assert_allclose(exp_num_data, out_num_data, rtol=relative_tolerance) + + +def assert_json_files_equal_by_path(expected, output): + with open(expected) as expected_file, open(output) as output_file: + expected_json = json.load(expected_file) + output_json = json.load(output_file) + assert expected_json == output_json + +def assert_txt_files_are_equal_by_path(expected, output): + with open(expected) as expected_txt, open(output) as output_txt: + for expected_line, output_line in zip(expected_txt, output_txt): + assert expected_line == output_line + +def assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_csv_path, output_csv_path, relative_tolerance=1e-5): + with open(expected_csv_path) as expected_csv, open(output_csv_path) as output_csv: + expected_data = pandas.read_csv(expected_csv) + output_data = pandas.read_csv(output_csv) + 
assert expected_data.columns.tolist() == output_data.columns.tolist() + expected_scores = np.array(expected_data.values[:, 1:], dtype=float) + output_scores = np.array(output_data.values[:, 1:], dtype=float) + np.testing.assert_allclose(expected_scores, output_scores, rtol=relative_tolerance) diff --git a/isx/test/test_algos.py b/isx/test/test_algos.py new file mode 100644 index 0000000..347f7cb --- /dev/null +++ b/isx/test/test_algos.py @@ -0,0 +1,2207 @@ +from test.utilities.setup import delete_files_silently, delete_dirs_silently, test_data_path, is_file + +import os +import csv +import numpy as np +import pandas as pd +import pytest +from shutil import copyfile + +import isx + +from test.asserts import assert_csv_cell_metrics_are_close_by_path, assert_isxd_cellsets_are_close_by_path, \ + assert_isxd_movies_are_close, assert_isxd_movies_are_close_by_path, assert_isxd_event_sets_are_close_by_path, \ + assert_csv_files_are_equal_by_path, assert_csv_files_are_close_by_path, assert_txt_files_are_equal_by_path, \ + assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path, \ + assert_isxd_vesselsets_are_close_by_path, assert_csv_traces_are_close_by_path, \ + assert_json_files_equal_by_path, assert_tiff_files_equal_by_path, \ + assert_isxd_cellsets_trace_sums, \ + assert_isxd_cellsets_cell_names + +@pytest.mark.skipif(not isx._is_with_algos, reason="Only for algo tests") +class TestAlgorithms: + @pytest.mark.isxd_movie + def test_DeinterleaveMovie(self): + test_dir = test_data_path + '/unit_test/VI' + input_movie_bases = [ + 'de_interleave_15_1_2_0_simple', + 'de_interleave_15_1_2_0_complex', + 'de_interleave_15_1_2_1_simple' + ] + efocus_values = [599, 800, 1000] + + input_movie_files = [] + output_movie_files = [] + expected = [] + for file_base in input_movie_bases: + input_movie_files.append(test_dir + '/' + file_base + '.isxd') + for efocus in efocus_values: + output_movie_files.append(test_data_path + '/unit_test/output/' + file_base + '-efocus_' + 
str(efocus).zfill(4) + '.isxd') + + # IDPS-857 Upgrade version of test files since higher precision sampling rate results + # in a slightly different computed start time for de-interleaved movies + # IDPS-900 Upgrade version of test files since epoch start time is calculated based on tsc values + # IDPS-1022 Upgrade version of test files after fixing bug with start time calculation + expected.append(test_data_path + '/unit_test/guilded/' + file_base + '-efocus_' + str(efocus).zfill(4) + '_v3.isxd') + + delete_files_silently(output_movie_files) + + isx.de_interleave(input_movie_files, output_movie_files, efocus_values) + + for o, e in zip(output_movie_files, expected): + assert_isxd_movies_are_close_by_path(e, o) + + delete_files_silently(output_movie_files) + + @pytest.mark.isxd_movie + def test_PreprocessMovie(self): + input_movie_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_pp.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicPreProcessMovie_output.isxd' + + delete_files_silently([output_movie_file]) + + isx.preprocess(input_movie_file, + output_movie_file, + temporal_downsample_factor=1, + spatial_downsample_factor=2, + crop_rect=[245, 245, 254, 254], + fix_defective_pixels=True) + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_movie_file) + assert act_movie.file_path == output_movie_file + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(6, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(6, 1) + exp_movie.spacing._impl.left = isx._internal.IsxRatio(735, 1) + exp_movie.spacing._impl.top = isx._internal.IsxRatio(735, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + del exp_movie + del act_movie + delete_files_silently([output_movie_file]) + + + @pytest.mark.isxd_movie + def test_PreprocessMovieCropTLWH(self): + # Test specifying crop rect as 
tlwh instead of tlbr and expect the same output + input_movie_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_pp.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicPreProcessMovie_output.isxd' + + delete_files_silently([output_movie_file]) + + isx.preprocess(input_movie_file, + output_movie_file, + temporal_downsample_factor=1, + spatial_downsample_factor=2, + crop_rect=[245, 245, 10, 10], + crop_rect_format="tlwh", + fix_defective_pixels=True) + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_movie_file) + assert act_movie.file_path == output_movie_file + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(6, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(6, 1) + exp_movie.spacing._impl.left = isx._internal.IsxRatio(735, 1) + exp_movie.spacing._impl.top = isx._internal.IsxRatio(735, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + del exp_movie + del act_movie + delete_files_silently([output_movie_file]) + + + @pytest.mark.isxd_movie + def test_PreprocessMovieCropTLWHRectangle(self): + # Test specifying crop rect with unequal sides as tlwh instead of tlbr + input_movie_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_pp.isxd' + + delete_files_silently([output_movie_file]) + + top_left_x, top_left_y = (245, 245) + width, height = (10, 20) + isx.preprocess(input_movie_file, + output_movie_file, + temporal_downsample_factor=1, + spatial_downsample_factor=1, + crop_rect=[top_left_x, top_left_y, width, height], + crop_rect_format="tlwh", + fix_defective_pixels=True) + + act_movie = isx.Movie.read(output_movie_file) + assert act_movie.file_path == output_movie_file + assert act_movie.spacing.num_pixels == (height, width) # num_pixels returns (num_rows, num_cols) -> 
(height, width) + + del act_movie + delete_files_silently([output_movie_file]) + + + @pytest.mark.isxd_movie + def test_PreprocessMovieInvalidCropRectFormat(self): + # Test specifying invalid crop rect raises error + input_movie_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_pp.isxd' + + with pytest.raises(Exception) as error: + isx.preprocess(input_movie_file, + output_movie_file, + crop_rect_format="something bad" + ) + assert "Invalid crop rect format (something bad), must be one of the following: ('tlbr', 'tlwh')" in str(error.value) + + + @pytest.mark.isxd_movie + def test_PreprocessMovieNoCropRect(self): + input_movie_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_ppnorect.isxd' + + delete_files_silently([output_movie_file]) + + isx.preprocess(input_movie_file, + output_movie_file, + temporal_downsample_factor=1, + spatial_downsample_factor=2, + fix_defective_pixels=True) + + mov1 = isx.Movie.read(input_movie_file) + mov2 = isx.Movie.read(output_movie_file) + + assert np.sum(np.abs(np.array(mov1.spacing.num_pixels) - np.array(mov2.spacing.num_pixels)*2)) == 0 + + del mov1 + del mov2 + delete_files_silently([output_movie_file]) + + @pytest.mark.tiff_movie + def test_PreprocessTiffMovie(self): + # I added this test to verify that TIFF inputs can be used in processing steps, + # but get the default timing. 
+ input_movie_file = test_data_path + '/unit_test/recording_20161104_145443.tif' + output_movie_file = test_data_path + '/unit_test/output/test_output_pp_tiff.isxd' + delete_files_silently([output_movie_file]) + + isx.preprocess(input_movie_file, output_movie_file) + + movie = isx.Movie.read(output_movie_file) + assert movie.timing.start == isx.Time() + assert movie.timing.period == isx.Duration.from_msecs(50) + + del movie + delete_files_silently([output_movie_file]) + + def test_PreprocessTrimmedMovie(self): + # (IDPS-1120): A bug was discovered with the preprocess function where it assumes + # that the start time of isxd files is stored as a ratio of ms / 1000 + # but the trim movie operation changes the start time to be a ratio of us / 1000000. + # This assumption led to output preprocessed files with an incorrect start time far into the future. + # This test validates that trimming and then preprocessing a movie produces + # an output with a valid start time. + + input_movie_file = test_data_path + '/unit_test/early_frames/2019-03-18-15-56-10_video_trig_0.isxd' + trimmed_movie_file = test_data_path + '/unit_test/output/2019-03-18-15-56-10_video_trig_0-TPC.isxd' + output_movie_file = test_data_path + '/unit_test/output/2019-03-18-15-56-10_video_trig_0-TPC-PP.isxd' + delete_files_silently([trimmed_movie_file, output_movie_file]) + + trim_range = [(0, 84)] + isx.trim_movie(input_movie_file, trimmed_movie_file, trim_range) + + isx.preprocess(trimmed_movie_file, output_movie_file) + + movie = isx.Movie.read(output_movie_file) + + import datetime + assert movie.timing.start.to_datetime() == datetime.datetime( + year=2019, + month=3, + day=18, + hour=15, + minute=56, + second=19, + microsecond=82155 + ) + + del movie + delete_files_silently([trimmed_movie_file, output_movie_file]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovie(self): + input_movie = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + 
output_movie = test_data_path + '/unit_test/output/test_output_mc.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicMotionCorrectMovie_output-v3.isxd' + + delete_files_silently([output_movie]) + + lowCutoff = 0.025 + highCutoff = 0.2 + roi = [[350, 225], [370, 390], [660, 390], [660, 225]] + + isx.motion_correct(input_movie, output_movie, max_translation=50, + low_bandpass_cutoff=lowCutoff, high_bandpass_cutoff=highCutoff, roi=roi, + reference_segment_index=0, reference_frame_index=0, reference_file_name='') + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_movie) + assert act_movie.file_path == output_movie + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.left = isx._internal.IsxRatio(6, 1) + exp_movie.spacing._impl.top = isx._internal.IsxRatio(39, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + del exp_movie + del act_movie + delete_files_silently([output_movie]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieNoRoi(self): + input_movie = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + output_movie = test_data_path + '/unit_test/output/test_output_mc_nr.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicMotionCorrectMovieNoRoi_output.isxd' + + delete_files_silently([output_movie]) + + isx.motion_correct(input_movie, output_movie, max_translation=50, low_bandpass_cutoff=0.025, high_bandpass_cutoff=0.2) + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_movie) + assert act_movie.file_path == output_movie + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.left = isx._internal.IsxRatio(3, 1) + 
exp_movie.spacing._impl.top = isx._internal.IsxRatio(18, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + del exp_movie + del act_movie + delete_files_silently([output_movie]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieHourglassRoi(self): + input_movie = test_data_path + '/unit_test/recording_20161104_145543-PP.isxd' + output_movie = test_data_path + '/unit_test/output/test_output_mc_hr.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicMotionCorrectMovieHourglassRoi_output-v2.isxd' + delete_files_silently([output_movie]) + + isx.motion_correct(input_movie, output_movie, roi=[[42, 54], [203, 294], [53, 301], [161, 38]]) + + assert_isxd_movies_are_close_by_path(expected, output_movie) + + delete_files_silently([output_movie]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieOutputTranslations(self): + input_movie = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + output_movie = test_data_path + '/unit_test/output/test_output_mc_ot.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicMotionCorrectMovie_output-v3.isxd' + output_trans = test_data_path + '/unit_test/output/test_output_mc_translations.csv' + + delete_files_silently([output_movie, output_trans]) + + lowCutoff = 0.025 + highCutoff = 0.2 + roi = [[350, 225], [370, 390], [660, 390], [660, 225]] + + isx.motion_correct(input_movie, output_movie, max_translation=50, + low_bandpass_cutoff=lowCutoff, high_bandpass_cutoff=highCutoff, roi=roi, + reference_segment_index=0, reference_frame_index=0, reference_file_name='', + output_translation_files=output_trans) + + assert os.path.exists(output_trans), "Missing expected output translations" + + df = pd.read_csv(output_trans) + col_names = df.keys() + assert col_names[0] == 'translationX' + assert col_names[1] == 'translationY' + + out_mov = isx.Movie.read(output_movie) + num_frames = out_mov.timing.num_samples + del out_mov + + assert len(df) == 
num_frames, "# of output translations does not match number of frames: {} != {}".format(len(df), num_frames) + + delete_files_silently([output_movie]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieOutputCropRect(self): + input_movie = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + output_movie = test_data_path + '/unit_test/output/test_output_mc_cr.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicMotionCorrectMovie_output-v3.isxd' + output_rect = test_data_path + '/unit_test/output/test_output_mc_crop_rect.csv' + + delete_files_silently([output_movie, output_rect]) + + lowCutoff = 0.025 + highCutoff = 0.2 + roi = [[350, 225], [370, 390], [660, 390], [660, 225]] + + isx.motion_correct(input_movie, output_movie, max_translation=50, + low_bandpass_cutoff=lowCutoff, high_bandpass_cutoff=highCutoff, roi=roi, + reference_segment_index=0, reference_frame_index=0, reference_file_name='', + output_crop_rect_file=output_rect) + + assert os.path.exists(output_rect), "Missing expected output crop rectangle" + + with open(output_rect, 'r') as f: + lines = f.readlines() + + assert len(lines) == 1 + crop_rect = [int(x.strip()) for x in lines[0].split(',')] + + assert len(crop_rect) == 4 + x,y,width,height = crop_rect + + # verify that the output movie size matches the crop rectangle width and height + out_mov = isx.Movie.read(output_movie) + num_rows,num_cols = out_mov.spacing.num_pixels + out_first_frame = out_mov.get_frame_data(0) + del out_mov + + assert width == num_cols, "# of columns in output movie does not match width of crop rect: {} != {}".format(num_cols, width) + assert height == num_rows, "# of rows in output movie does not match height of crop rect: {} != {}".format(num_rows, height) + + # verify that the first frame of the input and output movies match after cropping + in_mov = isx.Movie.read(input_movie) + num_rows,num_cols = in_mov.spacing.num_pixels + in_first_frame = 
in_mov.get_frame_data(0) + del in_mov + + in_first_frame_cropped = in_first_frame[y:(y+height), x:(x+width)] + assert in_first_frame_cropped.shape == out_first_frame.shape + + assert np.max(np.abs(in_first_frame_cropped - out_first_frame)) < 1e-6 + + delete_files_silently([output_movie]) + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieWithNoPadding(self): + input_movie_file = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_mc_nr.isxd' + + delete_files_silently([output_movie_file]) + + isx.motion_correct(input_movie_file, output_movie_file, max_translation=50, low_bandpass_cutoff=0.025, high_bandpass_cutoff=0.2, preserve_input_dimensions=False) + + input_movie = isx.Movie.read(input_movie_file) + output_movie = isx.Movie.read(output_movie_file) + + assert output_movie.spacing.num_pixels != input_movie.spacing.num_pixels + + del input_movie + del output_movie + delete_files_silently([output_movie_file]) + + + @pytest.mark.isxd_movie + def test_MotionCorrectMovieWithPadding(self): + input_movie_file = test_data_path + '/unit_test/motionCorrection/Inscopix-ratHipp2-recording_20160707_104710-pp-trim.hdf5' + output_movie_file = test_data_path + '/unit_test/output/test_output_mc_nr.isxd' + + delete_files_silently([output_movie_file]) + + isx.motion_correct(input_movie_file, output_movie_file, max_translation=50, low_bandpass_cutoff=0.025, high_bandpass_cutoff=0.2, preserve_input_dimensions=True) + + input_movie = isx.Movie.read(input_movie_file) + output_movie = isx.Movie.read(output_movie_file) + + assert output_movie.spacing.num_pixels == input_movie.spacing.num_pixels + + del input_movie + del output_movie + delete_files_silently([output_movie_file]) + + @pytest.mark.isxd_trace + def test_PcaIcaMovie(self): + movie_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + 
'/unit_test/output/test_output_pcaica.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicPcaIcaMovie_output.isxd' + + delete_files_silently([output_file]) + + converged = isx.pca_ica(movie_file, output_file, 150, 120, unmix_type='temporal', ica_temporal_weight=0, + max_iterations=500, convergence_threshold=1e-5, block_size=1000, + auto_estimate_num_ics=False, average_cell_diameter=10) + + assert converged + + assert_isxd_cellsets_are_close_by_path(expected, output_file, relative_tolerance=1e-3) + + delete_files_silently([output_file]) + + # TODO: The test below is when using SCS as opposed to OASIS. SCS is not currently exposed via the Python API. + # @pytest.mark.isxd_movie + # def test_CnmfeMovie(self): + # input_movie_files = [test_data_path + '/unit_test/cnmfe-cpp/CnmfeAlgoMovie30x30x100.isxd'] + # output_cellset_files = [test_data_path + '/unit_test/output/test_output_cellset_cnmfe.isxd'] + # output_memory_map_files = [test_data_path + '/unit_test/output/test_movie_mmap.bin'] + # + # output_events_files = [test_data_path + '/unit_test/output/test_output_events_cnmfe.isxd'] + # expected_cellsets = [test_data_path + '/unit_test/guilded/exp_mosaicCnmfeMovie_output_CS.isxd'] + # + # delete_files_silently(output_cellset_files) + # delete_files_silently(output_memory_map_files) + # # delete_files_silently(output_events_files) + # + # isx.run_cnmfe(input_movie_files, output_cellset_files, output_memory_map_files, + # deconvolution_method='scs', processing_mode='all_in_memory') + # + # # TODO: Implement event detection as a separate module + # # assert_isxd_cellsets_are_close_by_path(expected_cellsets[0], output_cellset_files[0], relative_tolerance=1e-7, use_cosine=True) + # # assert_isxd_event_sets_are_close_by_path(expected_events[0], output_events_files[0], relative_tolerance=1e-7, use_cosine=True) + # + # delete_files_silently(output_cellset_files) + # delete_files_silently(output_memory_map_files) + # # delete_files_silently(output_events_files) + 
+ @pytest.mark.isxd_movie + def test_CnmfeMovie_AllInMemory(self): + input_movie_files = [test_data_path + '/unit_test/cnmfe-cpp/CnmfeAlgoMovie128x128x100.isxd'] + output_cellset_files = [test_data_path + '/unit_test/output/test_output_cellset_cnmfe.isxd'] + output_dir = test_data_path + '/unit_test/tmp-cnmfe' + + expected_cellsets = [test_data_path + '/unit_test/guilded/exp_mosaicCnmfeMovie128x128x100AllMem_output_cellset.isxd'] + + delete_files_silently(output_cellset_files) + delete_dirs_silently(output_dir) + os.makedirs(output_dir) + + isx.run_cnmfe( + input_movie_files, output_cellset_files, output_dir, + cell_diameter=7, # was set to 13 before we decided to internally double the user-specified cell diameter; old results stored in exp_mosaicCnmfeMovie128x128x100AllMem_output_cellset_celldiameter13.isxd + min_corr=0.8, min_pnr=10, bg_spatial_subsampling=2, ring_size_factor=1.4, + gaussian_kernel_size=3, closing_kernel_size=3, merge_threshold=0.7, + processing_mode='all_in_memory', num_threads=4, patch_size=80, patch_overlap=20, + output_unit_type='df_over_noise') + + assert_isxd_cellsets_are_close_by_path(expected_cellsets[0], output_cellset_files[0], relative_tolerance=1e-5, use_cosine=True) + + delete_files_silently(output_cellset_files) + delete_dirs_silently(output_dir) + + @pytest.mark.isxd_movie + def test_CnmfeMovie_PatchMode(self): + input_movie_files = [test_data_path + '/unit_test/cnmfe-cpp/CnmfeAlgoMovie128x128x100.isxd'] + output_cellset_files = [test_data_path + '/unit_test/output/test_output_cellset_cnmfe.isxd'] + output_dir = test_data_path + '/unit_test/tmp-cnmfe' + + # - Found a bug in patch mode where patch coordinates were reversed for rows and columns + # - Fixing that issue results in a different order for patches which is why this is a "revised" version of the original test data + # /unit_test/guilded/exp_mosaicCnmfeMovie128x128x100Patch_output_cellset.isxd + # - The revised data has been updated to take into account the cell diameter 
change described below. + + expected_cellsets = [test_data_path + '/unit_test/guilded/exp_mosaicCnmfeMovie128x128x100Patch_output_cellset.isxd'] + + delete_files_silently(output_cellset_files) + delete_dirs_silently(output_dir) + os.makedirs(output_dir) + + isx.run_cnmfe( + input_movie_files, output_cellset_files, output_dir, + cell_diameter=7, # was set to 13 before we decided to internally double the user-specified cell diameter; old results stored in exp_mosaicCnmfeMovie128x128x100AllMem_output_cellset_revised_celldiameter13.isxd + min_corr=0.8, min_pnr=10, bg_spatial_subsampling=2, ring_size_factor=1.4, + gaussian_kernel_size=0, closing_kernel_size=0, merge_threshold=0.7, + processing_mode="parallel_patches", num_threads=4, patch_size=80, patch_overlap=20, + output_unit_type='df') + + assert_isxd_cellsets_are_close_by_path(expected_cellsets[0], output_cellset_files[0], relative_tolerance=1e-5, use_cosine=True) + + delete_files_silently(output_cellset_files) + delete_dirs_silently(output_dir) + + @pytest.mark.isxd_movie + def test_EstimateIcsImage(self): + input_file = test_data_path + '/unit_test/cell_count_est/2019-07-11-10-22-45_video-efocus_0189-PP-BP-MC-TPC-DFF_maxproj.isxd' + expected = 304 + + ic_count = isx.estimate_num_ics(input_file, average_diameter = 12) + assert ic_count == expected + ic_count = isx.estimate_num_ics(input_file, min_diameter = 4, max_diameter = 20) + assert ic_count == expected + + @pytest.mark.isxd_movie + def test_SpatialBandPassMovie(self): + input_file = test_data_path + '/unit_test/single_10x10_frameMovie.isxd' + output_file = test_data_path + '/unit_test/output/test_output_sbp.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicSpatialBandPass_output-v3.isxd' + + delete_files_silently([output_file]) + + isx.spatial_filter(input_file, output_file, low_cutoff=0.1, high_cutoff=0.9, + retain_mean=False, subtract_global_minimum=False) + + assert_isxd_movies_are_close_by_path(expected, output_file) + + 
delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_SpatialBandPassImage(self): + input_file = test_data_path + '/unit_test/create_cell_map/cell_map_image.isxd' + output_file = test_data_path + '/unit_test/output/spatial_filtered.isxd' + expected = test_data_path + '/unit_test/create_cell_map/spatial_filtered.isxd' + + delete_files_silently([output_file]) + + isx.spatial_filter(input_file, output_file, low_cutoff=0.1, high_cutoff=0.9, + retain_mean=False, subtract_global_minimum=False) + + assert_isxd_movies_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + + @pytest.mark.isxd_movie + def test_SpatialHighPassMovie(self): + input_file = test_data_path + '/unit_test/single_10x10_frameMovie.isxd' + output_file = test_data_path + '/unit_test/output/test_output_sbp.isxd' + + delete_files_silently([output_file]) + + isx.spatial_filter(input_file, output_file, low_cutoff=None, high_cutoff=0.9, + retain_mean=False, subtract_global_minimum=False) + + assert os.path.exists(output_file) + + delete_files_silently([output_file]) + + + @pytest.mark.isxd_movie + def test_SpatialLowPassMovie(self): + input_file = test_data_path + '/unit_test/single_10x10_frameMovie.isxd' + output_file = test_data_path + '/unit_test/output/test_output_sbp.isxd' + + delete_files_silently([output_file]) + + isx.spatial_filter(input_file, output_file, low_cutoff=0.1, high_cutoff=None, + retain_mean=False, subtract_global_minimum=False) + + assert os.path.exists(output_file) + + delete_files_silently([output_file]) + + + @pytest.mark.isxd_movie + def test_DeltaFoverF(self): + input_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_file = test_data_path + '/unit_test/output/test_output_dff.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicDeltaFoverF_output-v2.isxd' + + delete_files_silently([output_file]) + + isx.dff(input_file, output_file, f0_type='mean') + + exp_movie = isx.Movie.read(expected) + 
assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_file) + assert act_movie.file_path == output_file + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(3, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + del exp_movie + del act_movie + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_ProjectMovieMean(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/test_output_project_movie_mean.isxd' + expected = test_data_path + '/unit_test/50fr10_l1-3cells_he-Mean Image-v2.isxd' + + delete_files_silently([output_file]) + + isx.project_movie(input_file, output_file, 'mean') + + assert_isxd_movies_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_ProjectMovieMax(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/test_output_project_movie_max.isxd' + expected = test_data_path + '/unit_test/50fr10_l1-3cells_he-Maximum Image-v2.isxd' + + delete_files_silently([output_file]) + + isx.project_movie(input_file, output_file, 'max') + + assert_isxd_movies_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_ProjectMovieMin(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/test_output_project_movie_min.isxd' + expected = test_data_path + '/unit_test/50fr10_l1-3cells_he-Minimum Image-v2.isxd' + + delete_files_silently([output_file]) + + isx.project_movie(input_file, output_file, 'min') + + assert_isxd_movies_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_ProjectMovieStandardDeviation(self): + input_file = 
test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/test_output_project_movie_min.isxd' + expected = test_data_path + '/unit_test/50fr10_l1-3cells_he-Standard Deviation Image-v2.isxd' + + delete_files_silently([output_file]) + + isx.project_movie(input_file, output_file, 'standard_deviation') + + assert_isxd_movies_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_events + def test_EventDetection(self): + input_file = test_data_path + '/unit_test/eventDetectionCellSet.isxd' + output_file = test_data_path + '/unit_test/output/event_output.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd' + + delete_files_silently([output_file]) + + isx.event_detection(input_file, output_file, threshold=0.25, + tau=0.500, event_time_ref='beginning', + ignore_negative_transients=True, accepted_cells_only=False) + + assert_isxd_event_sets_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + def test_EventDetection_negative_not_existing_input_file(self): + input_file = test_data_path + '/unit_test/not_existing_input_file.isxd' + output_file = test_data_path + '/unit_test/output/event_output_olb.isxd' + + delete_files_silently([output_file]) + + with pytest.raises(Exception) as error: + isx.event_detection(input_file, output_file, threshold=0.25, + tau=0.500, event_time_ref='beginning', + ignore_negative_transients=True, accepted_cells_only=False) + assert 'File does not exist' in str(error.value) + + assert not is_file(output_file) + delete_files_silently([output_file]) + + def test_EventDetection_negative_input_file_not_cellset(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/event_output_olb.isxd' + + delete_files_silently([output_file]) + + with pytest.raises(Exception) as error: + isx.event_detection(input_file, 
output_file, threshold=0.25, + tau=0.500, event_time_ref='beginning', + ignore_negative_transients=True, accepted_cells_only=False) + assert 'Expected data set to be of type: Cell Set' in str(error.value) + + assert not is_file(output_file) + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_TemporalCropMovie(self): + input_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_file = test_data_path + '/unit_test/output/trim_output.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicTemporalCropMovie_output-v2.isxd' + delete_files_silently([output_file]) + + seg_indices = [(1, 5)] + + isx.trim_movie(input_file, output_file, seg_indices) + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_file) + assert act_movie.file_path == output_file + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(3, 1) + + assert_isxd_movies_are_close(exp_movie, act_movie) + + assert act_movie.timing.cropped == seg_indices + + del exp_movie + del act_movie + delete_files_silently([output_file]) + + @pytest.mark.isxd_movie + def test_TemporalCropMovieManySegments(self): + input_file = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_file = test_data_path + '/unit_test/output/trim_output_ms.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicTemporalCropMovieManySegments_output.isxd' + delete_files_silently([output_file]) + + seg_indices = [(1, 3), (5, 6)] + isx.trim_movie(input_file, output_file, seg_indices) + + exp_movie = isx.Movie.read(expected) + assert exp_movie.file_path == expected + act_movie = isx.Movie.read(output_file) + assert act_movie.file_path == output_file + + exp_movie.spacing._impl.pixel_width = isx._internal.IsxRatio(3, 1) + exp_movie.spacing._impl.pixel_height = isx._internal.IsxRatio(3, 1) + + 
assert_isxd_movies_are_close(exp_movie, act_movie) + + assert act_movie.timing.cropped == seg_indices + + del exp_movie + del act_movie + delete_files_silently([output_file]) + + @pytest.mark.isxd_trace + @pytest.mark.csv_trace + def test_ComputeCellMetrics(self): + cell_set_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_movie-PCA-ICA.isxd' + events_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_movie-PCA-ICA-ED.isxd' + output_file = test_data_path + '/unit_test/output/cell_metrics_3cells_python.csv' + expected = test_data_path + '/unit_test/cell_metrics/expected_cell_metrics_3cells-v3.csv' + + delete_files_silently([output_file]) + + isx.cell_metrics(cell_set_file, events_file, output_file) + + assert_csv_cell_metrics_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_trace + @pytest.mark.csv_trace + def test_ExportCellMetrics(self): + cell_set_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_computed-CNMFe.isxd' + events_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_computed-CNMFe-ED.isxd' + output_file = test_data_path + '/unit_test/output/test.csv' + + delete_files_silently([output_file]) + + isx.cell_metrics(cell_set_file, events_file, output_file, recompute_metrics=False) + + df = pd.read_csv(output_file) + + # Verify first and last rows of output file + assert (df.iloc[0] == + pd.DataFrame({ + 'cellName' : ['C00'], + 'snr' : [48.5975], + 'mad' : [0.198209], + 'eventRate' : [0.2], + 'eventAmpMedian' : [9.63244], + 'eventAmpSD' : [0.0], + 'riseMedian' : [0.1], + 'riseSD' : [0.0], + 'decayMedian' : [0.2], + 'decaySD' : [0.0], + 'numContourComponents' : [1], + 'overallCenterInPixelsX' : [60], + 'overallCenterInPixelsY' : [56], + 'overallAreaInPixels' : [5.5], + 'overallMaxContourWidthInPixels' : [3.60555], + 'largestComponentCenterInPixelsX' : [60], + 'largestComponentCenterInPixelsY' : [56], + 'largestComponentAreaInPixels' : [5.5], + 
'largestComponentMaxContourWidthInPixels' : [3.60555] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'cellName' : ['C19'], + 'snr' : [0.0], + 'mad' : [0.0], + 'eventRate' : [0.0], + 'eventAmpMedian' : [0.0], + 'eventAmpSD' : [0.0], + 'riseMedian' : [0.0], + 'riseSD' : [0.0], + 'decayMedian' : [0.0], + 'decaySD' : [0.0], + 'numContourComponents' : [1], + 'overallCenterInPixelsX' : [72], + 'overallCenterInPixelsY' : [67], + 'overallAreaInPixels' : [7.5], + 'overallMaxContourWidthInPixels' : [4.47214], + 'largestComponentCenterInPixelsX' : [72], + 'largestComponentCenterInPixelsY' : [67], + 'largestComponentAreaInPixels' : [7.5], + 'largestComponentMaxContourWidthInPixels' : [4.47214] + }) + ).all(axis=None) + + delete_files_silently([output_file]) + + @pytest.mark.isxd_trace + @pytest.mark.csv_trace + def test_ExportCellMetricsNoCellMetrics(self): + cell_set_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_movie-PCA-ICA.isxd' + events_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_movie-PCA-ICA-ED.isxd' + output_file = test_data_path + '/unit_test/output/test.csv' + + delete_files_silently([output_file]) + + with pytest.raises(Exception) as error: + isx.cell_metrics(cell_set_file, events_file, output_file, recompute_metrics=False) + assert 'Input files do not have pre-computed cell metrics stored on disk. Please compute metrics.' 
in str(error.value) + + @pytest.mark.isxd_trace + def test_ApplyCellSet(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + input_cell_set = test_data_path + '/unit_test/eventDetectionCellSet.isxd' + output_file = test_data_path + '/unit_test/output/test_output_applyCellSet.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicApplyCellSet_output.isxd' + + delete_files_silently([output_file]) + + isx.apply_cell_set(input_file, input_cell_set, output_file, threshold=0.0) + + assert_isxd_cellsets_are_close_by_path(expected, output_file) + + delete_files_silently([output_file]) + + def test_ApplyCellSet_negative_input_file_not_cellset(self): + input_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + input_cell_set = test_data_path + '/unit_test/50fr10_l2-3cells_he.isxd' + output_file = test_data_path + '/unit_test/output/test_output_applyCellSet.isxd' + expected = test_data_path + '/unit_test/guilded/exp_mosaicApplyCellSet_output.isxd' + + delete_files_silently([output_file]) + + with pytest.raises(Exception) as error: + isx.apply_cell_set(input_file, input_cell_set, output_file, threshold=0.0) + assert 'Expected data set to be of type: Cell Set' in str(error.value) + + assert not is_file(output_file) + delete_files_silently([output_file]) + + @pytest.mark.isxd_trace + def test_ApplyRois(self): + input_movie_files = [ + test_data_path + '/unit_test/longReg_movie0.isxd', + test_data_path + '/unit_test/longReg_movie1.isxd' + ] + output_cell_set_files = [ + test_data_path + '/unit_test/output/test_output_applyRoi0.isxd', + test_data_path + '/unit_test/output/test_output_applyRoi1.isxd' + ] + + delete_files_silently(output_cell_set_files) + + rois = [ + [(30, 71),(30, 71),(29, 71),(28, 69),(27, 69),(26, 69),(25, 69),(25, 68),(24, 68),(23, 68),(22, 68),(22, 68),(21, 68),(21, 69),(20, 69),(20, 69),(19, 70),(19, 71),(19, 72),(19, 73),(18, 73),(18, 75),(18, 76),(19, 77),(19, 78),(20, 79),(20, 80),(21, 80),(24, 81),(25, 
81),(27, 81),(28, 81),(29, 81),(30, 81),(30, 80),(30, 80),(31, 78),(31, 77),(31, 76),(31, 75),(31, 75),(31, 75),(31, 74),(31, 73),(31, 73),(31, 72),(31, 72),(30, 72),(30, 72),(30, 72),(30, 71)], + [(95, 55),(95, 55),(94, 55),(92, 55),(91, 54),(90, 54),(90, 54),(89, 54),(89, 54),(88, 54),(87, 55),(86, 55),(86, 56),(85, 56),(85, 57),(85, 57),(85, 58),(85, 58),(85, 59),(85, 60),(85, 61),(85, 61),(86, 62),(86, 62),(87, 63),(88, 64),(88, 65),(89, 65),(90, 66),(90, 66),(91, 66),(92, 66),(93, 65),(94, 65),(95, 64),(95, 64),(95, 64),(95, 62),(96, 60),(96, 59),(96, 58),(96, 57),(96, 57),(95, 57),(95, 56),(95, 56),(95, 56),(95, 56),(95, 55)], + [(153, 47),(153, 47),(153, 47),(150, 47),(149, 47),(148, 47),(147, 47),(146, 47),(146, 47),(145, 48),(145, 48),(145, 49),(145, 49),(145, 50),(145, 51),(145, 52),(145, 52),(145, 53),(146, 54),(147, 55),(148, 55),(149, 55),(150, 55),(152, 55),(153, 55),(154, 54),(155, 53),(155, 52),(156, 52),(156, 50),(156, 49),(156, 48),(155, 47),(155, 47),(154, 46),(154, 46),(153, 46),(153, 46),(153, 46),(153, 47)], + [(104, 96),(103, 96),(103, 96),(101, 96),(101, 96),(100, 95),(99, 95),(99, 95),(99, 95),(98, 95),(98, 95),(98, 96),(97, 96),(97, 96),(97, 97),(97, 97),(97, 97),(97, 98),(96, 98),(96, 98),(96, 99),(95, 100),(95, 100),(95, 101),(95, 102),(95, 102),(95, 103),(95, 104),(96, 104),(96, 105),(97, 105),(97, 106),(98, 106),(99, 106),(100, 106),(101, 106),(101, 106),(102, 106),(103, 106),(104, 106),(104, 106),(104, 105),(104, 105),(105, 104),(105, 104),(106, 103),(106, 102),(106, 101),(106, 101),(106, 100),(106, 99),(106, 99),(105, 98),(105, 98),(104, 97),(104, 97),(104, 97),(104, 97),(103, 97),(103, 97),(104, 96)], + [(104, 174),(104, 174),(104, 173),(102, 172),(101, 172),(100, 171),(100, 171),(99, 171),(97, 171),(96, 171),(96, 172),(95, 172),(95, 173),(95, 173),(95, 175),(95, 176),(95, 177),(95, 178),(96, 179),(97, 180),(98, 181),(100, 181),(101, 181),(103, 180),(104, 179),(104, 179),(105, 178),(105, 177),(105, 176),(105, 175),(105, 174),(104, 
173),(104, 173),(104, 173),(104, 174)], + [(177, 147),(177, 147),(176, 147),(174, 146),(174, 146),(173, 146),(172, 147),(171, 147),(171, 147),(171, 148),(171, 148),(171, 148),(171, 149),(171, 150),(171, 150),(171, 151),(171, 152),(172, 153),(172, 154),(173, 154),(174, 155),(175, 155),(176, 155),(178, 155),(180, 154),(180, 153),(181, 152),(181, 151),(181, 149),(181, 148),(180, 147),(179, 147),(178, 146),(178, 146),(177, 146),(177, 146),(177, 146),(177, 147)], + [(22, 121),(22, 121),(22, 121),(19, 121),(18, 121),(18, 121),(18, 121),(18, 122),(17, 122),(16, 123),(16, 124),(15, 124),(15, 125),(15, 126),(15, 127),(16, 128),(16, 128),(17, 129),(18, 130),(19, 130),(21, 130),(22, 130),(23, 129),(24, 128),(25, 128),(25, 128),(25, 127),(25, 127),(25, 126),(25, 124),(26, 123),(26, 122),(26, 122),(25, 122),(25, 121),(24, 121),(24, 121),(23, 121),(23, 121),(23, 121),(22, 121)], + ] + + cell_names = [ + "c0", + "c1", + "c2", + "c3", + "c4", + "c5", + "c6" + ] + + isx.apply_rois( + input_movie_files=input_movie_files, + output_cell_set_files=output_cell_set_files, + rois=rois, + cell_names=cell_names + ) + + # verify trace sums of output cell sets + expected_trace_sums = [ + 127510, + 79431, + 163446, + 154379, + 80295, + 165443, + 75685, + ] + + assert_isxd_cellsets_trace_sums( + output_cell_set_files, + expected_trace_sums + ) + + assert_isxd_cellsets_cell_names( + output_cell_set_files, + cell_names + ) + + delete_files_silently(output_cell_set_files) + + @pytest.mark.isxd_trace + def test_ApplyRoisNoCellNames(self): + input_movie_files = [ + test_data_path + '/unit_test/longReg_movie0.isxd', + test_data_path + '/unit_test/longReg_movie1.isxd' + ] + output_cell_set_files = [ + test_data_path + '/unit_test/output/test_output_applyRoi0.isxd', + test_data_path + '/unit_test/output/test_output_applyRoi1.isxd' + ] + + delete_files_silently(output_cell_set_files) + + rois = [ + [(30, 71),(30, 71),(29, 71),(28, 69),(27, 69),(26, 69),(25, 69),(25, 68),(24, 68),(23, 68),(22, 68),(22, 
68),(21, 68),(21, 69),(20, 69),(20, 69),(19, 70),(19, 71),(19, 72),(19, 73),(18, 73),(18, 75),(18, 76),(19, 77),(19, 78),(20, 79),(20, 80),(21, 80),(24, 81),(25, 81),(27, 81),(28, 81),(29, 81),(30, 81),(30, 80),(30, 80),(31, 78),(31, 77),(31, 76),(31, 75),(31, 75),(31, 75),(31, 74),(31, 73),(31, 73),(31, 72),(31, 72),(30, 72),(30, 72),(30, 72),(30, 71)], + [(95, 55),(95, 55),(94, 55),(92, 55),(91, 54),(90, 54),(90, 54),(89, 54),(89, 54),(88, 54),(87, 55),(86, 55),(86, 56),(85, 56),(85, 57),(85, 57),(85, 58),(85, 58),(85, 59),(85, 60),(85, 61),(85, 61),(86, 62),(86, 62),(87, 63),(88, 64),(88, 65),(89, 65),(90, 66),(90, 66),(91, 66),(92, 66),(93, 65),(94, 65),(95, 64),(95, 64),(95, 64),(95, 62),(96, 60),(96, 59),(96, 58),(96, 57),(96, 57),(95, 57),(95, 56),(95, 56),(95, 56),(95, 56),(95, 55)], + [(153, 47),(153, 47),(153, 47),(150, 47),(149, 47),(148, 47),(147, 47),(146, 47),(146, 47),(145, 48),(145, 48),(145, 49),(145, 49),(145, 50),(145, 51),(145, 52),(145, 52),(145, 53),(146, 54),(147, 55),(148, 55),(149, 55),(150, 55),(152, 55),(153, 55),(154, 54),(155, 53),(155, 52),(156, 52),(156, 50),(156, 49),(156, 48),(155, 47),(155, 47),(154, 46),(154, 46),(153, 46),(153, 46),(153, 46),(153, 47)], + [(104, 96),(103, 96),(103, 96),(101, 96),(101, 96),(100, 95),(99, 95),(99, 95),(99, 95),(98, 95),(98, 95),(98, 96),(97, 96),(97, 96),(97, 97),(97, 97),(97, 97),(97, 98),(96, 98),(96, 98),(96, 99),(95, 100),(95, 100),(95, 101),(95, 102),(95, 102),(95, 103),(95, 104),(96, 104),(96, 105),(97, 105),(97, 106),(98, 106),(99, 106),(100, 106),(101, 106),(101, 106),(102, 106),(103, 106),(104, 106),(104, 106),(104, 105),(104, 105),(105, 104),(105, 104),(106, 103),(106, 102),(106, 101),(106, 101),(106, 100),(106, 99),(106, 99),(105, 98),(105, 98),(104, 97),(104, 97),(104, 97),(104, 97),(103, 97),(103, 97),(104, 96)], + [(104, 174),(104, 174),(104, 173),(102, 172),(101, 172),(100, 171),(100, 171),(99, 171),(97, 171),(96, 171),(96, 172),(95, 172),(95, 173),(95, 173),(95, 175),(95, 176),(95, 
177),(95, 178),(96, 179),(97, 180),(98, 181),(100, 181),(101, 181),(103, 180),(104, 179),(104, 179),(105, 178),(105, 177),(105, 176),(105, 175),(105, 174),(104, 173),(104, 173),(104, 173),(104, 174)], + [(177, 147),(177, 147),(176, 147),(174, 146),(174, 146),(173, 146),(172, 147),(171, 147),(171, 147),(171, 148),(171, 148),(171, 148),(171, 149),(171, 150),(171, 150),(171, 151),(171, 152),(172, 153),(172, 154),(173, 154),(174, 155),(175, 155),(176, 155),(178, 155),(180, 154),(180, 153),(181, 152),(181, 151),(181, 149),(181, 148),(180, 147),(179, 147),(178, 146),(178, 146),(177, 146),(177, 146),(177, 146),(177, 147)], + [(22, 121),(22, 121),(22, 121),(19, 121),(18, 121),(18, 121),(18, 121),(18, 122),(17, 122),(16, 123),(16, 124),(15, 124),(15, 125),(15, 126),(15, 127),(16, 128),(16, 128),(17, 129),(18, 130),(19, 130),(21, 130),(22, 130),(23, 129),(24, 128),(25, 128),(25, 128),(25, 127),(25, 127),(25, 126),(25, 124),(26, 123),(26, 122),(26, 122),(25, 122),(25, 121),(24, 121),(24, 121),(23, 121),(23, 121),(23, 121),(22, 121)], + ] + + isx.apply_rois( + input_movie_files=input_movie_files, + output_cell_set_files=output_cell_set_files, + rois=rois, + cell_names=[] + ) + + # verify trace sums of output cell sets + expected_trace_sums = [ + 127510, + 79431, + 163446, + 154379, + 80295, + 165443, + 75685, + ] + + assert_isxd_cellsets_trace_sums( + output_cell_set_files, + expected_trace_sums + ) + + delete_files_silently(output_cell_set_files) + + @pytest.mark.isxd_trace + @pytest.mark.isxd_movie + def test_LongitudinalRegistration(self): + input_dir = test_data_path + '/unit_test' + output_dir = input_dir + '/output' + exp_dir = input_dir + '/guilded' + + input_cellset_filenames = [input_dir + '/longReg_cellSet{}.isxd'.format(i) for i in range(3)] + output_cellset_filenames = [output_dir + '/test_output_longReg_cellSet{}.isxd'.format(i) for i in range(3)] + input_movie_filenames = [input_dir + '/longReg_movie{}.isxd'.format(i) for i in range(3)] + output_movie_filenames = 
[output_dir + '/test_output_longReg_movie{}.isxd'.format(i) for i in range(3)] + expected_cells = [exp_dir + '/exp_mosaicLongitudinalRegistration_CellOutput{}.isxd'.format(i) for i in range(3)] + expected_movies = [exp_dir + '/exp_mosaicLongitudinalRegistration_MovieOutput{}.isxd'.format(i) for i in range(3)] + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + + isx.longitudinal_registration(input_cellset_filenames, output_cellset_filenames, input_movie_files=input_movie_filenames, output_movie_files=output_movie_filenames) + + for f in range(3): + assert_isxd_cellsets_are_close_by_path(expected_cells[f], output_cellset_filenames[f]) + assert_isxd_movies_are_close_by_path(expected_movies[f], output_movie_filenames[f]) + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + + @pytest.mark.isxd_trace + @pytest.mark.isxd_movie + def test_LongitudinalRegistrationWithCsvs(self): + input_dir = test_data_path + '/unit_test' + output_dir = input_dir + '/output' + exp_dir = input_dir + '/guilded' + + input_cellset_filenames = [input_dir + '/longReg_cellSet{}.isxd'.format(i) for i in range(3)] + output_cellset_filenames = [output_dir + '/test_output_longReg_cellSet{}.isxd'.format(i) for i in range(3)] + input_movie_filenames = [input_dir + '/longReg_movie{}.isxd'.format(i) for i in range(3)] + output_movie_filenames = [output_dir + '/test_output_longReg_movie{}.isxd'.format(i) for i in range(3)] + expected_cells = [exp_dir + '/exp_mosaicLongitudinalRegistration_CellOutput{}.isxd'.format(i) for i in range(3)] + expected_movies = [exp_dir + '/exp_mosaicLongitudinalRegistration_MovieOutput{}.isxd'.format(i) for i in range(3)] + + output_corr_filename = output_dir + '/longReg_corr.csv' + expected_corr_filename = exp_dir + '/exp_mosaicLongitudinalRegistration_corr.csv' + + output_tfm_filename = output_dir + '/longReg_transforms.csv' + expected_tfm_filename = exp_dir + 
'/exp_mosaicLongitudinalRegistration_transforms-v2.csv' + + output_crop_filename = output_dir + '/longReg_crop.csv' + expected_crop_filename = exp_dir + '/exp_mosaicLongitudinalRegistration_crop.csv' + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + delete_files_silently([output_corr_filename]) + delete_files_silently([output_tfm_filename]) + delete_files_silently([output_crop_filename]) + + isx.longitudinal_registration(input_cellset_filenames, output_cellset_filenames, input_movie_files=input_movie_filenames, output_movie_files=output_movie_filenames, csv_file=output_corr_filename, transform_csv_file=output_tfm_filename, crop_csv_file=output_crop_filename) + + for f in range(3): + assert_isxd_cellsets_are_close_by_path(expected_cells[f], output_cellset_filenames[f]) + assert_isxd_movies_are_close_by_path(expected_movies[f], output_movie_filenames[f]) + + assert_csv_files_are_close_by_path(expected_corr_filename, output_corr_filename) + assert_csv_files_are_equal_by_path(expected_crop_filename, output_crop_filename) + assert_csv_files_are_equal_by_path(expected_tfm_filename, output_tfm_filename) + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + delete_files_silently([output_corr_filename]) + delete_files_silently([output_tfm_filename]) + delete_files_silently([output_crop_filename]) + + @pytest.mark.parametrize(('n_of_not_cellset'), range(3)) + def test_LongitudinalRegistration_negativeinput_file_not_cellset(self, n_of_not_cellset): + not_existing_file_path = test_data_path + 'not_existing_file.isxd' + input_cellset_filenames = [test_data_path + '/unit_test/longReg_cellSet0.isxd', + test_data_path + '/unit_test/longReg_cellSet1.isxd', + test_data_path + '/unit_test/longReg_cellSet2.isxd'] + output_cellset_filenames = [test_data_path + '/unit_test/output/test_output_longReg_cellSet0.isxd', + test_data_path + '/unit_test/output/test_output_longReg_cellSet1.isxd', 
+ test_data_path + '/unit_test/output/test_output_longReg_cellSet2.isxd'] + input_movie_filenames = [test_data_path + '/unit_test/longReg_movie0.isxd', + test_data_path + '/unit_test/longReg_movie1.isxd', + test_data_path + '/unit_test/longReg_movie2.isxd'] + output_movie_filenames = [test_data_path + '/unit_test/output/test_output_longReg_movie0.isxd', + test_data_path + '/unit_test/output/test_output_longReg_movie1.isxd', + test_data_path + '/unit_test/output/test_output_longReg_movie2.isxd'] + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + + input_cellset_filenames[n_of_not_cellset] = not_existing_file_path + + with pytest.raises(Exception) as error: + isx.longitudinal_registration(input_cellset_filenames, + output_cellset_filenames, + input_movie_files=input_movie_filenames, + output_movie_files=output_movie_filenames) + assert 'File does not exist' in str(error.value) + + for f in range(3): + assert not is_file(output_cellset_filenames[f]) + assert not is_file(output_movie_filenames[f]) + + delete_files_silently(output_cellset_filenames) + delete_files_silently(output_movie_filenames) + + @pytest.mark.isxd_events + def test_AutoAcceptReject(self): + original_cellset_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA.isxd' + input_events_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA-ED.isxd' + input_cellset_file = test_data_path + '/unit_test/output/test_output_classifyCellStatus.isxd' + copyfile(original_cellset_file, input_cellset_file) + + expected = test_data_path + '/unit_test/guilded/exp_mosaicClassifyCellStatus_output_default.isxd' + + isx.auto_accept_reject(input_cellset_file, input_events_file) + + assert_isxd_cellsets_are_close_by_path(expected, input_cellset_file) + + delete_files_silently([input_cellset_file]) + + @pytest.mark.isxd_events + def test_AutoAcceptReject_CustomFilter(self): + original_cellset_file = 
test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA.isxd' + input_events_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA-ED.isxd' + input_cellset_file = test_data_path + '/unit_test/output/test_output_classifyCellStatus.isxd' + copyfile(original_cellset_file, input_cellset_file) + + expected_ARA = test_data_path + '/unit_test/guilded/exp_mosaicClassifyCellStatus_output_ARA.isxd' + filter_ARA = [('SNR', '>', 1.35)] + + isx.auto_accept_reject(input_cellset_file, input_events_file, filter_ARA) + + assert_isxd_cellsets_are_close_by_path(expected_ARA, input_cellset_file) + + delete_files_silently([input_cellset_file]) + + @pytest.mark.isxd_events + def test_AutoAcceptReject_CustomFilters(self): + original_cellset_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA.isxd' + input_events_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA-ED.isxd' + input_cellset_file = test_data_path + '/unit_test/output/test_output_classifyCellStatus.isxd' + copyfile(original_cellset_file, input_cellset_file) + + expected_ARR = test_data_path + '/unit_test/guilded/exp_mosaicClassifyCellStatus_output_ARR.isxd' + filter_ARR = [('SNR', '>', 1.32), ('Event Rate', '>', 1), ('Cell Size', '>', 6)] + + isx.auto_accept_reject(input_cellset_file, input_events_file, filter_ARR) + + assert_isxd_cellsets_are_close_by_path(expected_ARR, input_cellset_file) + + delete_files_silently([input_cellset_file]) + + def test_AutoAcceptReject_negative_input_file_not_cellset(self): + input_events_file = test_data_path + '/unit_test/classify_cell_statuses/50fr10_l1-3cells_he-PCA-ICA-ED.isxd' + input_cellset_file = test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + + with pytest.raises(Exception) as error: + isx.auto_accept_reject(input_cellset_file, input_events_file) + assert 'Expected data set to be of type: Cell Set' in str(error.value) + + # def 
test_MultiplaneRegistration(self): + # input_cell_set_files = [ + # test_data_path + '/unit_test/mcr/mcr_in1.isxd', + # test_data_path + '/unit_test/mcr/mcr_in2.isxd', + # test_data_path + '/unit_test/mcr/mcr_in3.isxd' + # ] + # lcr_output_files = [x + '-LCR.isxd' for x in input_cell_set_files] + # mcr_output_file = test_data_path + '/unit_test/mcr/test-MCR.isxd' + # expected_cell_set_file = test_data_path + '/unit_test/mcr/mcr_exp.isxd' + + # isx.multiplane_registration(input_cell_set_files, mcr_output_file) + # assert_isxd_cellsets_are_close_by_path(expected_cell_set_file, mcr_output_file) + + # tr_paths = [ + # test_data_path + '/unit_test/mcr/test-LCR_001-TR.isxd', + # test_data_path + '/unit_test/mcr/test-LCR_002-TR.isxd', + # test_data_path + '/unit_test/mcr/test-LCR_003-TR.isxd' + # ] + # ed_paths = [ + # test_data_path + '/unit_test/mcr/test-LCR_001-TR-ED.isxd', + # test_data_path + '/unit_test/mcr/test-LCR_002-TR-ED.isxd', + # test_data_path + '/unit_test/mcr/test-LCR_003-TR-ED.isxd' + # ] + # delete_files_silently(lcr_output_files + tr_paths + ed_paths + [mcr_output_file]) + + @pytest.mark.isxd_movie + def test_DeinterleaveDualcolorMovie(self): + test_dir = test_data_path + '/unit_test/dual_color' + input_movie_files = [test_dir + '/DualColorMultiplexingMovie.isxd'] + output_green_movie_files = [test_data_path + '/unit_test/output/' + 'tmp_output_green_channel.isxd'] + output_red_movie_files = [test_data_path + '/unit_test/output/' + 'tmp_output_red_channel.isxd'] + + # IDPS-857 Upgrade version of test files since higher precision sampling rate results + # in a slightly different computed start time for de-interleaved movies + # IDPS-900 Upgrade version of test files since epoch start time is calculated based on tsc values + expected_green_movies = [test_data_path + '/unit_test/guilded/' + 'de_interleave_dualcolor_multiplexing_green_channel_v2.isxd'] + expected_red_movies = [test_data_path + '/unit_test/guilded/' + 
'de_interleave_dualcolor_multiplexing_red_channel_v2.isxd'] + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + isx.de_interleave_dualcolor(input_movie_files, output_green_movie_files, output_red_movie_files) + + for o, e in zip(output_green_movie_files, expected_green_movies): + assert_isxd_movies_are_close_by_path(e, o) + for o, e in zip(output_red_movie_files, expected_red_movies): + assert_isxd_movies_are_close_by_path(e, o) + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + @pytest.mark.isxd_movie + def test_DeinterleaveDualcolorMovieWideField(self): + test_dir = test_data_path + '/unit_test/widefield' + input_movie_files = [test_dir + '/DualSpheres_2023-11-10-11-34-11_video_multiplexing-PP-TPC.isxd'] + output_green_movie_files = [test_data_path + '/unit_test/output/' + 'tmp_output_green_channel.isxd'] + output_red_movie_files = [test_data_path + '/unit_test/output/' + 'tmp_output_red_channel.isxd'] + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + isx.de_interleave_dualcolor(input_movie_files, output_green_movie_files, output_red_movie_files, correct_chromatic_shift=True) + + green_movie = isx.Movie.read(output_green_movie_files[0]) + first_green_frame = green_movie.get_frame_data(0) + expected_corrected_frame_sum = 73429192 + assert np.sum(first_green_frame) == expected_corrected_frame_sum + del green_movie + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + @pytest.mark.isxd_movie + def test_DeinterleaveDualcolorMovieWideFieldNoCorrection(self): + test_dir = test_data_path + '/unit_test/widefield' + input_movie_files = [test_dir + '/DualSpheres_2023-11-10-11-34-11_video_multiplexing-PP-TPC.isxd'] + output_green_movie_files = [test_data_path + '/unit_test/output/' + 'tmp_output_green_channel.isxd'] + output_red_movie_files = [test_data_path + 
'/unit_test/output/' + 'tmp_output_red_channel.isxd'] + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + isx.de_interleave_dualcolor(input_movie_files, output_green_movie_files, output_red_movie_files, correct_chromatic_shift=False) + + green_movie = isx.Movie.read(output_green_movie_files[0]) + first_green_frame = green_movie.get_frame_data(0) + expected_corrected_frame_sum = 73429192 + assert np.sum(first_green_frame) != expected_corrected_frame_sum + del green_movie + + delete_files_silently(output_green_movie_files) + delete_files_silently(output_red_movie_files) + + # def test_MulticolorRegistration_AcceptedOnly(self): + # test_dir = test_data_path + '/unit_test/dual_color' + # input_cellset_file1 = test_dir + '/cellset_green_dynamic.isxd' + # input_cellset_file2 = test_dir + '/cellset_red_static.isxd' + # output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + # output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + # output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + # delete_dirs_silently(output_directory) + # os.makedirs(output_directory) + + # pad_value = np.nan + # lower_threshold = 0.1 + # upper_threshold = 0.3 + # accepted_cells_only = True + + # isx.multicolor_registration( + # input_cellset_file1, input_cellset_file2, output_spatial_overlap_csv_file, output_registration_matrix_csv_file, + # output_directory, pad_value, lower_threshold, upper_threshold, + # accepted_cells_only + # ) + + # # intermediate files + # assert os.path.exists(output_directory + "/cellset_red_static-CSB.isxd") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST.isxd") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST-CSB.isxd") + # assert os.path.exists(output_directory + "/cellset_red_static-CSB-cellmap.tiff") + # assert os.path.exists(output_directory + 
"/cellset_green_dynamic-CST-CSB-cellmap.tiff") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST-CSB-cellmap-overlay.tiff") + + # # spatial overlap + # expected_spatial_overlap_csv_file = output_directory + '/expected_output_spatial_overlap.csv' + # spatial_overlap_data = [["", "C01", "C02"], + # ["C0", 1, 0], + # ["C1", 0, 0.333333], + # ["C2", 0, 0]] + # with open(expected_spatial_overlap_csv_file, 'w', newline='') as file: + # writer = csv.writer(file, delimiter=',') + # writer.writerows(spatial_overlap_data) + # assert_txt_files_are_equal_by_path(expected_spatial_overlap_csv_file, output_spatial_overlap_csv_file) + + # # reg matrix + # expected_registration_matrix_csv_file = output_directory + '/expected_output_reg_matrix.csv' + # reg_matrix_data = [["","primary","max_jaccard_index","secondary","match","colocalization"], + # [0,"C0",1,"C01",True,True], + # [1,"C1",0.333333,"C02",True,True], + # [2,"C2",0,"C01",False,False]] + # with open(expected_registration_matrix_csv_file, 'w', newline='') as file: + # writer = csv.writer(file, delimiter=',') + # writer.writerows(reg_matrix_data) + # assert_txt_files_are_equal_by_path(expected_registration_matrix_csv_file, output_registration_matrix_csv_file) + + # delete_dirs_silently(output_directory) + + # def test_MulticolorRegistration_AcceptedAndUndecided(self): + # test_dir = test_data_path + '/unit_test/dual_color' + # input_cellset_file1 = test_dir + '/cellset_green_dynamic.isxd' + # input_cellset_file2 = test_dir + '/cellset_red_static.isxd' + # output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + # output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + # output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + # delete_dirs_silently(output_directory) + # os.makedirs(output_directory) + + # pad_value = np.nan + # lower_threshold = 0.35 + # upper_threshold = 0.5 + # accepted_cells_only = False + + # 
isx.multicolor_registration( + # input_cellset_file1, input_cellset_file2, output_spatial_overlap_csv_file, output_registration_matrix_csv_file, + # output_directory, pad_value, lower_threshold, upper_threshold, + # accepted_cells_only + # ) + + # # intermediate files + # assert os.path.exists(output_directory + "/cellset_red_static-CSB.isxd") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST.isxd") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST-CSB.isxd") + # assert os.path.exists(output_directory + "/cellset_red_static-CSB-cellmap.tiff") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST-CSB-cellmap.tiff") + # assert os.path.exists(output_directory + "/cellset_green_dynamic-CST-CSB-cellmap-overlay.tiff") + + # # spatial overlap + # expected_spatial_overlap_csv_file = output_directory + '/expected_output_spatial_overlap.csv' + # spatial_overlap_data = [["", "C01", "C02"], + # ["C0", 1, 0], + # ["C1", 0, 0.333333], + # ["C2", 0, 0], + # ["C3", 0.5625, 0]] + # with open(expected_spatial_overlap_csv_file, 'w', newline='') as file: + # writer = csv.writer(file, delimiter=',') + # writer.writerows(spatial_overlap_data) + # assert_txt_files_are_equal_by_path(expected_spatial_overlap_csv_file, output_spatial_overlap_csv_file) + + # # reg matrix + # expected_registration_matrix_csv_file = output_directory + '/expected_output_reg_matrix.csv' + # reg_matrix_data = [["","primary","max_jaccard_index","secondary","match","colocalization"], + # [0,"C0",1,"C01",True,True], + # [1,"C1",0.333333,"C02",False,False], + # [2,"C2",0,"C01",False,False], + # [3,"C3",0.5625,"C01",False,""]] + # with open(expected_registration_matrix_csv_file, 'w', newline='') as file: + # writer = csv.writer(file, delimiter=',') + # writer.writerows(reg_matrix_data) + # assert_txt_files_are_equal_by_path(expected_registration_matrix_csv_file, output_registration_matrix_csv_file) + + # delete_dirs_silently(output_directory) + + def 
test_binarize_cellset_absolute_threshold(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file = os.path.join(base_dir, 'cellset_pcaica_2cells.isxd') + actual_output_cellset_file = os.path.join(output_dir, 'actual_binary_cellset.isxd') + expected_output_cellset_file = os.path.join(base_dir, 'cellset_pcaica_2cells_absolute_threshold_5.isxd') + delete_files_silently([actual_output_cellset_file]) + + isx.binarize_cell_set(input_cellset_file, actual_output_cellset_file, threshold=5, use_percentile_threshold=False) + + assert_isxd_cellsets_are_close_by_path(expected_output_cellset_file.replace('\\','/'), actual_output_cellset_file.replace('\\','/'), assert_status=False) + delete_files_silently([actual_output_cellset_file]) + + + def test_binarize_cellset_percentile_threshold(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file = os.path.join(base_dir, 'cellset_pcaica_2cells.isxd') + actual_output_cellset_file = os.path.join(output_dir, 'actual_binary_cellset.isxd') + expected_output_cellset_file = os.path.join(base_dir, 'cellset_pcaica_2cells_percentile_threshold_27.isxd') + delete_files_silently([actual_output_cellset_file]) + + isx.binarize_cell_set(input_cellset_file, actual_output_cellset_file, threshold=27, use_percentile_threshold=True) + + assert_isxd_cellsets_are_close_by_path(expected_output_cellset_file.replace('\\','/'), actual_output_cellset_file.replace('\\','/'), assert_status=False) + delete_files_silently([actual_output_cellset_file]) + + def test_crop_cell_set(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'cellset_crop') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file = os.path.join(base_dir, 'cellset_binary_5x5_3cells.isxd') + actual_output_cellset_file = 
os.path.join(output_dir, 'actual_cropped_cellset.isxd') + expected_output_cellset_file = os.path.join(base_dir, 'cellset_binary_5x5_3cells_cropped.isxd') + delete_files_silently([actual_output_cellset_file]) + + isx.crop_cell_set(input_cellset_file, actual_output_cellset_file, [2, 0, 2, 0]) + + assert_isxd_cellsets_are_close_by_path(expected_output_cellset_file.replace('\\','/'), actual_output_cellset_file.replace('\\','/')) + delete_files_silently([actual_output_cellset_file]) + + def test_transform_cell_set(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'cellset_transform') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file = os.path.join(base_dir, 'input_cellset_pcaica_uneven_crop.isxd') + actual_output_cellset_file = os.path.join(output_dir, 'actual_transformed_cellset.isxd') + expected_output_cellset_file = os.path.join(base_dir, 'expected_cellset_pcaica_uneven_crop.isxd') + delete_files_silently([actual_output_cellset_file]) + + isx.transform_cell_set(input_cellset_file, actual_output_cellset_file, np.nan) + + assert_isxd_cellsets_are_close_by_path(expected_output_cellset_file.replace('\\','/'), actual_output_cellset_file.replace('\\','/')) + delete_files_silently([actual_output_cellset_file]) + + def test_compute_spatial_overlap_cell_set_analog(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file1 = os.path.join(base_dir, 'movie_920_green_resonant-BP-MC-CNMFE-undecided.isxd') + input_cellset_file2 = os.path.join(base_dir, 'movie_920_green_resonant-BP-MC-CNMFE.isxd') + + actual_output_csv_file = os.path.join(output_dir, 'actual_output_scores.csv') + expected_output_csv_file = os.path.join(base_dir, 'ncc_matrix.csv') + delete_files_silently([actual_output_csv_file]) + + isx.compute_spatial_overlap_cell_set(input_cellset_file1, input_cellset_file2, actual_output_csv_file) + + 
assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_output_csv_file.replace('\\','/'), actual_output_csv_file.replace('\\','/')) + delete_files_silently([actual_output_csv_file]) + + def test_compute_spatial_overlap_cell_set_analog_accepted_only(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file1 = os.path.join(base_dir, 'movie_920_green_resonant-BP-MC-CNMFE-undecided.isxd') + input_cellset_file2 = os.path.join(base_dir, 'movie_920_green_resonant-BP-MC-CNMFE.isxd') + + actual_output_csv_file = os.path.join(output_dir, 'actual_output_scores.csv') + expected_output_csv_file = os.path.join(base_dir, 'ncc_matrix_accepted_only.csv') + delete_files_silently([actual_output_csv_file]) + + isx.compute_spatial_overlap_cell_set(input_cellset_file1, input_cellset_file2, actual_output_csv_file, accepted_cells_only=True) + + assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_output_csv_file.replace('\\','/'), actual_output_csv_file.replace('\\','/')) + delete_files_silently([actual_output_csv_file]) + + def test_compute_spatial_overlap_cell_set_binary_full_overlap(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file1 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_cellset_file2 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov.isxd') + + input_binarized_cellset_file1 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_binarized_cellset_file2 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov2.isxd') + + actual_output_csv_file = os.path.join(output_dir, 'actual_output_f1_scores.csv') + expected_output_csv_file = os.path.join(base_dir, 'cellset_binary_f1_scores_full_overlap.csv') + delete_files_silently([actual_output_csv_file, 
input_binarized_cellset_file1, input_binarized_cellset_file2]) + + # test data is already binarized - however we need the cell sets to have type metadata indicating they are "binary" + # so we first binarize - which simply sets the metadata without actually changing the test data + isx.binarize_cell_set(input_cellset_file1, input_binarized_cellset_file1, threshold=0.5, use_percentile_threshold=False) + isx.binarize_cell_set(input_cellset_file2, input_binarized_cellset_file2, threshold=0.5, use_percentile_threshold=False) + isx.compute_spatial_overlap_cell_set(input_binarized_cellset_file1, input_binarized_cellset_file2, actual_output_csv_file) + + assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_output_csv_file.replace('\\','/'), actual_output_csv_file.replace('\\','/')) + delete_files_silently([actual_output_csv_file, input_binarized_cellset_file1, input_binarized_cellset_file2]) + + def test_compute_spatial_overlap_cell_set_binary_partial_overlap(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file1 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_cellset_file2 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov_partial_overlap.isxd') + + input_binarized_cellset_file1 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_binarized_cellset_file2 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov2.isxd') + + actual_output_csv_file = os.path.join(output_dir, 'actual_output_f1_scores.csv') + expected_output_csv_file = os.path.join(base_dir, 'cellset_binary_f1_scores_partial_overlap.csv') + delete_files_silently([actual_output_csv_file, input_binarized_cellset_file1, input_binarized_cellset_file2]) + + # test data is already binarized - however we need the cell sets to have type metadata indicating they are "binary" + # so we first binarize - which simply sets the metadata without 
actually changing the test data + isx.binarize_cell_set(input_cellset_file1, input_binarized_cellset_file1, threshold=0.5, use_percentile_threshold=False) + isx.binarize_cell_set(input_cellset_file2, input_binarized_cellset_file2, threshold=0.5, use_percentile_threshold=False) + isx.compute_spatial_overlap_cell_set(input_binarized_cellset_file1, input_binarized_cellset_file2, actual_output_csv_file) + + assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_output_csv_file.replace('\\','/'), actual_output_csv_file.replace('\\','/')) + delete_files_silently([actual_output_csv_file, input_binarized_cellset_file1, input_binarized_cellset_file2]) + + def test_compute_spatial_overlap_cell_set_binary_no_overlap(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_cellset_file1 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_cellset_file2 = os.path.join(base_dir, 'cellset_binary_3cells_3x4fov_no_overlap.isxd') + + input_binarized_cellset_file1 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov.isxd') + input_binarized_cellset_file2 = os.path.join(output_dir, 'cellset_binary_3cells_3x4fov2.isxd') + + actual_output_csv_file = os.path.join(output_dir, 'actual_output_f1_scores.csv') + expected_output_csv_file = os.path.join(base_dir, 'cellset_binary_f1_scores_no_overlap.csv') + delete_files_silently([actual_output_csv_file, input_binarized_cellset_file1, input_binarized_cellset_file2]) + + # test data is already binarized - however we need the cell sets to have type metadata indicating they are "binary" + # so we first binarize - which simply sets the metadata without actually changing the test data + isx.binarize_cell_set(input_cellset_file1, input_binarized_cellset_file1, threshold=0.5, use_percentile_threshold=False) + isx.binarize_cell_set(input_cellset_file2, input_binarized_cellset_file2, threshold=0.5, 
use_percentile_threshold=False) + isx.compute_spatial_overlap_cell_set(input_binarized_cellset_file1, input_binarized_cellset_file2, actual_output_csv_file) + + assert_csv_pairwise_spatial_overlap_matrices_are_close_by_path(expected_output_csv_file.replace('\\','/'), actual_output_csv_file.replace('\\','/')) + delete_files_silently([actual_output_csv_file, input_binarized_cellset_file1, input_binarized_cellset_file2]) + + def test_MulticolorRegistration_EmptyCellSet(self): + test_dir = test_data_path + '/unit_test/dual_color' + input_cellset_file1 = test_dir + '/cellset_green_dynamic_no_accepted.isxd' + input_cellset_file2 = test_dir + '/cellset_red_static.isxd' + output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + delete_dirs_silently(output_directory) + os.makedirs(output_directory) + + lower_threshold = 0.1 + upper_threshold = 0.3 + accepted_cells_only = True + save_matched_cellset = False + save_unmatched_cellset = False + save_uncertain_cellset = False + image_format = "tiff" + + try: + isx.multicolor_registration( + input_cellset_file1, input_cellset_file2, output_spatial_overlap_csv_file, output_registration_matrix_csv_file, + output_directory, lower_threshold, upper_threshold, + accepted_cells_only, save_matched_cellset, save_unmatched_cellset, save_uncertain_cellset, image_format + ) + except Exception as e: + assert str(e) == "Error calling C library function isx_multicolor_registration.\nThere are no cells to process" + + # check no intermediate files generated + assert not os.path.exists(output_directory + "/cellset_red_static-BIN.isxd") + assert not os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG.isxd") + assert not os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN.isxd") + assert not 
os.path.exists(output_directory + "/cellset_red_static-BIN-cellmap.tiff") + assert not os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellmap.tiff") + assert not os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellmap-overlay.tiff") + assert not os.path.exists(output_spatial_overlap_csv_file) + assert not os.path.exists(output_registration_matrix_csv_file) + + delete_dirs_silently(output_directory) + + def test_MulticolorRegistration_NonEmptyCellSet(self): + test_dir = test_data_path + '/unit_test/dual_color' + input_cellset_file1 = test_dir + '/cellset_green_dynamic_no_accepted.isxd' + input_cellset_file2 = test_dir + '/cellset_red_static.isxd' + output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + delete_dirs_silently(output_directory) + os.makedirs(output_directory) + + lower_threshold = 0.1 + upper_threshold = 0.3 + accepted_cells_only = False + save_matched_cellset = True + save_unmatched_cellset = True + save_uncertain_cellset = True + image_format = "tiff" + + isx.multicolor_registration( + input_cellset_file1, input_cellset_file2, output_spatial_overlap_csv_file, output_registration_matrix_csv_file, + output_directory, lower_threshold, upper_threshold, + accepted_cells_only, save_matched_cellset, save_unmatched_cellset, save_uncertain_cellset, image_format + ) + + # intermediate files + assert os.path.exists(output_directory + "/cellset_red_static-BIN.isxd") + assert os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG.isxd") + assert os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN.isxd") + + assert os.path.exists(output_directory + "/cellset_red_static-BIN-cellmap.tiff") + assert os.path.exists(output_directory + 
"/cellset_green_dynamic_no_accepted-REG-BIN-cellmap.tiff") + assert os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellmap-overlay.tiff") + + assert not os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellset-matched.isxd") + assert os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellset-non-matched.isxd") + assert os.path.exists(output_directory + "/cellset_green_dynamic_no_accepted-REG-BIN-cellset-uncertain.isxd") + + assert os.path.exists(output_spatial_overlap_csv_file) + assert os.path.exists(output_registration_matrix_csv_file) + + delete_dirs_silently(output_directory) + + + def test_MulticolorRegistration_DynamicDynamicIdentity(self): + test_dir = test_data_path + '/unit_test/dual_color' + input_cellset_file1 = test_dir + '/cellset_green_dynamic.isxd' + input_cellset_file2 = test_dir + '/cellset_green_dynamic.isxd' + output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + delete_dirs_silently(output_directory) + os.makedirs(output_directory) + + lower_threshold = 0.1 + upper_threshold = 0.3 + accepted_cells_only = False + save_matched_cellset = False + save_unmatched_cellset = False + save_uncertain_cellset = False + + isx.multicolor_registration( + input_cellset_file1, input_cellset_file2, output_spatial_overlap_csv_file, output_registration_matrix_csv_file, + output_directory, lower_threshold, upper_threshold, + accepted_cells_only, save_matched_cellset, save_unmatched_cellset, save_uncertain_cellset + ) + + # Each cell should match with itself + df = pd.read_csv(output_spatial_overlap_csv_file, index_col=0) + np.testing.assert_equal(df.values.diagonal(), np.array([1., 1., 1., 1.])) + + df = pd.read_csv(output_registration_matrix_csv_file) + for index, row in 
df.iterrows(): + assert row['primary'] == row['secondary'] + assert row['max_ncc'] == 1. + assert row['match'] == "yes" + + delete_dirs_silently(output_directory) + + + def test_CellsetRegistration_AnalogIdentity(self): + test_dir = test_data_path + '/unit_test/dual_color' + input_cellset_file1 = test_dir + '/cellset_green_dynamic.isxd' + input_cellset_file2 = test_dir + '/cellset_green_dynamic.isxd' + output_directory = test_data_path + '/unit_test/tmp_output_multicolor_reg' + output_spatial_overlap_csv_file = output_directory + '/tmp_output_spatial_overlap.csv' + output_registration_matrix_csv_file = output_directory + '/tmp_output_reg_matrix.csv' + + delete_dirs_silently(output_directory) + os.makedirs(output_directory) + + isx.register_cellsets( + input_cellset_file1, + input_cellset_file2, + output_spatial_overlap_csv_file, + output_registration_matrix_csv_file, + output_directory, + lower_threshold=0.1, + upper_threshold=0.3, + accepted_cells_only=False, + primary_cellset_name="bob", + secondary_cellset_name="joe", + primary_color=0x123456, + secondary_color=0x000099 + ) + + # Each cell should match with itself + df = pd.read_csv(output_spatial_overlap_csv_file, index_col=0) + np.testing.assert_equal(df.values.diagonal(), np.array([1., 1., 1., 1.])) + + df = pd.read_csv(output_registration_matrix_csv_file) + for index, row in df.iterrows(): + assert row['bob'] == row['joe'] + assert row['max_ncc'] == 1. 
+ assert row['match'] == "yes" + + delete_dirs_silently(output_directory) + + def test_CellsetDeconvolve_Denoised(self): + input_raw_cellset_file = test_data_path + '/unit_test/cellset_deconvolve/idps_movie_128x128x1000-CNMFe_OASIS.isxd' + output_denoised_cellset_file = test_data_path + '/unit_test/output/denoised_cellset.isxd' + + expected = test_data_path + '/unit_test/guilded/exp_mosaicCellSetDeconvolveDenoised128x128x1000.isxd' + + delete_files_silently([output_denoised_cellset_file]) + + isx.deconvolve_cellset( + input_raw_cellset_file, + output_denoised_cellset_files=output_denoised_cellset_file, + output_spike_eventset_files=None, + accepted_only=False, + spike_snr_threshold=3.0, + noise_range=(0.25, 0.5), + noise_method='mean', + first_order_ar=True, + lags=5, + fudge_factor=0.96, + deconvolution_method='oasis') + + assert_isxd_cellsets_are_close_by_path(expected, output_denoised_cellset_file, relative_tolerance=1e-5, use_cosine=True) + + delete_files_silently([output_denoised_cellset_file]) + + def test_CellsetDeconvolve_Spikes(self): + input_raw_cellset_file = test_data_path + '/unit_test/cellset_deconvolve/idps_movie_128x128x1000-CNMFe_OASIS.isxd' + output_spike_eventset_file = test_data_path + '/unit_test/output/spike_eventset.isxd' + + expected = test_data_path + '/unit_test/guilded/exp_mosaicCellSetDeconvolveSpikes128x128x1000.isxd' + + delete_files_silently([output_spike_eventset_file]) + + isx.deconvolve_cellset( + input_raw_cellset_file, + output_denoised_cellset_files=None, + output_spike_eventset_files=output_spike_eventset_file, + accepted_only=False, + spike_snr_threshold=3.0, + noise_range=(0.25, 0.5), + noise_method='mean', + first_order_ar=True, + lags=5, + fudge_factor=0.96, + deconvolution_method='oasis') + + assert_isxd_event_sets_are_close_by_path(expected, output_spike_eventset_file, relative_tolerance=1e-4) + + delete_files_silently([output_spike_eventset_file]) + + def test_estimate_vessel_diameter(self): + input_movie_files = [ + 
test_data_path + "/unit_test/bloodflow/bloodflow_movie_1.isxd", + test_data_path + "/unit_test/bloodflow/bloodflow_movie_2.isxd" + ] + vs_out_files = [ + test_data_path + "/unit_test/output/bloodflow_movie_1_vesselset.isxd", + test_data_path + "/unit_test/output/bloodflow_movie_2_vesselset.isxd" + ] + exp_vs_files = [ + test_data_path + "/unit_test/bloodflow/blood_flow_movie_1-VD_window2s_increment1s.isxd", + test_data_path + "/unit_test/bloodflow/blood_flow_movie_2-VD_window2s_increment1s.isxd" + ] + delete_files_silently(vs_out_files) + + test_contours = [ + [[96, 95], [222, 182]], + [[348, 301], [406, 311]], + [[439, 302], [482, 357]], + [[110, 355], [128, 409]] + ] + + try: + isx.estimate_vessel_diameter( + input_movie_files, + vs_out_files, + test_contours, + time_window=2, + time_increment=1, + output_units="microns", + estimation_method="Parametric FWHM", + auto_accept_reject=False) + except Exception as error: + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." 
in str(error): + return + else: + raise error + + # Test traces + for i in range(2): + assert_isxd_vesselsets_are_close_by_path(exp_vs_files[i], vs_out_files[i]) + + delete_files_silently(vs_out_files) + + def test_estimate_vessel_diameter_non_parametric(self): + input_movie_files = [ + test_data_path + "/unit_test/bloodflow/bloodflow_movie_1.isxd", + test_data_path + "/unit_test/bloodflow/bloodflow_movie_2.isxd" + ] + vs_out_files = [ + test_data_path + "/unit_test/output/bloodflow_movie_1_vesselset.isxd", + test_data_path + "/unit_test/output/bloodflow_movie_2_vesselset.isxd" + ] + exp_vs_files = [ + test_data_path + "/unit_test/bloodflow/blood_flow_movie_1-VD_window2s_increment1s_non_parametric.isxd", + test_data_path + "/unit_test/bloodflow/blood_flow_movie_2-VD_window2s_increment1s_non_parametric.isxd" + ] + delete_files_silently(vs_out_files) + + test_contours = [ + [[96, 95], [222, 182]], + [[348, 301], [406, 311]], + [[439, 302], [482, 357]], + [[110, 355], [128, 409]] + ] + + try: + isx.estimate_vessel_diameter( + input_movie_files, + vs_out_files, + test_contours, + time_window=2, + time_increment=1, + output_units="microns", + estimation_method="Non-Parametric FWHM", + auto_accept_reject=True, + rejection_threshold_fraction=0.2, + rejection_threshold_count=5) + except Exception as error: + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." 
in str(error): + return + else: + raise error + + # Test traces + for i in range(2): + assert_isxd_vesselsets_are_close_by_path(exp_vs_files[i], vs_out_files[i]) + + delete_files_silently(vs_out_files) + + def test_estimate_vessel_diameter_microns_probe_none(self): + input_movie_file = test_data_path + '/unit_test/baseplate/2021-06-28-23-34-09_video_sched_0_probe_none.isxd' + vs_out_file = test_data_path + '/unit_test/output/bloodflow_movie_2_vesselset.isxd' + delete_files_silently([vs_out_file]) + + test_points = np.array([[[0,0],[1,1]],[[100,100],[200,200]],[[4,4],[5,5]]]) + + with pytest.raises(Exception) as error: + isx.estimate_vessel_diameter(input_movie_file, vs_out_file, test_points, 2, 2, "microns") + + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." in str(error.value): + return + + assert 'Baseplate type does not support output unit conversion to Microns. Please select "Pixels" as output units.' in str(error.value) + + assert not is_file(vs_out_file) + + def test_estimate_vessel_diameter_microns_probe_custom(self): + input_movie_file = test_data_path + '/unit_test/baseplate/2021-06-28-23-45-49_video_sched_0_probe_custom.isxd' + vs_out_file = test_data_path + '/unit_test/output/bloodflow_movie_1_vesselset.isxd' + delete_files_silently([vs_out_file]) + + test_points = np.array([[[0,0],[1,1]],[[100,100],[200,200]],[[4,4],[5,5]]]) + + with pytest.raises(Exception) as error: + isx.estimate_vessel_diameter(input_movie_file, vs_out_file, test_points, 2, 2, "microns") + + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." in str(error.value): + return + + assert 'Baseplate type does not support output unit conversion to Microns. 
Please select "Pixels" as output units.' in str(error.value) + + assert not is_file(vs_out_file) + + def test_estimate_rbc_velocity(self): + input_movie_files = [ + test_data_path + "/unit_test/bloodflow/rbcv_movie_1-BP.isxd", + test_data_path + "/unit_test/bloodflow/rbcv_movie_2-BP.isxd" + ] + vs_out_files = [ + test_data_path + "/unit_test/output/rbcv_movie_out_vs_1.isxd", + test_data_path + "/unit_test/output/rbcv_movie_out_vs_2.isxd" + ] + exp_vs_files = [ + test_data_path + "/unit_test/bloodflow/rbcv_movie_1-RBCV_microns.isxd", + test_data_path + "/unit_test/bloodflow/rbcv_movie_2-RBCV_microns.isxd" + ] + delete_files_silently(vs_out_files) + + test_contours = [ + [[124, 25], [153, 36], [90, 202], [61, 191]], + [[24, 42], [43, 34], [85, 148], [65, 156]] + ] + + try: + isx.estimate_rbc_velocity( + input_movie_files, + vs_out_files, + test_contours, + time_window=2, + time_increment=1, + output_units="microns", + save_correlation_heatmaps=True) + except Exception as error: + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." 
in str(error): + return + else: + raise error + + # Test traces + for i in range(2): + assert_isxd_vesselsets_are_close_by_path(exp_vs_files[i], vs_out_files[i]) + + delete_files_silently(vs_out_files) + + def test_estimate_rbc_velocity_microns_probe_none(self): + input_movie_file = test_data_path + '/unit_test/baseplate/2021-06-28-23-34-09_video_sched_0_probe_none.isxd' + vs_out_file = test_data_path + '/unit_test/output/microns_probe_none_vesselset.isxd' + delete_files_silently([vs_out_file]) + + test_contours = np.array([ [ [0, 0], [0, 10], [10, 0], [10, 10] ] ]) + + with pytest.raises(Exception) as error: + isx.estimate_rbc_velocity(input_movie_file, vs_out_file, test_contours, 2.5, 1.25, "microns") + + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." in str(error.value): + return + + assert 'Baseplate type does not support output unit conversion to Microns. Please select "Pixels" as output units.' in str(error.value) + + assert not is_file(vs_out_file) + + def test_estimate_rbc_velocity_microns_probe_custom(self): + input_movie_file = test_data_path + '/unit_test/baseplate/2021-06-28-23-45-49_video_sched_0_probe_custom.isxd' + vs_out_file = test_data_path + '/unit_test/output/microns_probe_custom_vesselset.isxd' + delete_files_silently([vs_out_file]) + + test_contours = np.array([ [ [0, 0], [0, 10], [10, 0], [10, 10] ] ]) + + with pytest.raises(Exception) as error: + isx.estimate_rbc_velocity(input_movie_file, vs_out_file, test_contours, 2.5, 1.25, "microns") + + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." in str(error.value): + return + + assert 'Baseplate type does not support output unit conversion to Microns. Please select "Pixels" as output units.' 
in str(error.value) + + assert not is_file(vs_out_file) + + def test_create_neural_activity_movie(self): + input_cell_set_files = [ + test_data_path + "/unit_test/cellset_crop/cellset_binary_5x5_3cells.isxd" + ] + output_neural_movie_files = [ + test_data_path + "/unit_test/output/tmp_cellset_binary_5x5_3cells-NA.isxd" + ] + + isx.create_neural_activity_movie( + input_cell_set_files, + output_neural_movie_files, + accepted_cells_only=False) + + expected_movie_path = test_data_path + "/unit_test/create_neural_movie/cellset_binary_5x5_3cells-NA.isxd" + expected_movie = isx.Movie.read(expected_movie_path) + actual_movie = isx.Movie.read(output_neural_movie_files[0]) + + assert_isxd_movies_are_close(expected_movie, actual_movie) + + del expected_movie + del actual_movie + delete_files_silently(output_neural_movie_files) + + def test_interpolate_movie(self): + input_movie_files = [ + test_data_path + "/unit_test/dual_color/DualColorMultiplexingMovie_green1_red4-channel_red-PP_001-TPC.isxd" + ] + output_interpolated_movie_files = [ + test_data_path + "/unit_test/output/DualColorMultiplexingMovie_green1_red4-channel_red-PP_001-TPC-IN.isxd" + ] + delete_files_silently(output_interpolated_movie_files) + + isx.interpolate_movie( + input_movie_files, + output_interpolated_movie_files, + interpolate_dropped=True, + interpolate_blank=True, + max_consecutive_invalid_frames=2) + + expected_movie_path = test_data_path + "/unit_test/dual_color/DualColorMultiplexingMovie_green1_red4-channel_red-PP_001-TPC-IN.isxd" + expected_movie = isx.Movie.read(expected_movie_path) + actual_movie = isx.Movie.read(output_interpolated_movie_files[0]) + + assert_isxd_movies_are_close(expected_movie, actual_movie) + + del expected_movie + del actual_movie + delete_files_silently(output_interpolated_movie_files) + + def test_estimate_vessel_diameter_single_vessel(self): + input_movie_file = test_data_path + "/unit_test/bloodflow/bloodflow_movie_1.isxd" + test_contour = [[96, 95], [222, 182]] + 
start_frame = 0 + end_frame = 1000 + + try: + line_profile, fit, estimate, line_coords = isx.estimate_vessel_diameter_single_vessel( + input_movie_file, + test_contour, + start_frame, + end_frame, + output_coordinates=True) + except Exception as error: + # Skip test if blood flow features are disabled in this version + if "Blood flow algorithms are not available in this version of the software. Please contact support in order to enable these features." in str(error): + return + else: + raise error + + exp_line_profile = np.array( + [2.07333333, 6.30666667, 4.22 , 0. , 4.77 , 5.39333333, 6.01 , 4.63666667, + 7.05333333, 9.27 , 7.64 , 8.71 , 6.15666667, 8.13333333, 11.42333333, 10.37666667, + 14.42 , 16.68666667, 13.42333333, 13.07666667, 15.63333333, 13.79 , 17.6 , 20.16666667, + 19.90333333, 18.05333333, 20.09666667, 22.09333333, 27.35333333, 26.99666667, 24.11333333, 28.29 , + 30.93 , 32.32666667, 37.73666667, 46.77333333, 58.55666667, 71.56666667, 76.56333333, 84.95 , + 94.81 , 99.30666667, 117.75 , 119.57666667, 124.58 , 135.04666667, 150.82666667, 154.97666667, + 161.90666667, 174.62 , 184.61 , 191.95666667, 203.53 , 210.23666667, 217.59 , 226.18 , + 237.95 , 242.23333333, 254.26333333, 255.51666667, 259.54666667, 256.04 , 261.36666667, 263.47333333, + 268.17 , 264.17333333, 264.65 , 261.53333333, 270.47333333, 265.57333333, 271.27666667, 264.63666667, + 263. 
, 259.03666667, 259.23333333, 256.96666667, 255.28666667, 253.82 , 247.49666667, 243.03666667, + 243.11666667, 236.26666667, 227.68 , 218.70333333, 213.98 , 202.26666667, 197.24333333, 181.61333333, + 176.42333333, 166.50666667, 158.40333333, 151.28333333, 138.89 , 132.61666667, 114.60333333, 109.48 , + 103.64333333, 88.59666667, 78.71666667, 65.34 , 63.71333333, 51.06666667, 47.50333333, 44.85 , + 49.08 , 40.26666667, 38.83 , 39.94666667, 35.51 , 36.40333333, 40.23666667, 43.46666667, + 37.20333333, 37.07333333, 35.58666667, 39.43666667, 39.5 , 40.32666667, 38.67 , 37.65333333, + 42.38333333, 39.06 , 35.36333333, 34.71333333, 35.58 , 41.67666667, 38.01] + ) + np.testing.assert_allclose(line_profile, exp_line_profile, rtol=1e5) + + exp_line_coords = np.array( + [[ 96, 95],[ 97, 96],[ 98, 96],[ 99, 97],[100, 98],[101, 98],[102, 99],[103, 100],[104, 101],[105, 101],[106, 102], + [107, 103],[108, 103],[109, 104],[110, 105],[111, 105],[112, 106],[113, 107],[114, 107],[115, 108],[116, 109],[117, 109], + [118, 110],[119, 111],[120, 112],[121, 112],[122, 113],[123, 114],[124, 114],[125, 115],[126, 116],[127, 116],[128, 117],[129, 118], + [130, 118],[131, 119],[132, 120],[133, 121],[134, 121],[135, 122],[136, 123],[137, 123],[138, 124],[139, 125],[140, 125],[141, 126], + [142, 127],[143, 127],[144, 128],[145, 129],[146, 130],[147, 130],[148, 131],[149, 132],[150, 132],[151, 133],[152, 134],[153, 134], + [154, 135],[155, 136],[156, 136],[157, 137],[158, 138],[159, 138],[160, 139],[161, 140],[162, 141],[163, 141],[164, 142],[165, 143], + [166, 143],[167, 144],[168, 145],[169, 145],[170, 146],[171, 147],[172, 147],[173, 148],[174, 149],[175, 150],[176, 150],[177, 151], + [178, 152],[179, 152],[180, 153],[181, 154],[182, 154],[183, 155],[184, 156],[185, 156],[186, 157],[187, 158],[188, 159],[189, 159], + [190, 160],[191, 161],[192, 161],[193, 162],[194, 163],[195, 163],[196, 164],[197, 165],[198, 165],[199, 166],[200, 167],[201, 167],[202, 168], + [203, 169],[204, 170],[205, 
170],[206, 171],[207, 172],[208, 172],[209, 173],[210, 174],[211, 174],[212, 175],[213, 176],[214, 176],[215, 177], + [216, 178],[217, 179],[218, 179],[219, 180],[220, 181],[221, 181],[222, 182]], + dtype=np.int32 + ) + np.testing.assert_array_equal(line_coords, exp_line_coords) + + exp_fit = { + 'amplitude': 18135.996082935464, + 'fwhm': 39.27577937146833, + 'peak_center': 69.69494166725066 + } + np.testing.assert_allclose(fit['amplitude'], exp_fit['amplitude'], rtol=1e5) + np.testing.assert_allclose(fit['fwhm'], exp_fit['fwhm'], rtol=1e5) + np.testing.assert_allclose(fit['peak_center'], exp_fit['peak_center'], rtol=1e5) + + exp_estimate = { + 'length': 47.35285870356088, + 'center': 84.02773357871772 + } + np.testing.assert_allclose(estimate['length'], exp_estimate['length'], rtol=1e5) + np.testing.assert_allclose(estimate['center'], exp_estimate['center'], rtol=1e5) + + def test_decompress_isxc_movie(self): + input_compressed_file = test_data_path + "/unit_test/compressed/2022-05-12-21-28-30_video_DR_5_OQ_5.isxc" + output_dir = test_data_path + "/unit_test/output/" + + isx.decompress(input_compressed_file, output_dir) + + expected_movie_path = test_data_path + '/unit_test/compressed/2022-05-12-21-28-30_video_DR_5_OQ_5-decompressed.isxd' + actual_movie_path = output_dir + '2022-05-12-21-28-30_video_DR_5_OQ_5-decompressed.isxd' + expected_movie = isx.Movie.read(expected_movie_path) + actual_movie = isx.Movie.read(actual_movie_path) + + assert_isxd_movies_are_close(expected_movie, actual_movie) + + del expected_movie + del actual_movie + + delete_files_silently([actual_movie_path]) + + @pytest.mark.tiff_movie + @pytest.mark.csv_trace + def test_CellSetExporter(self): + unit_test_dir = test_data_path + '/unit_test' + input_cell_set = unit_test_dir + '/eventDetectionCellSet.isxd' + output_dir = unit_test_dir + '/output' + output_trace = output_dir + '/trace_output.csv' + output_image = output_dir + '/image_output.tiff' + output_images = [output_dir + 
'/image_output_C{}.tiff'.format(i) for i in range(3)] + + delete_files_silently([output_trace] + output_images) + + isx.export_cell_set_to_csv_tiff([input_cell_set], output_trace, output_image, 'start') + + expected_base_path = unit_test_dir + '/guilded/exp_mosaicCellSetExporter_' + assert_csv_traces_are_close_by_path(expected_base_path + 'TraceOutput.csv', output_trace) + expected_images = [expected_base_path + 'ImageOutput-v2_C{}.tiff'.format(i) for i in range(3)] + for exp, act in zip(expected_images, output_images): + assert_tiff_files_equal_by_path(exp, act) + + assert not os.path.exists(output_dir + '/trace_output-props.csv') + + delete_files_silently([output_trace] + output_images) + + @pytest.mark.csv_trace + def test_CellSetExporterWithProps(self): + unit_test_dir = test_data_path + '/unit_test' + test_dir = unit_test_dir + '/cellset_exporter' + input_cell_sets = ['{}/50fr10_l{}-3cells_he-ROI-LCR.isxd'.format(test_dir, i) for i in range(1, 4)] + output_dir = unit_test_dir + '/output' + output_props = output_dir + '/properties.csv' + + delete_files_silently([output_props]) + + isx.export_cell_set_to_csv_tiff(input_cell_sets, '', '', 'start', output_props_file=output_props) + + exp_props = unit_test_dir + '/guilded/exp_CellSetExporterWithProps-v2.csv' + assert_csv_files_are_equal_by_path(exp_props, output_props) + + delete_files_silently([output_props]) + + @pytest.mark.tiff_movie + @pytest.mark.csv_trace + def test_VesselSetExporter(self): + unit_test_dir = test_data_path + '/unit_test' + input_vessel_set = unit_test_dir + '/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + output_dir = unit_test_dir + '/output' + output_trace = output_dir + '/trace_output.csv' + output_line = output_dir + '/line_output.csv' + output_map = output_dir + '/map_output.tiff' + output_heatmaps = output_dir + 'heatmaps' + + delete_files_silently([output_trace, output_line, output_map]) + delete_dirs_silently([output_heatmaps]) + os.makedirs(output_heatmaps) + + 
isx.export_vessel_set_to_csv_tiff([input_vessel_set], + output_trace_csv_file=output_trace, + output_line_csv_file=output_line, + output_map_tiff_file=output_map, + output_heatmaps_tiff_dir=output_heatmaps, + time_ref='start') + + exp_trace_data = [ + 'Vessel ID,Vessel Status,Index,Time (s),Velocity (um/s),Direction (deg),Clipping Error,Direction Change Error,No Significant Pixels Error,Invalid Frame Error\n', + 'V0,undecided,0,1,2800.791,248.087,False,False,False,False\n', + 'V0,undecided,1,2,3007.701,247.697,False,False,False,False\n', + 'V0,undecided,2,3,3026.005,247.423,False,False,False,False\n', + 'V0,undecided,3,4,3320.473,247.733,False,False,False,False\n', + 'V0,undecided,4,5,2187.080,246.729,False,False,False,False\n', + 'V0,undecided,5,6,nan,nan,False,False,False,True\n', + 'V1,undecided,0,1,640.503,112.884,False,False,False,False\n', + 'V1,undecided,1,2,587.908,113.486,False,False,False,False\n', + 'V1,undecided,2,3,611.888,114.251,False,False,False,False\n', + 'V1,undecided,3,4,610.198,113.903,False,False,False,False\n', + 'V1,undecided,4,5,539.932,112.015,False,False,False,False\n', + 'V1,undecided,5,6,nan,nan,False,False,False,True\n' + ] + with open(output_trace, 'r') as f: + trace_data = f.read() + assert trace_data == ''.join(exp_trace_data) + + exp_line_data = [ + 'Name,Status,ColorR,ColorG,ColorB,PointX0,PointY0,PointX1,PointY1,PointX2,PointY2,PointX3,PointY3,Max Velocity(um/s)\n', + 'V0,undecided,255,255,255,124,25,153,36,90,202,61,191,6882.64\n', + 'V1,undecided,255,255,255,24,42,43,34,85,148,65,156,4709.46\n' + ] + with open(output_line, 'r') as f: + line_data = f.read() + assert line_data == ''.join(exp_line_data) + + assert os.path.exists(output_map) + + num_vessels = 2 + assert len(os.listdir(output_heatmaps)) == num_vessels + + delete_files_silently([output_trace, output_line, output_map]) + delete_dirs_silently([output_heatmaps]) + + + @pytest.mark.tiff_movie + @pytest.mark.csv_trace + def test_VesselSetExporterNoOutputs(self): + 
unit_test_dir = test_data_path + '/unit_test' + input_vessel_set = unit_test_dir + '/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + + with pytest.raises(ValueError) as e: + isx.export_vessel_set_to_csv_tiff([input_vessel_set]) + + @pytest.mark.json_cell_contours + def test_ExportCellContours(self): + cell_set_file = test_data_path + '/unit_test/cell_metrics/cell_metrics_movie-PCA-ICA.isxd' + + output = test_data_path + '/unit_test/output/cell_contours_3cells_python.json' + expected = test_data_path + '/unit_test/cell_metrics/expected_cell_contours_3cells_python-v2.json' + + delete_files_silently([output]) + + isx.export_cell_contours(cell_set_file, output, threshold=0.0, rectify_first=True) + + assert_json_files_equal_by_path(expected, output) + + delete_files_silently([output]) diff --git a/isx/test/test_core.py b/isx/test/test_core.py new file mode 100644 index 0000000..a1efb6f --- /dev/null +++ b/isx/test/test_core.py @@ -0,0 +1,153 @@ +# This tests some core functionality related to timing and spacing. 
+ +import datetime +import numpy as np +import pytest + +import isx + +class TestCore: + def test_version(self): + assert isx.__version__ + + @pytest.mark.duration + def test_duration_from_to_secs(self): + secs = 79 + duration = isx.Duration.from_secs(secs) + assert duration.to_secs() == secs + assert duration._impl.den == 1 + + @pytest.mark.duration + def test_duration_from_to_msecs(self): + msecs = 5237 + duration = isx.Duration.from_msecs(msecs) + assert duration.to_msecs() == msecs + assert duration._impl.den == 1e3 + + @pytest.mark.duration + def test_duration_from_to_usecs(self): + usecs = 123920 + duration = isx.Duration.from_usecs(usecs) + assert duration.to_usecs() == usecs + assert duration._impl.den == 1e6 + + @pytest.mark.duration + def test_duration_from_secs_float(self): + secs = 0.09364790469408035 + duration = isx.Duration._from_secs_float(secs, max_denominator=1000000000) + assert duration._impl.num == 12569209 + assert duration._impl.den == 134217728 + + simplified_duration = isx.Duration._from_secs_float(secs, max_denominator=100) + assert simplified_duration._impl.num == 3 + assert simplified_duration._impl.den == 32 + + @pytest.mark.duration + def test_duration_str_valid(self): + duration = isx.Duration.from_msecs(50) + assert isinstance(str(duration), str) + + @pytest.mark.duration + def test_duration_str_invalid(self): + duration = isx.Duration() + assert isinstance(str(duration), str) + + @pytest.mark.time + def test_time_from_to_secs_since_epoch(self): + secs_since_epoch = isx.Duration.from_secs(1523469679) + time = isx.Time._from_secs_since_epoch(secs_since_epoch) + assert time._to_secs_since_epoch() == secs_since_epoch + exp_datetime = datetime.datetime(2018, 4, 11, 18, 1, 19) + assert time.to_datetime() == exp_datetime + + @pytest.mark.time + def test_time_from_msecs_since_epoch(self): + secs_since_epoch = isx.Duration.from_msecs(1523469679261) + time = isx.Time._from_secs_since_epoch(secs_since_epoch) + assert 
time._to_secs_since_epoch() == secs_since_epoch + exp_datetime = datetime.datetime(2018, 4, 11, 18, 1, 19, 261000) + assert time.to_datetime() == exp_datetime + + @pytest.mark.time + def test_time_str_valid(self): + time = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1523469679261)) + assert isinstance(str(time), str) + + @pytest.mark.time + def test_time_str_valid(self): + time = isx.Time() + assert isinstance(str(time), str) + + @pytest.mark.spacing + def test_spacing_get_num_pixels(self): + num_pixels = (4, 7) + spacing = isx.Spacing(num_pixels=num_pixels) + assert spacing.num_pixels == num_pixels + + @pytest.mark.spacing + def test_spacing_str_valid(self): + spacing = isx.Spacing(num_pixels=(1080, 1440)) + assert isinstance(str(spacing), str) + + @pytest.mark.spacing + def test_spacing_str_invalid(self): + spacing = isx.Spacing() + assert isinstance(str(spacing), str) + + @pytest.mark.timing + def test_timing_get_offsets_since_start(self): + num_samples = 10 + period = isx.Duration.from_msecs(26) + timing = isx.Timing(num_samples=num_samples, period=period) + + # Note that the first offset is 0/1, so if we fill it with 0/1000 + # our strict equality check will fail. 
+ exp_offsets = [isx.Duration()] + for i in range(1, num_samples): + exp_offsets.append(isx.Duration.from_msecs(i * 26)) + + act_offsets = timing.get_offsets_since_start() + for i in range(num_samples): + assert act_offsets[i] == exp_offsets[i] + + @pytest.mark.timing + def test_timing_get_valid_samples(self): + num_samples = 10 + timing = isx.Timing(num_samples=num_samples) + np.testing.assert_array_equal(timing.get_valid_samples(), range(num_samples)) + + @pytest.mark.timing + def test_timing_get_valid_samples_with_dropped(self): + num_samples = 10 + dropped = [2, 4, 5, 6] + timing = isx.Timing(num_samples=num_samples, dropped=dropped) + np.testing.assert_array_equal(timing.dropped, dropped) + np.testing.assert_array_equal(timing.get_valid_samples(), [0, 1, 3, 7, 8, 9]) + + @pytest.mark.timing + def test_timing_get_valid_samples_with_cropped(self): + num_samples = 10 + cropped = [[2, 3], [6, 8]] + timing = isx.Timing(num_samples=num_samples, cropped=cropped) + np.testing.assert_array_equal(timing.cropped, cropped) + np.testing.assert_array_equal(timing.get_valid_samples(), [0, 1, 4, 5, 9]) + + @pytest.mark.timing + def test_timing_get_valid_samples_with_dropped_and_cropped(self): + num_samples = 10 + dropped = [2, 4] + cropped = [[6, 8]] + timing = isx.Timing(num_samples=num_samples, dropped=dropped, cropped=cropped) + np.testing.assert_array_equal(timing.dropped, dropped) + np.testing.assert_array_equal(timing.cropped, cropped) + np.testing.assert_array_equal(timing.get_valid_samples(), [0, 1, 3, 5, 9]) + + @pytest.mark.timing + def test_timing_str_valid(self): + timing = isx.Timing(num_samples=18, dropped=[2, 4], cropped=[[5, 6], [9, 11]]) + assert isinstance(str(timing), str) + + @pytest.mark.timing + def test_timing_str_invalid(self): + timing = isx.Timing() + assert isinstance(str(timing), str) diff --git a/isx/test/test_file_io.py b/isx/test/test_file_io.py new file mode 100644 index 0000000..64cfb1b --- /dev/null +++ b/isx/test/test_file_io.py @@ -0,0 
+1,2883 @@ +from test.utilities.setup import delete_files_silently, delete_dirs_silently, test_data_path, is_file +import operator, shutil, os, platform + +import h5py +import numpy as np +import pandas as pd +import pytest + +import isx + +from test.utilities.create_sample_data import write_sample_cellset, write_sample_vessel_diameter_set, write_sample_rbc_velocity_set +from test.asserts import assert_json_files_equal_by_path, assert_tiff_files_equal_by_path, \ + assert_csv_traces_are_close_by_path, compare_h5_groups, assert_csv_events_are_equal_by_path, \ + assert_csv_files_are_equal_by_path, assert_isxd_images_are_close_by_path_nan_zero + +data_types = ('float16', 'float32', 'float64', 'uint8', 'uint16', 'uint32', 'uint64', 'int16', 'int32', 'int64') + +class TestFileIO: + def test_ReadCellSet(self): + input_file = test_data_path + '/unit_test/eventDetectionCellSet.isxd' + cellset = isx.CellSet.read(input_file) + + assert cellset.get_cell_name(0) == 'C0' + assert cellset.get_cell_name(1) == 'C1' + assert cellset.get_cell_name(2) == 'C2' + + assert cellset.num_cells == 3 + + exp_period = isx.Duration._from_num_den(1, 10) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_secs(20)) + exp_spacing = isx.Spacing(num_pixels=(200,200)) + exp_spacing._impl.pixel_width = isx._internal.IsxRatio(22, 10) + exp_spacing._impl.pixel_height = isx._internal.IsxRatio(22, 10) + + assert cellset.timing == isx.Timing(num_samples=50, period=exp_period, start=exp_start) + assert cellset.spacing == exp_spacing + + def test_ReadCellSetName(self): + input_file = test_data_path + '/unit_test/eventDetectionCellSet.isxd' + cellset = isx.CellSet.read(input_file) + + name = cellset.get_cell_name(1) + + assert name == 'C1' + + def test_read_cell_set_with_dropped_cropped(self): + cellset = isx.CellSet.read(test_data_path + '/unit_test/cropped/Trimmed-ROI.isxd') + + exp_period = isx.Duration._from_num_den(100, 1500) + exp_start = 
isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1478271283662)) + exp_timing = isx.Timing(num_samples=40, period=exp_period, start=exp_start, + dropped=[10], cropped=[[2, 6], [12, 12], [19, 34]]) + + assert cellset.timing == exp_timing + + def test_ReadWriteCellSet(self): + cs_out_file = test_data_path + '/unit_test/output/test_readwrite_cellset.isxd' + delete_files_silently([cs_out_file]) + + # create sample data that will be used to make the cell set + cell_props = write_sample_cellset(cs_out_file) + + # read the created cell set file and confirm the correct values have been written + cs_in = isx.CellSet.read(cs_out_file) + assert cs_in.num_cells == cell_props['num_cells'] + assert cs_in.spacing == cell_props['spacing'] + assert cs_in.timing == cell_props['timing'] + valid_samples_mask = cs_in.timing.get_valid_samples_mask() + for k in range(cs_in.num_cells): + np.testing.assert_array_equal(cs_in.get_cell_image_data(k), cell_props['images'][k, :, :]) + trace = cs_in.get_cell_trace_data(k) + exp_trace = np.where(valid_samples_mask, cell_props['traces'][k, :], np.nan) + np.testing.assert_array_equal(trace, exp_trace) + assert cs_in.get_cell_name(k) == cell_props['names'][k] + assert cs_in.get_cell_status(k) == 'undecided' + + del cs_in + delete_files_silently([cs_out_file]) + + + @pytest.mark.cellset + @pytest.mark.parametrize('image_type', data_types) + @pytest.mark.parametrize('traces_type', data_types) + def test_ReadWriteCellSetOtherTypeToFloat32(self, traces_type, image_type): + cs_out_file = test_data_path + '/unit_test/output/test_readwrite_cellset.isxd' + delete_files_silently([cs_out_file]) + + num_cells = 2 + num_pixels = (4, 3) + num_samples = 5 + timing = isx.Timing(num_samples=num_samples) + spacing = isx.Spacing(num_pixels=num_pixels) + + cs_out = isx.CellSet.write(cs_out_file, timing, spacing) + images = np.random.rand(num_cells, *num_pixels).astype(image_type) + traces = np.random.rand(num_cells, num_samples).astype(traces_type) + for c in 
range(num_cells): + + if image_type != 'float32': + if traces_type != 'float32': + with pytest.warns(UserWarning) as warnings: + cs_out.set_cell_data(c, images[c, :, :], traces[c, :], 'C{:02d}'.format(c)) + assert 'Converting from {} to float32.'.format(image_type) in [str(x.message) for x in warnings] + assert 'Converting from {} to float32.'.format(traces_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(UserWarning) as warnings: + cs_out.set_cell_data(c, images[c, :, :], traces[c, :], 'C{:02d}'.format(c)) + assert 'Converting from {} to float32.'.format(image_type) in [str(x.message) for x in warnings] + else: + if traces_type != 'float32': + with pytest.warns(UserWarning) as warnings: + cs_out.set_cell_data(c, images[c, :, :], traces[c, :], 'C{:02d}'.format(c)) + assert 'Converting from {} to float32.'.format(traces_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(None) as warnings: + cs_out.set_cell_data(c, images[c, :, :], traces[c, :], 'C{:02d}'.format(c)) + assert not [str(x.message) for x in warnings.list] + + cs_out.flush() + + # read the created cell set file and confirm the correct values have been written + cs_in = isx.CellSet.read(cs_out_file) + for c in range(cs_in.num_cells): + np.testing.assert_array_almost_equal(cs_in.get_cell_image_data(c), images[c, :, :]) + np.testing.assert_array_almost_equal(cs_in.get_cell_trace_data(c), traces[c, :]) + + del cs_in + del cs_out + delete_files_silently([cs_out_file]) + + def test_ReadWriteCellSetStatus(self): + """ Test writing a cell set, then reading it and setting the status, then reading it again. 
""" + + cs_out_file = test_data_path + '/unit_test/output/test_readwrite_cellset.isxd' + delete_files_silently([cs_out_file]) + + # create sample data that will be used to make the cell set + cell_props = write_sample_cellset(cs_out_file) + + # read the created cell set file and set the new status values + cs_in = isx.CellSet.read(cs_out_file, read_only=False) + + statuses = ['accepted', 'rejected'] + written_status = list() + for k in range(cs_in.num_cells): + new_stat = statuses[int(np.random.rand() > 0.50)] + cs_in.set_cell_status(k, new_stat) + written_status.append(new_stat) + + # re-read the created cell set file and check the status values + cs_in = isx.CellSet.read(cs_out_file) + for k in range(cs_in.num_cells): + assert cs_in.get_cell_status(k) == written_status[k], "Cell status does not match" + + del cs_in + delete_files_silently([cs_out_file]) + + def test_GetCell(self): + input_file = test_data_path + '/unit_test/eventDetectionCellSet.isxd' + cellset = isx.CellSet.read(input_file) + + trace = cellset.get_cell_trace_data(0) + image = cellset.get_cell_image_data(0) + status = cellset.get_cell_status(0) + + assert trace.shape == (cellset.timing.num_samples,) + assert image.shape == (200, 200) + assert status == 'undecided' + + + def test_CellSetStrValid(self): + cell_set = isx.CellSet.read(test_data_path + '/unit_test/eventDetectionCellSet.isxd') + assert isinstance(str(cell_set), str) + + + def test_CellSetStrInvalid(self): + cell_set = isx.CellSet() + assert isinstance(str(cell_set), str) + + + def test_ReadImage(self): + file_path = test_data_path + '/unit_test/single_10x10_frameMovie.isxd' + image = isx.Image.read(file_path) + assert image.mode == 'r' + assert image.data_type == np.uint16 + exp_spacing = isx.Spacing(num_pixels=(10, 10)) + exp_spacing._impl.pixel_width = isx._internal.IsxRatio(22, 10) + exp_spacing._impl.pixel_height = isx._internal.IsxRatio(22, 10) + assert image.spacing == exp_spacing + assert image.file_path == file_path + 
image_data = image.get_data() + assert image_data.dtype == image.data_type + assert image_data.shape == image.spacing.num_pixels + mask = np.zeros(image_data.shape, dtype=bool) + mask[3:7, 3:7] = True + np.testing.assert_array_equal(image_data, np.where(mask, 1, 0)) + + def test_WriteImageU16(self): + file_path = test_data_path + '/unit_test/output/image-u16.isxd' + delete_files_silently([file_path]) + spacing = isx.Spacing(num_pixels=(5, 10)) + data_type = np.uint16 + data = np.random.randint(low=0, high=4095, size=spacing.num_pixels, dtype=data_type) + image = isx.Image.write(file_path, spacing, data_type, data) + assert image.mode == 'w' + assert image.data_type == data_type + assert image.spacing == spacing + assert image.file_path == file_path + np.testing.assert_array_equal(image.get_data(), data) + + image = isx.Image.read(file_path) + assert image.mode == 'r' + assert image.data_type == data_type + assert image.spacing == spacing + assert image.file_path == file_path + np.testing.assert_array_equal(image.get_data(), data) + + del image + delete_files_silently([file_path]) + + def test_WriteImageF32(self): + file_path = test_data_path + '/unit_test/output/image-f32.isxd' + spacing = isx.Spacing(num_pixels=(6, 7)) + data_type = np.float32 + data = np.random.random(spacing.num_pixels).astype(data_type) + + delete_files_silently([file_path]) + image = isx.Image.write(file_path, spacing, data_type, data) + assert image.mode == 'w' + assert image.data_type == data_type + assert image.spacing == spacing + assert image.file_path == file_path + np.testing.assert_array_equal(image.get_data(), data) + + image = isx.Image.read(file_path) + assert image.mode == 'r' + assert image.data_type == data_type + assert image.spacing == spacing + assert image.file_path == file_path + np.testing.assert_array_equal(image.get_data(), data) + + del image + delete_files_silently([file_path]) + + + @pytest.mark.image + @pytest.mark.parametrize('image_data_type', data_types) + 
@pytest.mark.parametrize('container_data_type', ('uint16', 'float32')) + def test_ReadWriteImageOtherTypeToFloat32(self, image_data_type, container_data_type): + image_file = test_data_path + '/unit_test/output/test_readwrite_image.isxd' + spacing = isx.Spacing(num_pixels=(4, 5)) + data = np.random.random(spacing.num_pixels).astype(image_data_type) + + delete_files_silently([image_file]) + if image_data_type != container_data_type: + with pytest.warns(UserWarning) as warnings: + image = isx.Image.write(image_file, spacing, np.__getattribute__(container_data_type), data) + assert 'Converting from {0} to {1}.'.format(image_data_type, container_data_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(None) as warnings: + image = isx.Image.write(image_file, spacing, np.__getattribute__(container_data_type), data) + assert not [str(x.message) for x in warnings.list] + + np.testing.assert_array_equal(image.get_data(), data.astype(container_data_type)) + + # read the created image and confirm the correct values have been written + image = isx.Image.read(image_file) + np.testing.assert_array_equal(image.get_data(), data.astype(container_data_type)) + + del image + delete_files_silently([image_file]) + + + def test_ImageStrValid(self): + image = isx.Image.read(test_data_path + '/unit_test/single_10x10_frameMovie.isxd') + assert isinstance(str(image), str) + + + def test_ImageStrInvalid(self): + image = isx.Image() + assert isinstance(str(image), str) + + + def test_ReadMovie(self): + input_path = test_data_path + '/unit_test/recording_20160426_145041.xml' + + movie = isx.Movie.read(input_path) + + assert movie.spacing == isx.Spacing(num_pixels=(500, 500)) + exp_period = isx.Duration._from_num_den(1000, 10020) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1461682241930)) + assert movie.timing == isx.Timing(num_samples=33, period=exp_period, start=exp_start) + + def test_ReadMovieF32(self): + input_path = test_data_path + 
'/unit_test/guilded/exp_mosaicReadMovieF32.isxd' + + movie = isx.Movie.read(input_path) + + exp_spacing = isx.Spacing(num_pixels=(3, 4)) + exp_spacing._impl.pixel_width = isx._internal.IsxRatio(22, 10) + exp_spacing._impl.pixel_height = isx._internal.IsxRatio(22, 10) + assert movie.spacing == exp_spacing + exp_period = isx.Duration.from_msecs(20) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration._from_num_den(1, 100)) + assert movie.timing == isx.Timing(num_samples=3, period=exp_period, start=exp_start) + assert movie.data_type == np.float32 + + f0 = movie.get_frame_data(0) + exp_f0 = np.linspace(-1, 1, 12).reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f0 - exp_f0)) < 1e-6 + + f1 = movie.get_frame_data(1) + exp_f1 = np.linspace(-1, 1, 12)[::-1].reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f1 - exp_f1)) < 1e-6 + + f2 = movie.get_frame_data(2) + exp_f2 = np.linspace(0, 1, 12).reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f2 - exp_f2)) < 1e-6 + + def test_ReadMovieU16(self): + input_path = test_data_path + '/unit_test/guilded/exp_mosaicReadMovieU16.isxd' + + movie = isx.Movie.read(input_path) + + exp_spacing = isx.Spacing(num_pixels=(3, 4)) + exp_spacing._impl.pixel_width = isx._internal.IsxRatio(22, 10) + exp_spacing._impl.pixel_height = isx._internal.IsxRatio(22, 10) + assert movie.spacing == exp_spacing + exp_period = isx.Duration.from_msecs(20) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration._from_num_den(1, 100)) + assert movie.timing == isx.Timing(num_samples=3, period=exp_period, start=exp_start) + assert movie.data_type == np.uint16 + + f0 = movie.get_frame_data(0) + exp_f0 = np.arange(1, 13, dtype='uint16').reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f0 - exp_f0)) < 1e-6 + + f1 = movie.get_frame_data(1) + exp_f1 = np.arange(1, 13, dtype='uint16')[::-1].reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f1 - exp_f1)) < 1e-6 + + f2 = movie.get_frame_data(2) + exp_f2 = np.array([1, 2, 3, 5, 
7, 11, 13, 17, 19, 23, 29, 31]).reshape(movie.spacing.num_pixels) + assert np.max(np.abs(f2 - exp_f2)) < 1e-6 + + def test_ReadMovieU16XmlTiff(self): + input_path = test_data_path + '/unit_test/recording_20161104_145443.xml'; + + movie = isx.Movie.read(input_path) + + assert movie.spacing == isx.Spacing(num_pixels=(1080, 1440)) + exp_period = isx.Duration._from_num_den(1000, 15000) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1478271283662)) + assert movie.timing == isx.Timing(num_samples=40, period=exp_period, start=exp_start, dropped=[10]) + assert movie.data_type == np.uint16 + + f0 = movie.get_frame_data(0) + assert f0[0, 0] == 242 + assert f0[378, 654] == 2468 + assert f0[861, 1143] == 472 + + + def test_ReadMovieU16XmlHdf5(self): + input_path = test_data_path + '/unit_test/recording_20140729_145048.xml' + + movie = isx.Movie.read(input_path) + + exp_period = isx.Duration._from_num_den(45, 1000); + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_secs(1406670648)); + exp_dropped = [20, 30, 31, 32, 33, 34] + assert movie.timing == isx.Timing(num_samples=66, period=exp_period, start=exp_start, dropped=exp_dropped); + assert movie.data_type == np.uint16 + + frame0_data = movie.get_frame_data(0) + np.testing.assert_array_equal(frame0_data, 4094 * np.ones(frame0_data.shape, dtype=np.uint16)) + + + def test_ReadMovieU16XmlHdf5s(self): + input_path = test_data_path + '/unit_test/recording_20160706_132714.xml' + + movie = isx.Movie.read(input_path) + + exp_period = isx.Duration._from_num_den(1000, 10010); + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1467811643999)); + assert movie.timing == isx.Timing(num_samples=82, period=exp_period, start=exp_start); + assert movie.data_type == np.uint16 + + f0 = movie.get_frame_data(0) + assert f0[0, 0] == 1416 + assert f0[182, 132] == 2123 + assert f0[396, 435] == 2283 + + + def test_ReadMovieWithDroppedCropped(self): + input_path = test_data_path + 
'/unit_test/cropped/recording_20161104_145443-TPC.isxd'; + + movie = isx.Movie.read(input_path) + + exp_period = isx.Duration._from_num_den(100, 1500) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1478271283662)) + exp_timing = isx.Timing(num_samples=40, period=exp_period, start=exp_start, + dropped=[10], cropped=[[2, 6], [12, 12], [19, 34]]) + + assert movie.timing == exp_timing + + def test_ReadMovieNoFrameTimestamps(self): + input_path = test_data_path + '/unit_test/guilded/exp_mosaicReadMovieF32.isxd' + + movie = isx.Movie.read(input_path) + + with pytest.raises(Exception) as error: + movie.get_frame_timestamp(0) + + assert 'No frame timestamps stored in movie.' in str(error.value) + + def test_ReadMovieFrameTimestamps(self): + input_path = test_data_path + '/unit_test/baseplate/2021-06-14-13-30-29_video_green.isxd' + + movie = isx.Movie.read(input_path) + + exp_first_timestamp = 2845042412112 + exp_last_timestamp = 2845044110777 + + assert movie.get_frame_timestamp(0) == exp_first_timestamp + assert movie.get_frame_timestamp(movie.timing.num_samples - 1) == exp_last_timestamp + + def test_ReadNVisionMovie(self): + input_path = test_data_path + '/unit_test/nVision/20220401-022845-KTM-RQEHB_10_secs.isxb' + movie = isx.Movie.read(input_path) + + # verify file path + assert movie.file_path == input_path + + # verify timing info + exp_period = isx.Duration._from_num_den(9968014, 299000000) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1648798026332), 25200) + exp_num_samples = 300 + exp_timing = isx.Timing(num_samples=exp_num_samples, period=exp_period, start=exp_start) + assert movie.timing == exp_timing + + # verify spacing info + exp_spacing = isx.Spacing(num_pixels=(720, 1280)) + assert movie.spacing == exp_spacing + + # verify data type + exp_data_type = np.uint8 + assert movie.data_type == exp_data_type + + # verify frame data by computing sum of all frames in movie + # Results of codec are slightly different 
between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_frame_sum = 11687268770 + else: + exp_frame_sum = 11687253109 + frame_sum = 0 + for i in range(movie.timing.num_samples): + frame = movie.get_frame_data(i).astype(np.uint64) + frame_sum += np.sum(frame, dtype=np.uint64) + assert frame_sum == exp_frame_sum + + # verify first and last tsc values + exp_start_tsc = 215738669569 + exp_last_tsc = 215748637583 + assert movie.get_frame_timestamp(0) == exp_start_tsc + assert movie.get_frame_timestamp(movie.timing.num_samples - 1) == exp_last_tsc + + # verify acquisition info + exp_acquistion_info = { + 'Animal Date of Birth': '', 'Animal Description': '', 'Animal ID': '', 'Animal Sex': 'm', 'Animal Species': '', 'Animal Weight': 0, + 'Camera Brightness': 0, 'Camera Contrast': 32, 'Camera Gain': 0, 'Camera Name': 'camera-1', 'Camera Saturation': 64, 'Camera Serial Number': 'KTM-RQEHB', + 'Miniscope Paired': False + } + assert movie.get_acquisition_info() == exp_acquistion_info + + def test_ReadNVisionMovieOutOfBounds(self): + input_path = test_data_path + '/unit_test/nVision/20220401-022845-KTM-RQEHB_10_secs.isxb' + movie = isx.Movie.read(input_path) + + with pytest.raises(Exception) as error: + movie.get_frame_data(movie.timing.num_samples) + assert 'Failed to read frame from file. Index is out of bounds.' in str(error.value) + + with pytest.raises(Exception) as error: + movie.get_frame_timestamp(movie.timing.num_samples) + assert 'Failed to read frame timestamp from file. Index is out of bounds.' 
in str(error.value) + + def test_ReadNVisionMovieDropped(self): + input_path = test_data_path + '/unit_test/nVision/2022-04-18-21-48-13-camera-1_dropped.isxb' + movie = isx.Movie.read(input_path) + + # verify timing info + exp_period = isx.Duration._from_num_den(37712004, 1131000000) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1650343693459), 25200) + exp_num_samples = 1132 + exp_dropped = [ + 186, 188, 189, 190, 192, 193, 194, 196, 197, 198, 200, 201, 202, 204, 205, + 206, 208, 209, 210, 212, 213, 214, 216, 217, 218, 220, 221, 223, 224, 226, + 227, 228, 230, 231, 232, 234, 235, 237, 238, 239, 241, 242, 244, 245, 247, + 248, 249, 251, 252, 254, 255, 256, 258, 259, 260, 262, 263, 265, 266, 267, + 269, 270, 271, 273, 274, 276, 277, 278, 280, 281, 283, 284, 286, 287, 288, + 290, 291, 293, 294, 295, 297, 298, 300, 301, 303, 304, 305, 307, 308, 309, + 311, 313, 314, 315, 317, 318, 319, 321, 322, 324, 325, 326, 328, 329, 331, + 332, 334, 335, 336, 338, 339, 340, 342, 343, 345, 346, 347, 349, 350, 352, + 353, 354, 355, 357, 358, 359, 361, 362, 364, 365, 366, 368, 369, 370, 372, + 373, 375, 376, 378, 379, 380, 382, 383, 385, 386, 387, 389, 390, 392, 393, + 394, 396, 397, 398, 400, 401, 403, 404, 405, 407, 408, 410, 411, 412, 414, + 415, 417, 418, 419, 421, 422, 423, 425, 426, 428, 429, 430, 432, 433, 435, + 436, 438, 439, 440, 442, 443, 444, 446, 447, 449, 450, 451, 453, 454, 455, + 457, 458, 459, 461, 462, 464, 465, 466, 468, 470, 471, 472, 474, 475, 477, + 478, 479, 481, 482, 484, 485, 486, 488, 489, 491, 492, 494, 495, 496, 498, + 499, 500, 502, 503, 505, 506, 507, 509, 510, 512, 513, 514, 516, 517, 519, + 520, 521, 523, 524, 526, 527, 529, 530, 531, 533, 534, 535, 537, 538 + ] + exp_timing = isx.Timing(num_samples=exp_num_samples, period=exp_period, start=exp_start, dropped=exp_dropped) + assert movie.timing == exp_timing + + # verify frame data by computing sum of a subset of frames in the movie + # Results of codec are slightly different 
between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_frame_sum = 1100566302 + else: + exp_frame_sum = 1100566002 + frame_sum = 0 + start_frame = 180 + num_frames = 10 + for i in range(start_frame, start_frame + num_frames): + frame = movie.get_frame_data(i).astype(np.uint64) + frame_sum += np.sum(frame, dtype=np.uint64) + assert frame_sum == exp_frame_sum + + # verify tsc values and check the value is zero for dropped frames + exp_start_tsc = 38971101006 + exp_last_tsc = 39008813010 + assert movie.get_frame_timestamp(0) == exp_start_tsc + assert movie.get_frame_timestamp(movie.timing.num_samples - 1) == exp_last_tsc + for i in exp_dropped: + assert movie.get_frame_timestamp(i) == 0 + + def test_WriteNVisionMovie(self): + input_path = test_data_path + '/unit_test/nVision/test.isxb' + + with pytest.raises(Exception) as error: + movie = isx.Movie.write(input_path, isx.Timing(), isx.Spacing(), np.float32) + + assert 'Cannot write isxb movies.' 
in str(error.value) + + def test_MovieStrValid(self): + movie = isx.Movie.read(test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd') + assert isinstance(str(movie), str) + + + def test_MovieStrInvalid(self): + movie = isx.Movie() + assert isinstance(str(movie), str) + + + def test_WriteMovieU16(self): + output_path = test_data_path + '/unit_test/output/test_write_outputU16.isxd' + delete_files_silently([output_path]) + + start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(146776827382931)) + timing = isx.Timing(num_samples=9, period=isx.Duration.from_msecs(50), start=start, dropped=[2, 3, 5]) + spacing = isx.Spacing(num_pixels=(3, 3)) + data_type = np.uint16 + frames = np.random.randint(low=0, high=4095, size=[*spacing.num_pixels, timing.num_samples], dtype=data_type) + + movie = isx.Movie.write(output_path, timing, spacing, data_type=data_type) + for i in timing.get_valid_samples(): + movie.set_frame_data(i, frames[:, :, i]) + movie.flush() + + movie = isx.Movie.read(output_path) + + assert movie.spacing == spacing + assert movie.timing == timing + assert movie.data_type == np.uint16 + + for i in timing.get_valid_samples(): + np.testing.assert_array_equal(movie.get_frame_data(i), frames[:, :, i]) + + del movie + delete_files_silently([output_path]) + + def test_WriteMovieF32(self): + output_path = test_data_path + '/unit_test/output/test_write_outputF32.isxd' + delete_files_silently([output_path]) + + start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1467768443283)) + timing = isx.Timing(num_samples=12, period=isx.Duration.from_msecs(50), start=start, + dropped=[1, 11], cropped=[[3, 5], [8, 9]]) + spacing = isx.Spacing(num_pixels=(32, 57)) + data_type = np.float32 + frames = np.random.randn(spacing.num_pixels[0], spacing.num_pixels[1], timing.num_samples).astype(data_type) + + movie = isx.Movie.write(output_path, timing, spacing, data_type=data_type) + for k in timing.get_valid_samples(): + movie.set_frame_data(k, frames[:, :, k]) + 
movie.flush() + + movie = isx.Movie.read(output_path) + + assert movie.timing == timing + assert movie.spacing == spacing + assert movie.data_type == np.float32 + + for k in timing.get_valid_samples(): + np.testing.assert_array_equal(movie.get_frame_data(k), frames[:, :, k]) + + del movie + delete_files_silently([output_path]) + + @pytest.mark.movie + @pytest.mark.parametrize('frame_data_type', data_types) + @pytest.mark.parametrize('movie_data_type', ('uint16', 'float32')) + def test_WriteMovieOtherTypeToFloat32(self, frame_data_type, movie_data_type): + output_path = test_data_path + '/unit_test/output/test_write_outputF32.isxd' + delete_files_silently([output_path]) + + start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1467768443282)) + timing = isx.Timing(num_samples=5, period=isx.Duration.from_msecs(52), start=start) + spacing = isx.Spacing(num_pixels=(5, 3)) + frames = np.random.randn(spacing.num_pixels[0], spacing.num_pixels[1], + timing.num_samples).astype(frame_data_type) + + movie = isx.Movie.write(output_path, timing, spacing, data_type=np.__getattribute__(movie_data_type)) + for k in timing.get_valid_samples(): + if frame_data_type != movie_data_type: + with pytest.warns(UserWarning) as warnings: + movie.set_frame_data(k, frames[:, :, k]) + assert 'Converting from {0} to {1}.'.format(frame_data_type, movie_data_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(None) as warnings: + movie.set_frame_data(k, frames[:, :, k]) + assert not [str(x.message) for x in warnings] + + movie.flush() + + movie = isx.Movie.read(output_path) + + assert movie.timing == timing + assert movie.spacing == spacing + assert movie.data_type == np.dtype(movie_data_type) + + for k in timing.get_valid_samples(): + np.testing.assert_array_equal(movie.get_frame_data(k), frames[:, :, k].astype(movie_data_type)) + + del movie + delete_files_silently([output_path]) + + + @pytest.mark.tiff_movie + def test_MovieTiffExporter(self): + movie_file_path = 
test_data_path + '/unit_test/50fr10_l1-3cells_he.isxd' + tiff_file_path = test_data_path + '/unit_test/output/test_output.tiff' + expected_file_path = test_data_path + '/unit_test/guilded/exp_mosaicMovieTiffExporter_output-v2.tiff' + + delete_files_silently([tiff_file_path]) + + isx.export_movie_to_tiff(movie_file_path, tiff_file_path) + + assert_tiff_files_equal_by_path(expected_file_path, tiff_file_path) + + delete_files_silently([tiff_file_path]) + + @pytest.mark.tiff_movie + def test_MovieTiffExporterWithInvalid(self): + input_movie_file_path = test_data_path + '/unit_test/cropped/recording_20161104_145443-TPC.isxd' + output_movie_file_path = test_data_path + '/unit_test/output/output.tif' + + delete_files_silently([output_movie_file_path]) + isx.export_movie_to_tiff(input_movie_file_path, output_movie_file_path, write_invalid_frames=True) + + input_movie = isx.Movie.read(input_movie_file_path) + output_movie = isx.Movie.read(output_movie_file_path) + + num_pixels = input_movie.spacing.num_pixels + num_samples = input_movie.timing.num_samples + valid_samples_mask = input_movie.timing.get_valid_samples_mask() + assert num_samples == output_movie.timing.num_samples + assert len(input_movie.timing.dropped) > 0 + assert len(input_movie.timing.cropped) > 0 + for i in range(num_samples): + if valid_samples_mask[i]: + np.testing.assert_array_equal(output_movie.get_frame_data(i), input_movie.get_frame_data(i)) + else: + np.testing.assert_array_equal(output_movie.get_frame_data(i), np.zeros(num_pixels, np.uint16)) + + del output_movie + delete_files_silently([output_movie_file_path]) + + + @pytest.mark.nwb_movie + def test_MovieExporter(self): + movie_file_path = test_data_path + '/unit_test/recording_20160426_145041.hdf5' + output_nwb_path = test_data_path + '/unit_test/output/test_output.nwb' + expected_nwb_path = test_data_path + '/unit_test/guilded/exp_mosaicMovieExporter_output-v3.nwb' + + delete_files_silently([output_nwb_path]) + + identifier = 
'recording_20160426_145041 recording_20160426_145041 NWB-1.0.6 2018-01-03T11:57:36.590-08:00' + description = 'Exported from Inscopix Data Processing.' + + isx.export_movie_to_nwb(movie_file_path, output_nwb_path, identifier=identifier, session_description=description) + + output_nwb = h5py.File(output_nwb_path, 'r') + expected_nwb = h5py.File(expected_nwb_path, 'r') + compare_h5_groups(output_nwb['/'], expected_nwb['/'], ['file_create_date']) + + output_nwb.close() + delete_files_silently([output_nwb_path]) + + @pytest.mark.mp4_movie + def test_MovieMp4ExporterIsxd(self): + movie_file_path = test_data_path + '/unit_test/cropped/recording_20161104_145443-TPC.isxd' + mp4_file_path = test_data_path + '/unit_test/output/test_output.mp4' + + delete_files_silently([mp4_file_path]) + + isx.export_movie_to_mp4( + movie_file_path, + mp4_file_path, + compression_quality=0.1, + write_invalid_frames=False + ) + + # verify size of file + # Results of codec are slightly different between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_file_bytes = 1028858 + else: + exp_file_bytes = 1028817 + assert os.path.getsize(mp4_file_path) == exp_file_bytes + + delete_files_silently([mp4_file_path]) + + @pytest.mark.mp4_movie + def test_MovieMp4ExporterIsxdWithInvalid(self): + movie_file_path = test_data_path + '/unit_test/cropped/recording_20161104_145443-TPC.isxd' + mp4_file_path = test_data_path + '/unit_test/output/test_output.mp4' + + delete_files_silently([mp4_file_path]) + + isx.export_movie_to_mp4( + movie_file_path, + mp4_file_path, + compression_quality=0.1, + write_invalid_frames=True + ) + + # verify size of file + # Results of codec are slightly different between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_file_bytes = 1286184 + else: + exp_file_bytes = 1285643 + assert os.path.getsize(mp4_file_path) == exp_file_bytes + + delete_files_silently([mp4_file_path]) + + 
@pytest.mark.mp4_movie + def test_MovieMp4ExporterIsxb(self): + movie_file_path = test_data_path + '/unit_test/nVision/20220412-200447-camera-100.isxb' + mp4_file_path = test_data_path + '/unit_test/output/test_output.mp4' + + delete_files_silently([mp4_file_path]) + + isx.export_movie_to_mp4( + movie_file_path, + mp4_file_path, + compression_quality=0.1, + write_invalid_frames=False + ) + + # verify size of file + # Results of codec are slightly different between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_file_bytes = 6991088 + else: + exp_file_bytes = 6977564 + assert os.path.getsize(mp4_file_path) == exp_file_bytes + + delete_files_silently([mp4_file_path]) + + @pytest.mark.mp4_movie + def test_MovieMp4ExporterIsxbTracking(self): + movie_file_paths = [ + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_0.isxb", + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_1.isxb" + ] + mp4_file_path = test_data_path + '/unit_test/output/test_output.mp4' + + delete_files_silently([mp4_file_path]) + + isx.export_movie_to_mp4( + movie_file_paths, + mp4_file_path, + compression_quality=0.1, + write_invalid_frames=False, + draw_bounding_box=True, + draw_bounding_box_center=True, + draw_zones=True + ) + + # verify size of file + # Results of codec are slightly different between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_file_bytes = 992197 + else: + exp_file_bytes = 990564 + assert os.path.getsize(mp4_file_path) == exp_file_bytes + + delete_files_silently([mp4_file_path]) + + @pytest.mark.mp4_movie + def test_MovieMp4ExporterIntFrameRate(self): + movie_file_path = test_data_path + '/unit_test/nVision/20220412-200447-camera-100.isxb' + mp4_file_path = test_data_path + '/unit_test/output/test_output.mp4' + + delete_files_silently([mp4_file_path]) + + isx.export_movie_to_mp4( + 
movie_file_path, + mp4_file_path, + compression_quality=0.1, + write_invalid_frames=False, + frame_rate_format='int' + ) + + # verify size of file + # Results of codec are slightly different between windows and linux/mac, but images look very similar + if platform.system() == "Windows": + exp_file_bytes = 6990927 + else: + exp_file_bytes = 6977407 + assert os.path.getsize(mp4_file_path) == exp_file_bytes + + delete_files_silently([mp4_file_path]) + + @pytest.mark.mp4_movie + def test_MovieTimestampExporterIsxdNoTimestamps(self): + movie_file_path = test_data_path + '/unit_test/cnmfe-cpp/movie_128x128x1000.isxd' + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + with pytest.raises(Exception) as error: + isx.export_movie_timestamps_to_csv( + movie_file_path, + csv_file_path, + time_ref='tsc') + assert 'Input movie does not have frame timestamps stored in file.' in str(error.value) + + def test_MovieTimestampExporterIsxdSeries(self): + movie_file_paths = [ + test_data_path + '/unit_test/baseplate/2021-06-28-23-45-49_video_sched_0_probe_custom.isxd', + test_data_path + '/unit_test/baseplate/2021-06-28-23-34-09_video_sched_0_probe_none.isxd', + ] + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_movie_timestamps_to_csv( + movie_file_paths, + csv_file_path, + time_ref='tsc') + + df = pd.read_csv(csv_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'Global Frame Number' : [0], + 'Movie Number' : [0], + 'Local Frame Number' : [0], + 'Frame Timestamp (us)' : [4170546756640]} + )).all(axis=None) + + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'Global Frame Number' : [53], + 'Movie Number' : [1], + 'Local Frame Number' : [26], + 'Frame Timestamp (us)' : [4171250265074]} + )).all(axis=None) + + delete_files_silently([csv_file_path]) + + def test_MovieTimestampExporterIsxb(self): + movie_file_path = test_data_path + 
'/unit_test/nVision/20220412-200447-camera-100.isxb' + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_movie_timestamps_to_csv( + movie_file_path, + csv_file_path, + time_ref='tsc') + + df = pd.read_csv(csv_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'Global Frame Number' : [0], + 'Movie Number' : [0], + 'Local Frame Number' : [0], + 'Frame Timestamp (us)' : [115829025489]} + )).all(axis=None) + + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'Global Frame Number' : [112], + 'Movie Number' : [0], + 'Local Frame Number' : [112], + 'Frame Timestamp (us)' : [115832757521]} + )).all(axis=None) + + delete_files_silently([csv_file_path]) + + def test_MovieTimestampExporterUnix(self): + movie_file_path = test_data_path + '/unit_test/nVision/20220412-200447-camera-100.isxb' + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_movie_timestamps_to_csv( + movie_file_path, + csv_file_path, + time_ref='unix') + + df = pd.read_csv(csv_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'Global Frame Number' : [0], + 'Movie Number' : [0], + 'Local Frame Number' : [0], + 'Frame Timestamp (s)' : [1649819290.471000]} + )).all(axis=None) + + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'Global Frame Number' : [112], + 'Movie Number' : [0], + 'Local Frame Number' : [112], + 'Frame Timestamp (s)' : [1649819294.203032]} + )).all(axis=None) + + delete_files_silently([csv_file_path]) + + def test_MovieTimestampExporterStart(self): + movie_file_path = test_data_path + '/unit_test/nVision/20220412-200447-camera-100.isxb' + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_movie_timestamps_to_csv( + movie_file_path, + csv_file_path, + time_ref='start') + + df = pd.read_csv(csv_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'Global 
Frame Number' : [0], + 'Movie Number' : [0], + 'Local Frame Number' : [0], + 'Frame Timestamp (s)' : [0.000000]} + )).all(axis=None) + + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'Global Frame Number' : [112], + 'Movie Number' : [0], + 'Local Frame Number' : [112], + 'Frame Timestamp (s)' : [3.732032]} + )).all(axis=None) + + delete_files_silently([csv_file_path]) + + @pytest.mark.mp4_movie + def test_NVisionMovieTrackingFrameDataExporter(self): + movie_file_paths = [ + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_0.isxb", + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_1.isxb" + ] + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_nvision_movie_tracking_frame_data_to_csv( + movie_file_paths, + csv_file_path, + time_ref='start' + ) + + with open(csv_file_path, 'r') as f: + lines = f.read().splitlines() + + expected_columns = "Global Frame Number,Movie Number,Local Frame Number,Frame Timestamp (s),Bounding Box Left,Bounding Box Top,Bounding Box Right,Bounding Box Bottom,Bounding Box Center X,Bounding Box Center Y,Confidence,Zone ID,Zone Name,Zone Event,Zone Trigger" + assert lines[0] == expected_columns + + expected_first_line = "0,0,0,0.000000,526.136230,682.003479,650.984802,908.188293,588.560547,795.095886,67.986031,,,," + assert lines[1] == expected_first_line + + expected_last_line = "19,1,9,0.631984,528.115173,776.796631,699.851135,912.584290,613.983154,844.690430,98.499191,4270701760,ZONE#1 rectangle,," + assert lines[-1] == expected_last_line + + delete_files_silently([csv_file_path]) + + @pytest.mark.mp4_movie + def test_NVisionMovieTrackingFrameDataExporterTsc(self): + movie_file_paths = [ + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_0.isxb", + test_data_path + 
"/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_1.isxb" + ] + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_nvision_movie_tracking_frame_data_to_csv( + movie_file_paths, + csv_file_path, + time_ref='tsc' + ) + + with open(csv_file_path, 'r') as f: + lines = f.read().splitlines() + + expected_columns = "Global Frame Number,Movie Number,Local Frame Number,Frame Timestamp (us),Bounding Box Left,Bounding Box Top,Bounding Box Right,Bounding Box Bottom,Bounding Box Center X,Bounding Box Center Y,Confidence,Zone ID,Zone Name,Zone Event,Zone Trigger" + assert lines[0] == expected_columns + + expected_first_line = "0,0,0,163957519943,526.136,682.003,650.985,908.188,588.561,795.096,67.986,,,," + assert lines[1] == expected_first_line + + expected_last_line = "19,1,9,163958151927,528.115,776.797,699.851,912.584,613.983,844.69,98.4992,4270701760,ZONE#1 rectangle,," + assert lines[-1] == expected_last_line + + delete_files_silently([csv_file_path]) + + @pytest.mark.mp4_movie + def test_NVisionMovieTrackingFrameDataExporterUnix(self): + movie_file_paths = [ + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_0.isxb", + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_1.isxb" + ] + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_nvision_movie_tracking_frame_data_to_csv( + movie_file_paths, + csv_file_path, + time_ref='unix' + ) + + with open(csv_file_path, 'r') as f: + lines = f.read().splitlines() + + expected_columns = "Global Frame Number,Movie Number,Local Frame Number,Frame Timestamp (s),Bounding Box Left,Bounding Box Top,Bounding Box Right,Bounding Box Bottom,Bounding Box Center X,Bounding Box Center Y,Confidence,Zone ID,Zone Name,Zone Event,Zone Trigger" + assert lines[0] == expected_columns + + 
expected_first_line = "0,0,0,1705049721.643000,526.136230,682.003479,650.984802,908.188293,588.560547,795.095886,67.986031,,,," + assert lines[1] == expected_first_line + + expected_last_line = "19,1,9,1705049722.274984,528.115173,776.796631,699.851135,912.584290,613.983154,844.690430,98.499191,4270701760,ZONE#1 rectangle,," + assert lines[-1] == expected_last_line + + delete_files_silently([csv_file_path]) + + @pytest.mark.mp4_movie + def test_NVisionMovieTrackingZoneDataExporter(self): + movie_file_paths = [ + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_0.isxb", + test_data_path + "/unit_test/nVision/tracking/Group-20240111-080531_2024-01-12-08-55-21_sched_1.isxb" + ] + csv_file_path = test_data_path + '/unit_test/output/test_output.csv' + + delete_files_silently([csv_file_path]) + + isx.export_nvision_movie_tracking_zone_data_to_csv( + movie_file_paths, + csv_file_path + ) + + with open(csv_file_path, 'r') as f: + lines = f.read().splitlines() + + expected_columns = "ID,Enabled,Name,Description,Type,X 0,Y 0,X 1,Y 1,X 2,Y 2,X 3,Y 3,X 4,Y 4,Major Axis, Minor Axis, Angle" + assert lines[0] == expected_columns + + expected_first_line = "1705077750976,1,ZONE#1 rectangle,,rectangle,534.135,387.9,993.203,387.9,993.203,868.86,534.135,868.86,,,,," + assert lines[1] == expected_first_line + + expected_last_line = "1705077943271,1,ZONE#4 Elipse,,ellipse,1273.26,241.02,,,,,,,,,293.76,98.1654,90" + assert lines[-1] == expected_last_line + + delete_files_silently([csv_file_path]) + + @pytest.mark.csv_trace + def test_EventSetExporterDense(self): + unit_test_dir = test_data_path + '/unit_test' + input_event_set = unit_test_dir + '/guilded/exp_mosaicEventDetection_output-v2.isxd' + output_csv = unit_test_dir + '/output/output.csv' + + delete_files_silently([output_csv]) + + isx.export_event_set_to_csv([input_event_set], output_csv, 'start', sparse_output=False) + + expected_csv = unit_test_dir + 
'/guilded/exp_mosaicEventSetExporter.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + assert not os.path.exists(unit_test_dir + '/output/output-props.csv') + + delete_files_silently([output_csv]) + + @pytest.mark.csv_trace + def test_EventSetExporterSparse(self): + unit_test_dir = test_data_path + '/unit_test' + input_event_set = unit_test_dir + '/guilded/exp_mosaicEventDetection_output-v2.isxd' + output_csv = unit_test_dir + '/output/output.csv' + + delete_files_silently([output_csv]) + + isx.export_event_set_to_csv([input_event_set], output_csv, 'start', sparse_output=True) + + expected_csv = unit_test_dir + '/guilded/exp_mosaicEventSetExporterSparse.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + assert not os.path.exists(unit_test_dir + '/output/output-props.csv') + + delete_files_silently([output_csv]) + + @pytest.mark.csv_trace + def test_EventSetExporterSparseBinary(self): + unit_test_dir = test_data_path + '/unit_test' + input_event_set = unit_test_dir + '/guilded/exp_mosaicEventDetection_output-v2.isxd' + output_csv = unit_test_dir + '/output/output.csv' + + delete_files_silently([output_csv]) + + isx.export_event_set_to_csv([input_event_set], output_csv, 'start', sparse_output=True, write_amplitude=False) + + expected_csv = unit_test_dir + '/guilded/exp_mosaicEventSetExporterSparseBinary.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + assert not os.path.exists(unit_test_dir + '/output/output-props.csv') + + delete_files_silently([output_csv]) + + @pytest.mark.csv_trace + def test_EventSetExporterWithProps(self): + unit_test_dir = test_data_path + '/unit_test' + test_dir = unit_test_dir + '/events-export' + input_event_sets = ['{}/50fr10_l{}-3cells_he-ROI-LCR-ED.isxd'.format(test_dir, i) for i in range(1, 4)] + output_dir = unit_test_dir + '/output' + output_csv = output_dir + '/output.csv' + output_props = output_dir + '/props.csv' + + delete_files_silently([output_csv, 
output_props]) + + isx.export_event_set_to_csv(input_event_sets, output_csv, 'start', output_props_file=output_props) + + exp_props = unit_test_dir + '/guilded/exp_EventSetExporterWithProps-v2.csv' + assert_csv_files_are_equal_by_path(exp_props, output_props) + + delete_files_silently([output_csv, output_props]) + + @pytest.mark.csv_trace + def test_GpioSetExporter(self): + unit_test_dir = test_data_path + '/unit_test' + input_gpio_set = unit_test_dir + '/gpio/2020-05-20-10-33-22_video.gpio' + output_csv = unit_test_dir + '/output/output.csv' + intermediate_isxd_path = '/tmp/2020-05-20-10-33-22_video_gpio.isxd' + intermediate_isxd_windows_path = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_gpio.isxd' + + delete_files_silently([output_csv, intermediate_isxd_path]) + + isx.export_gpio_set_to_csv([input_gpio_set], output_csv, inter_isxd_file_dir='/tmp', time_ref='start') + + expected_csv = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_gpio.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + assert os.path.exists(intermediate_isxd_path) or os.path.exists(intermediate_isxd_windows_path) + + delete_files_silently([output_csv, intermediate_isxd_path]) + + @pytest.mark.csv_trace + def test_ImuSetExporter(self): + unit_test_dir = test_data_path + '/unit_test' + input_gpio_set = unit_test_dir + '/gpio/2020-05-20-10-33-22_video.imu' + output_csv = unit_test_dir + '/output/output.csv' + intermediate_isxd_path = '/tmp/2020-05-20-10-33-22_video_imu.isxd' + intermediate_isxd_windows_path = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_imu.isxd' + + delete_files_silently([output_csv, intermediate_isxd_path]) + + isx.export_gpio_set_to_csv([input_gpio_set], output_csv, inter_isxd_file_dir='/tmp', time_ref='start') + + expected_csv = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_imu.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + assert os.path.exists(intermediate_isxd_path) or os.path.exists(intermediate_isxd_windows_path) 
+ + delete_files_silently([output_csv, intermediate_isxd_path]) + + @pytest.mark.csv_trace + def test_ImuIsxdSetExporter(self): + unit_test_dir = test_data_path + '/unit_test' + input_gpio_set = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_imu.isxd' + output_csv = unit_test_dir + '/output/output.csv' + + delete_files_silently([output_csv]) + + isx.export_gpio_set_to_csv([input_gpio_set], output_csv, inter_isxd_file_dir='/tmp', time_ref='start') + + expected_csv = unit_test_dir + '/gpio/2020-05-20-10-33-22_video_imu.csv' + assert_csv_events_are_equal_by_path(expected_csv, output_csv) + + delete_files_silently([output_csv]) + + @pytest.mark.isxd_events + def test_ReadEvent(self): + input_file = test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd' + event = isx.EventSet.read(input_file) + + assert event.num_cells == 3 + exp_period = isx.Duration._from_num_den(1, 10) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_secs(20)) + assert event.timing == isx.Timing(num_samples=50, period=exp_period, start=exp_start) + + @pytest.mark.isxd_events + def test_GetEventData(self): + input_file = test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd' + event = isx.EventSet.read(input_file) + + time_C0, amplitude_C0 = event.get_cell_data(0) + + assert time_C0[0] == 200000 + np.testing.assert_approx_equal(1.446234, amplitude_C0[0], significant=6) + + time_C2, amplitude_C2 = event.get_cell_data(2) + + assert time_C2[0] == 0 + np.testing.assert_approx_equal(1.238986, amplitude_C2[0], significant=6) + + @pytest.mark.isxd_events + def test_GetEventCellIndex(self): + input_file = test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd' + event = isx.EventSet.read(input_file) + + assert event.get_cell_index('C0') == 0 + assert event.get_cell_index('C1') == 1 + + @pytest.mark.isxd_events + def test_GetEventData_iterate_cells(self): + # test event reading on a bigger file with more events and cells + 
input_file = test_data_path + '/unit_test/event_detection/recording_20161006_111406-PP-bp-mc-dff-pcaica-events.isxd' + exp_file = test_data_path + '/unit_test/event_detection/recording_20161006_111406-PP-bp-mc-dff-pcaica-events.csv' + + event = isx.EventSet.read(input_file) + event_df = pd.read_csv(exp_file) + cnames = [event.get_cell_name(k) for k in range(event.num_cells)] + + num_cells_with_zero_events = 0 + for index, cname in enumerate(cnames): + # get expected event times and amplitudes for the cell, sort + i = event_df[' Cell Name'] == ' {}'.format(cname) + if i.sum() == 0: + num_cells_with_zero_events += 1 + continue + + # compare expected event times and amplitudes with those returned from API + event_times, event_amps = event.get_cell_data(index) + + np.testing.assert_allclose(event_times / 1e6, event_df['Time (s)'][i].values) + np.testing.assert_allclose(event_amps, event_df[' Value'][i].values, rtol=1e-5) + + # this is just a check to make sure nothing stupid happened + assert num_cells_with_zero_events < len(cnames) + + @pytest.mark.isxd_events + @pytest.mark.parametrize('method', ('get_cell_data', 'get_cell_name')) + @pytest.mark.parametrize('cell_index', (424, 500000000, 18446744073709551615)) + def test_GetEventData_bad_cell_index_int(self, method, cell_index): + input_file = test_data_path + '/unit_test/event_detection/recording_20161006_111406-PP-bp-mc-dff-pcaica-events.isxd' + + event = isx.EventSet.read(input_file) + with pytest.raises(Exception) as error: + getattr(event, method)(cell_index) + assert f'Cell index {cell_index} is too large' in str(error.value) + + @pytest.mark.isxd_events + @pytest.mark.parametrize('method', ('get_cell_data', 'get_cell_name')) + @pytest.mark.parametrize('cell_index', (8 + 3j, '0', '423', 'My Cell Name', 1.5, (1, 5), [3, 5], {4, 9}, {6: 5})) + def test_GetEventData_bad_cell_index_type(self, method, cell_index): + input_file = test_data_path + 
'/unit_test/event_detection/recording_20161006_111406-PP-bp-mc-dff-pcaica-events.isxd' + + event = isx.EventSet.read(input_file) + with pytest.raises(Exception) as error: + getattr(event, method)(cell_index) + assert ("argument 2:" in str(error.value) and "wrong type" in str(error.value)) or "cannot be interpreted as an integer" in str(error.value) + + @pytest.mark.isxd_events + def test_GetEventCellNames(self): + input_file = test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd' + event = isx.EventSet.read(input_file) + + cell_1 = event.get_cell_name(0) + cell_2 = event.get_cell_name(1) + cell_3 = event.get_cell_name(2) + + assert cell_1 == 'C0' + assert cell_2 == 'C1' + assert cell_3 == 'C2' + + @pytest.mark.isxd_events + def test_WriteEvents(self): + output_path = test_data_path + '/unit_test/output/test_write_events.isxd' + delete_files_silently([output_path]) + + timing = isx.Timing(num_samples=5, period=isx.Duration.from_msecs(10)) + events = isx.EventSet.write(output_path, timing, ['Cell_1']) + + timestamps = np.array([1, 2, 3, 4, 5]).astype(np.uint64) + data = np.random.rand(timing.num_samples).astype(np.float32) + + events.set_cell_data(0, timestamps, data) + events.flush() + + events = isx.EventSet.read(output_path) + + assert events.num_cells == 1 + assert events.timing == timing + + act_timestamps, act_data = events.get_cell_data(0) + + assert act_timestamps[0] == timestamps[0] + assert act_timestamps[1] == timestamps[1] + np.testing.assert_approx_equal(act_data[0], data[0], significant=6) + np.testing.assert_approx_equal(act_data[1], data[1], significant=6) + + del events + delete_files_silently([output_path]) + + def test_WriteEmptyEvents(self): + output_path = test_data_path + '/unit_test/output/test_write_empty_events.isxd' + + cell_names = ['Cell_1'] + timing = isx.Timing(num_samples=10) + + delete_files_silently([output_path]) + events = isx.EventSet.write(output_path, timing, cell_names) + + events.set_cell_data(0, 
np.array([], np.uint64), np.array([], np.float32)) + events.flush() + + events = isx.EventSet.read(output_path) + + num_cells = len(cell_names) + assert events.num_cells == num_cells + assert events.timing == timing + + for c in range(num_cells): + assert events.get_cell_name(c) == cell_names[c] + + [act_usecs_since_start, act_values] = events.get_cell_data(0) + assert len(act_usecs_since_start) == 0 + assert len(act_values) == 0 + + del events + delete_files_silently([output_path]) + + + @pytest.mark.isxd_events + @pytest.mark.parametrize('event_data_type', data_types) + @pytest.mark.parametrize('time_data_type', data_types) + def test_WriteEventsOtherTypeToFloat32(self, event_data_type, time_data_type): + output_path = test_data_path + '/unit_test/output/test_write_events.isxd' + delete_files_silently([output_path]) + + timing = isx.Timing(num_samples=5, period=isx.Duration.from_msecs(10)) + events = isx.EventSet.write(output_path, timing, ['Cell_1']) + + timestamps = np.array([1, 2, 3, 4, 5]).astype(time_data_type) + + data = np.random.rand(timing.num_samples).astype(event_data_type) + + if time_data_type != 'uint64': + if event_data_type != 'float32': + with pytest.warns(UserWarning) as warnings: + events.set_cell_data(0, timestamps, data) + assert 'Converting from {} to uint64.'.format(time_data_type) in [str(x.message) for x in warnings] + assert 'Converting from {} to float32.'.format(event_data_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(UserWarning) as warnings: + events.set_cell_data(0, timestamps, data) + assert 'Converting from {} to uint64.'.format(time_data_type) in [str(x.message) for x in warnings] + else: + if event_data_type != 'float32': + with pytest.warns(UserWarning) as warnings: + events.set_cell_data(0, timestamps, data) + assert 'Converting from {} to float32.'.format(event_data_type) in [str(x.message) for x in warnings] + else: + with pytest.warns(None) as warnings: + events.set_cell_data(0, timestamps, data) + 
assert not [str(x.message) for x in warnings.list] + + + events.flush() + + events = isx.EventSet.read(output_path) + + assert events.num_cells == 1 + assert events.timing == timing + + act_timestamps, act_data = events.get_cell_data(0) + + assert act_timestamps[0] == timestamps[0] + assert act_timestamps[1] == timestamps[1] + np.testing.assert_approx_equal(act_data[0], data[0], significant=6) + np.testing.assert_approx_equal(act_data[1], data[1], significant=6) + + del events + delete_files_silently([output_path]) + + + def test_EventSetStrValid(self): + event_set = isx.EventSet.read(test_data_path + '/unit_test/guilded/exp_mosaicEventDetection_output-v2.isxd') + assert isinstance(str(event_set), str) + + + def test_EventSetStrInvalid(self): + event_set = isx.EventSet() + assert isinstance(str(event_set), str) + + + def test_LosslessTimingSpacing(self): + movie_file_path = test_data_path + '/unit_test/50fr10_l1-3cells_he-PP.isxd' + movie = isx.Movie.read(movie_file_path) + + output_dir = test_data_path + '/python/test_lossless_timing_spacing' + if os.path.exists(output_dir): + shutil.rmtree(output_dir) + os.makedirs(output_dir) + + movie_proc_file_path = output_dir + '/movie_proc.isxd' + movie_proc = isx.Movie.write(movie_proc_file_path, movie.timing, movie.spacing, movie.data_type) + for f in movie.timing.get_valid_samples(): + movie_proc.set_frame_data(f, movie.get_frame_data(f) * 0.5) + movie_proc.flush() + + movie_proc = isx.Movie.read(movie_proc_file_path) + assert movie.timing == movie_proc.timing + assert movie.spacing == movie_proc.spacing + + movie_proc_data = np.zeros((np.prod(movie.spacing.num_pixels), movie.timing.num_samples), dtype=np.uint16) + for f in movie_proc.timing.get_valid_samples(): + movie_proc_data[:, f] = movie_proc.get_frame_data(f).flatten() + + cell_set_file_path = output_dir + '/cell_set.isxd' + cell_set = isx.CellSet.write(cell_set_file_path, movie_proc.timing, movie_proc.spacing) + num_cells = 3 + cell_names = ['C{}'.format(c) for c 
in range(num_cells)] + images = np.zeros(list(movie.spacing.num_pixels) + [movie.timing.num_samples], dtype=np.float32) + images[14, 9, 1] = 1; + images[14, 74, 2] = 1; + images[64, 74, 3] = 1; + for c in range(num_cells): + image = images[:, :, c] + trace = image.flatten().dot(movie_proc_data) + cell_set.set_cell_data(c, image, trace, cell_names[c]) + cell_set.flush() + + cell_set = isx.CellSet.read(cell_set_file_path) + assert cell_set.timing == movie_proc.timing + assert cell_set.spacing == movie_proc.spacing + + event_set_file_path = output_dir + '/event_set.isxd' + event_set = isx.EventSet.write(event_set_file_path, cell_set.timing, cell_names) + usecs_since_start = np.array([x.to_usecs() for x in cell_set.timing.get_offsets_since_start()], np.uint64) + event_set_usecs = [] + event_set_values = [] + for c in range(num_cells): + trace = cell_set.get_cell_trace_data(c) + trace_above_thresh = (trace > 1500).nonzero() + event_set_usecs.append(usecs_since_start[trace_above_thresh]) + event_set_values.append(trace[trace_above_thresh]) + event_set.set_cell_data(c, event_set_usecs[c], event_set_values[c]) + event_set.flush() + + event_set = isx.EventSet.read(event_set_file_path) + assert event_set.timing == cell_set.timing + for c in range(num_cells): + [usecs, values] = event_set.get_cell_data(c) + np.testing.assert_array_equal(usecs, event_set_usecs[c]) + np.testing.assert_array_equal(values, event_set_values[c]) + + del movie + del movie_proc + del cell_set + del event_set + + shutil.rmtree(output_dir) + + @pytest.mark.isxd_gpio + def test_ReadGpio(self): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video.gpio' + gpio = isx.GpioSet.read(input_file) + + assert gpio.num_channels == 26 + exp_period = isx.Duration._from_num_den(1, 1000) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(1589970802786)) + assert gpio.timing == isx.Timing(num_samples=51280, period=exp_period, start=exp_start) + + @pytest.mark.isxd_gpio + def 
test_GetGpioData(self): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video.gpio' + gpio = isx.GpioSet.read(input_file) + + time_C23, amplitude_C23 = gpio.get_channel_data(23) + + assert time_C23[1] == 76000 + np.testing.assert_approx_equal(500.0, amplitude_C23[1], significant=6) + + time_C24, amplitude_C24 = gpio.get_channel_data(24) + + assert time_C24[1] == 75000 + np.testing.assert_approx_equal(1.0, amplitude_C24[1], significant=6) + + @pytest.mark.isxd_events + def test_GetGpioChannelIndex(self): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video.gpio' + gpio = isx.GpioSet.read(input_file) + + assert gpio.get_channel_index('e-focus') == 23 + assert gpio.get_channel_index('BNC Sync Output') == 24 + + @pytest.mark.isxd_gpio + def test_GetImuData(self): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video.imu' + imu = isx.GpioSet.read(input_file) + + time_acc, amp_acc = imu.get_channel_data(0) + + assert time_acc[1] == 20592 + np.testing.assert_approx_equal(-6.1035156e-05, amp_acc[1], significant=6) + + time_ori, amp_ori = imu.get_channel_data(3) + + assert time_ori[1] == 20592 + np.testing.assert_approx_equal(-1.5742188, amp_ori[1], significant=6) + + @pytest.mark.isxd_gpio + def test_GetGpioData_iterate_channels(self): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video_gpio.isxd' + exp_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video_gpio.csv' + + gpio = isx.GpioSet.read(input_file) + gpio_df = pd.read_csv(exp_file) + cnames = [gpio.get_channel_name(k) for k in range(gpio.num_channels)] + + num_channels_with_zero_data = 0 + for index, cname in enumerate(cnames): + # get expected data points for this channel, and sort + i = gpio_df[' Channel Name'] == ' {}'.format(cname) + + # skip if there are zero points + if i.sum() == 0: + num_cells_with_zero_data += 1 + continue + + # compare expected event times and amplitudes with those returned from API + 
data_times, data_amps = gpio.get_channel_data(index) + + np.testing.assert_allclose(data_times / 1e6, gpio_df['Time (s)'][i].values, rtol=1e-5) + np.testing.assert_allclose(data_amps, gpio_df[' Value'][i].values, rtol=1e-5) + + # this is just a check to make sure nothing stupid happened + assert num_channels_with_zero_data < len(cnames) + + @pytest.mark.isxd_gpio + @pytest.mark.parametrize('method', ('get_channel_data', 'get_channel_name')) + @pytest.mark.parametrize('channel_index', (26, 500000000, 18446744073709551615)) + def test_GetGpioData_bad_channel_index_int(self, method, channel_index): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video_gpio.isxd' + + gpio = isx.GpioSet.read(input_file) + with pytest.raises(Exception) as error: + getattr(gpio, method)(channel_index) + assert f'Channel index {channel_index} is too large' in str(error.value) + + @pytest.mark.isxd_gpio + @pytest.mark.parametrize('method', ('get_channel_data', 'get_channel_name')) + @pytest.mark.parametrize('channel_index', (8 + 3j, '0', '423', 'My Channel Name', 1.5, (1, 5), [3, 5], {4, 9}, {6: 5})) + def test_GetGpioData_bad_channel_index_type(self, method, channel_index): + input_file = test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video_gpio.isxd' + + event = isx.GpioSet.read(input_file) + with pytest.raises(Exception) as error: + getattr(event, method)(channel_index) + assert ("argument 2:" in str(error.value) and "wrong type" in str(error.value)) or "cannot be interpreted as an integer" in str(error.value) + + def test_GpioSetStrValid(self): + gpio_set = isx.GpioSet.read(test_data_path + '/unit_test/gpio/2020-05-20-10-33-22_video_gpio.isxd') + assert isinstance(str(gpio_set), str) + + + def test_GpioSetStrInvalid(self): + gpio_set = isx.GpioSet() + assert isinstance(str(gpio_set), str) + + @staticmethod + def assert_acquisition_info_dataset_1(info): + assert info['Animal Sex'] == 'm' + assert info['Animal Date of Birth'] == '' + assert info['Animal ID'] == 
'' + assert info['Animal Species'] == '' + assert info['Animal Weight'] == 0 + assert info['Animal Description'] == '' + + assert info['Microscope Focus'] == 0 + assert info['Microscope Gain'] == 7 + assert info['Microscope EX LED Power (mw/mm^2)'] == 0 + assert info['Microscope OG LED Power (mw/mm^2)'] == 0 + assert info['Microscope Serial Number'] == 'unknown' + assert info['Microscope Type'] == 'nVista' + + assert info['Session Name'] == 'Session 20180621-174314' + + assert info['Experimenter Name'] == 'John Doe' + + assert info['Probe Diameter (mm)'] == 0 + assert info['Probe Flip'] == 'none' + assert info['Probe Length (mm)'] == 0 + assert info['Probe Pitch'] == 0 + assert info['Probe Rotation (degrees)'] == 0 + assert info['Probe Type'] == 'None' + + assert info['Acquisition SW Version'] == '1.1.0' + + def test_MovieGetAcquisitionInfo(self): + movie_file_path = test_data_path + '/unit_test/acquisition_info/2018-06-21-17-51-03_video_sched_0.isxd' + movie = isx.Movie.read(movie_file_path) + info = movie.get_acquisition_info() + TestFileIO.assert_acquisition_info_dataset_1(info) + + def test_CellSetGetAcquisitionInfo(self): + cell_set_file_path = test_data_path + '/unit_test/acquisition_info/2018-06-21-17-51-03_video_sched_0-PP-ROI.isxd' + cell_set = isx.CellSet.read(cell_set_file_path) + info = cell_set.get_acquisition_info() + TestFileIO.assert_acquisition_info_dataset_1(info) + + def test_EventSetGetAcquisitionInfo(self): + event_set_file_path = test_data_path + '/unit_test/acquisition_info/2018-06-21-17-51-03_video_sched_0-PP-ROI-ED.isxd' + event_set = isx.EventSet.read(event_set_file_path) + info = event_set.get_acquisition_info() + TestFileIO.assert_acquisition_info_dataset_1(info) + + + def testCreateCellMapNoAcceptedCells(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + "/2020-05-07-11-38-24_video_DR_1_OQ_1-decompressed-efocus_0700-PP-ROI_001.isxd" + output_dir = test_data_path + 
'/unit_test/output' + + actual_isxd_cell_map_name = 'created_cell_map_no_accepted.isxd' + actual_isxd_cell_map = output_dir + '/' + actual_isxd_cell_map_name + + delete_files_silently(actual_isxd_cell_map) + + with pytest.raises(Exception) as error: + isx.create_cell_map(cell_set_file_path, + output_isxd_cell_map_file=actual_isxd_cell_map) + + assert 'There are no cells to create a cell map with! Only selected cells will be used for the cell map.' in str(error.value) + + assert not is_file(actual_isxd_cell_map) + + delete_files_silently(actual_isxd_cell_map) + + def testCreateCellMapUndecided(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + "/2020-05-07-11-38-24_video_DR_1_OQ_1-decompressed-efocus_0700-PP-ROI_001.isxd" + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map_name = 'created_cell_map_undecided.isxd' + actual_isxd_cell_map = output_dir + '/' + actual_isxd_cell_map_name + + expected_isxd_cell_map = create_cell_map_path + '/' + actual_isxd_cell_map_name + + delete_files_silently(actual_isxd_cell_map) + + isx.create_cell_map(cell_set_file_path, + selected_cell_statuses=['accepted','undecided'], + output_isxd_cell_map_file=actual_isxd_cell_map) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, actual_isxd_cell_map) + + delete_files_silently(actual_isxd_cell_map) + + + @pytest.mark.parametrize(('binary','cell_thresh', 'rgb', 'cell_statuses'), + (( False, .99, None, ['accepted', 'undecided']), + ( True, .99, "green",['accepted', 'undecided']), + ( True, 0.8, None, ['rejected']), + ( False, 0.8, None, ['rejected']), + ( True, 0.5, None, ['rejected']), + ( False, 0.4, "blue", ['rejected']), + ( True, 0.4, "blue", ['rejected']), + ( False, 0.0, "red", ['rejected']), + )) + def testCreateCellMapVarInput(self, cell_thresh, binary, rgb, cell_statuses): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + + cell_set_file_path 
= create_cell_map_path + "/synth_movie-03-no-frame-nums-lots-dots_he-PCA-ICA.isxd" + + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map_name = f'created_cell_map_b{binary}_thresh{cell_thresh}_{rgb}.isxd' + actual_tiff_cell_map_name = f'created_cell_map_b{binary}_thresh{cell_thresh}_{rgb}.tiff' + actual_isxd_cell_map = output_dir + '/' + actual_isxd_cell_map_name + actual_tiff_cell_map = output_dir + '/' + actual_tiff_cell_map_name + + expected_isxd_cell_map = create_cell_map_path + '/' + actual_isxd_cell_map_name + expected_tiff_cell_map = create_cell_map_path + '/' + actual_tiff_cell_map_name + + delete_files_silently([actual_isxd_cell_map, actual_tiff_cell_map]) + + isx.create_cell_map(cell_set_file_path, + selected_cell_statuses=cell_statuses, + output_isxd_cell_map_file=actual_isxd_cell_map, + output_tiff_cell_map_file=actual_tiff_cell_map, + cell_thresh=cell_thresh, + binary=binary, + rgb=rgb) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, actual_isxd_cell_map) + assert_tiff_files_equal_by_path(expected_tiff_cell_map, actual_tiff_cell_map) + + delete_files_silently([actual_isxd_cell_map, actual_tiff_cell_map]) + + @pytest.mark.skip(reason="no suitably small data") + def testCreateCellMapNVistaCellSet(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE.isxd' + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map = 'created_cell_map.isxd' + actual_tiff_cell_map = 'created_cell_map.tiff' + full_actual_isxd_cell_map = output_dir + '/created_cell_map.isxd' + full_actual_tiff_cell_map = output_dir + '/created_cell_map.tiff' + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + expected_isxd_cell_map = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-MAP.isxd' + expected_tiff_cell_map = create_cell_map_path + 
'/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-MAP.tif' + + isx.create_cell_map(cell_set_file_path, cell_thresh=0.5, output_isxd_cell_map_name=actual_isxd_cell_map, + output_tiff_cell_map_name=actual_tiff_cell_map, output_dir=output_dir) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, full_actual_isxd_cell_map) + assert_tiff_files_equal_by_path(full_actual_tiff_cell_map, expected_tiff_cell_map) + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + + @pytest.mark.skip(reason="no suitably small data") + def testCreateCellMapNVistaCellSetOri(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI.isxd' + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map = 'created_cell_map.isxd' + actual_tiff_cell_map = 'created_cell_map.tiff' + full_actual_isxd_cell_map = output_dir + '/created_cell_map.isxd' + full_actual_tiff_cell_map = output_dir + '/created_cell_map.tiff' + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + expected_isxd_cell_map = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-MAP.isxd' + expected_tiff_cell_map = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-MAP.tif' + + isx.create_cell_map(cell_set_file_path, cell_thresh=0.5, + output_isxd_cell_map_name=actual_isxd_cell_map, + output_tiff_cell_map_name=actual_tiff_cell_map, + output_dir=output_dir) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, full_actual_isxd_cell_map) + assert_tiff_files_equal_by_path(full_actual_tiff_cell_map, expected_tiff_cell_map) + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + + @pytest.mark.skip(reason="no suitably small data") + def testCreateCellMapNVistaCellSetOriTrf(self): + create_cell_map_path = test_data_path + 
'/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-TRF.isxd' + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map = 'created_cell_map.isxd' + actual_tiff_cell_map = 'created_cell_map.tiff' + full_actual_isxd_cell_map = output_dir + '/created_cell_map.isxd' + full_actual_tiff_cell_map = output_dir + '/created_cell_map.tiff' + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + expected_isxd_cell_map = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-TRF-MAP.isxd' + expected_tiff_cell_map = create_cell_map_path + '/2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-TRF-MAP.tif' + + isx.create_cell_map(cell_set_file_path, cell_thresh=0.5, + output_isxd_cell_map=actual_isxd_cell_map, + output_tiff_cell_map=actual_tiff_cell_map, + output_dir=output_dir) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, full_actual_isxd_cell_map) + assert_tiff_files_equal_by_path(full_actual_tiff_cell_map, expected_tiff_cell_map) + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + + @pytest.mark.skip(reason="no suitably small data") + def testCreateCellMapOlympusCellSet(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + '/movie_920_green_resonant-BP-MC-CNMFE.isxd' + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map = 'created_cell_map.isxd' + actual_tiff_cell_map = 'created_cell_map.tiff' + full_actual_isxd_cell_map = output_dir + '/created_cell_map.isxd' + full_actual_tiff_cell_map = output_dir + '/created_cell_map.tiff' + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + expected_isxd_cell_map = create_cell_map_path + '/movie_920_green_resonant-BP-MC-CNMFE-MAP.isxd' + expected_tiff_cell_map = create_cell_map_path + 
'/movie_920_green_resonant-BP-MC-CNMFE-MAP.tif' + + isx.create_cell_map(cell_set_file_path, cell_thresh=0.5, output_isxd_cell_map_name=actual_isxd_cell_map, + output_tiff_cell_map_name=actual_tiff_cell_map, output_dir=output_dir) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, full_actual_isxd_cell_map) + assert_tiff_files_equal_by_path(full_actual_tiff_cell_map, expected_tiff_cell_map) + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + + @pytest.mark.skip(reason="no suitably small data") + def testCreateCellMapOlympusCellSetALG(self): + create_cell_map_path = test_data_path + '/unit_test/create_cell_map' + cell_set_file_path = create_cell_map_path + '/movie_920_green_resonant-BP-MC-CNMFE-ALG.isxd' + output_dir = test_data_path + '/unit_test/output' + + actual_isxd_cell_map = 'created_cell_map.isxd' + actual_tiff_cell_map = 'created_cell_map.tiff' + full_actual_isxd_cell_map = output_dir + '/created_cell_map.isxd' + full_actual_tiff_cell_map = output_dir + '/created_cell_map.tiff' + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + expected_isxd_cell_map = create_cell_map_path + '/movie_920_green_resonant-BP-MC-CNMFE-ALG-MAP.isxd' + expected_tiff_cell_map = create_cell_map_path + '/movie_920_green_resonant-BP-MC-CNMFE-ALG-MAP.tif' + + isx.create_cell_map(cell_set_file_path, cell_thresh=0.5, + output_isxd_cell_map_name=actual_isxd_cell_map, + output_tiff_cell_map_name=actual_tiff_cell_map, + output_dir=output_dir) + + assert_isxd_images_are_close_by_path_nan_zero(expected_isxd_cell_map, full_actual_isxd_cell_map) + assert_tiff_files_equal_by_path(full_actual_tiff_cell_map, expected_tiff_cell_map) + + delete_files_silently([full_actual_isxd_cell_map, full_actual_tiff_cell_map]) + + def testOverlayCellMapOnImage(self): + create_cell_map_path = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + 
cell_map_file = os.path.join(create_cell_map_path, 'movie_920_green_resonant-BP-MC-CNMFE-ALG-MAP.isxd') + input_image_file = os.path.join(create_cell_map_path, 'zstack_920_green_Galvano-green_26to51.isxd') + + overlayed_image_name = 'overlayed_image.tiff' + actual_overlayed_image_file = os.path.join(output_dir, overlayed_image_name) + + expected_overlayed_image_file = os.path.join(create_cell_map_path, overlayed_image_name) + + delete_files_silently(actual_overlayed_image_file) + + isx.overlay_cell_map_on_image(cell_map_file, input_image_file, actual_overlayed_image_file) + + assert is_file(actual_overlayed_image_file) + assert_isxd_images_are_close_by_path_nan_zero(expected_overlayed_image_file.replace('\\', '/'), actual_overlayed_image_file.replace('\\', '/')) + + delete_files_silently(actual_overlayed_image_file) + + + @pytest.mark.parametrize('input_cellset_map', ( + 'created_cell_map_undecided.isxd',)) + def testOverlayCellMapOnImage_not_matching_files(self, input_cellset_map): + create_cell_map_path = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + cell_map_file = os.path.join(create_cell_map_path, input_cellset_map) + input_image_file = os.path.join(create_cell_map_path, 'image_output_accepted-cells-map.tiff') + + overlayed_image_name = f'o_{input_cellset_map}.tiff' + actual_overlayed_image_file = os.path.join(output_dir, overlayed_image_name) + with pytest.raises(Exception) as error: + isx.overlay_cell_map_on_image(cell_map_file, input_image_file, overlayed_image_name) + assert f'operands could not be broadcast together' in str(error.value) + + assert not is_file(actual_overlayed_image_file) + + + @pytest.mark.skip(reason="expected output file needs to be updated") + def test_overlay_cellmaps(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_first_cellmap_file = 
os.path.join(base_dir, '2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-TRF-MAP.tif') + input_second_cellmap_file = os.path.join(base_dir, 'movie_920_green_resonant-BP-MC-CNMFE-ALG-MAP.tif') + + output_tiff_name = 'overlayed_cellmaps.tiff' + actual_output_tiff_file = os.path.join(output_dir, output_tiff_name) + expected_output_tiff_file = os.path.join(base_dir, output_tiff_name) + + delete_files_silently(actual_output_tiff_file) + + isx.overlay_cellmaps(input_first_cellmap_file, input_second_cellmap_file, actual_output_tiff_file) + + assert is_file(actual_output_tiff_file) + + assert_tiff_files_equal_by_path(actual_output_tiff_file, expected_output_tiff_file) + + delete_files_silently(actual_output_tiff_file) + + + def test_overlay_cellmaps_wrong_size(self): + base_dir = os.path.join(test_data_path, 'unit_test', 'create_cell_map') + output_dir = os.path.join(test_data_path, 'unit_test', 'output') + + input_first_cellmap_file = os.path.join(base_dir, '2019-11-30-17-47-27_video_trig_0-PP-BP-MC-CNMFE-ORI-TRF-MAP.tif') + input_second_cellmap_file = os.path.join(base_dir, 'image_rot_flip_metadata.tiff') + + output_tiff_name = 'overlayed_cellmaps_wrong_size.tiff' + actual_output_tiff_file = os.path.join(output_dir, output_tiff_name) + + delete_files_silently(actual_output_tiff_file) + + with pytest.raises(Exception) as error: + isx.overlay_cellmaps(input_first_cellmap_file, input_second_cellmap_file, actual_output_tiff_file) + assert 'The two images do not have the same size: (1024, 1024) vs (912, 786)' in str(error.value) + + assert not is_file(actual_output_tiff_file) + + delete_files_silently(actual_output_tiff_file) + + def test_ReadVesselSet(self): + input_file = test_data_path + '/unit_test/bloodflow/bloodflow_movie_10s-VD_vesselsets.isxd' + vessel_set = isx.VesselSet.read(input_file) + + assert vessel_set.get_vessel_name(0) == 'V0' + assert vessel_set.get_vessel_name(1) == 'V1' + assert vessel_set.get_vessel_name(2) == 'V2' + + assert vessel_set.num_vessels == 
3 + + exp_period = isx.Duration._from_num_den(50, 1000) + exp_start = isx.Time._from_secs_since_epoch(isx.Duration.from_secs(0)) + exp_spacing = isx.Spacing(num_pixels=(250,250)) + exp_spacing._impl.pixel_width = isx._internal.IsxRatio(6, 1) + exp_spacing._impl.pixel_height = isx._internal.IsxRatio(6, 1) + + assert vessel_set.timing == isx.Timing(num_samples=200, period=exp_period, start=exp_start) + + assert vessel_set.spacing == exp_spacing + + def test_ReadVesselSetName(self): + input_file = test_data_path + '/unit_test/bloodflow/bloodflow_movie_10s-VD_vesselsets.isxd' + vessel_set = isx.VesselSet.read(input_file) + + name = vessel_set.get_vessel_name(1) + + assert name == 'V1' + + def test_ReadVesselSetStatus(self): + input_file = test_data_path + '/unit_test/bloodflow/bloodflow_movie_10s-VD_vesselsets.isxd' + vessel_set = isx.VesselSet.read(input_file) + + assert vessel_set.get_vessel_status(0) == 'accepted' + assert vessel_set.get_vessel_status(1) == 'undecided' + assert vessel_set.get_vessel_status(2) == 'rejected' + + def test_ReadVesselSetLine(self): + input_file = test_data_path + '/unit_test/bloodflow/bloodflow_movie_10s-VD_vesselsets.isxd' + vessel_set = isx.VesselSet.read(input_file) + + np.testing.assert_array_equal(vessel_set.get_vessel_line_data(0), np.array([[90, 71], [108, 88]])) + np.testing.assert_array_equal(vessel_set.get_vessel_line_data(1), np.array([[148, 163], [167, 179]])) + np.testing.assert_array_equal(vessel_set.get_vessel_line_data(2), np.array([[236, 146], [213, 163]])) + + def test_ReadVesselSetRoi(self): + input_file = test_data_path + '/unit_test/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + vessel_set = isx.VesselSet.read(input_file) + np.testing.assert_array_equal(vessel_set.get_vessel_line_data(0), np.array([[124, 25], [153, 36], [90, 202], [61, 191]])) + np.testing.assert_array_equal(vessel_set.get_vessel_line_data(1), np.array([[24, 42], [43, 34], [85, 148], [65, 156]])) + + def test_ReadVesselSetType(self): + input_rbcv_file = 
test_data_path + '/unit_test/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + input_vessel_diameter_file = test_data_path + '/unit_test/bloodflow/bloodflow_movie_10s-VD_vesselsets.isxd' + vessel_set = isx.VesselSet.read(input_rbcv_file) + assert vessel_set.get_vessel_set_type() == isx.VesselSet.VesselSetType.RBC_VELOCITY + vessel_set = isx.VesselSet.read(input_vessel_diameter_file) + assert vessel_set.get_vessel_set_type() == isx.VesselSet.VesselSetType.VESSEL_DIAMETER + + def test_ReadVesselSetDirection(self): + input_file = test_data_path + '/unit_test/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + vessel_set = isx.VesselSet.read(input_file) + trace = vessel_set.get_vessel_direction_trace_data(0) + exp_trace = [248.08775, 247.69649, 247.4231 , 247.73364, 246.72937, np.nan] + np.testing.assert_allclose(trace, exp_trace, rtol=1e-05) + + def test_ReadVesselSetCorrelations(self): + input_file = test_data_path + '/unit_test/bloodflow/rbcv_movie_1-RBCV_microns.isxd' + vessel_set = isx.VesselSet.read(input_file) + vessel_id = 0 + frame_idx = 0 + correlations = vessel_set.get_vessel_correlations_data(vessel_id, frame_idx) + + exp_shape = (3, 178, 93) + assert correlations.shape == exp_shape + + def test_ReadWriteVesselSet(self): + vs_out_file = test_data_path + '/unit_test/output/test_readwrite_vesselset.isxd' + delete_files_silently([vs_out_file]) + + # create sample data that will be used to make the vessel set + vessel_props = write_sample_vessel_diameter_set(vs_out_file) + + # read the created vessel set file and confirm the correct values have been written + vs_in = isx.VesselSet.read(vs_out_file) + assert vs_in.num_vessels == vessel_props['num_vessels'] + assert vs_in.spacing == vessel_props['spacing'] + assert vs_in.timing == vessel_props['timing'] + valid_samples_mask = vs_in.timing.get_valid_samples_mask() + for k in range(vs_in.num_vessels): + assert vs_in.get_vessel_name(k) == vessel_props['names'][k] + assert vs_in.get_vessel_status(k) == 'undecided' + + # Note: Key 
difference, vessel sets only have one image, that being the first image + np.testing.assert_array_equal(vs_in.get_vessel_image_data(k), vessel_props['images'][0]) + np.testing.assert_array_equal(vs_in.get_vessel_line_data(k), vessel_props['lines'][k]) + np.testing.assert_array_equal(vs_in.get_vessel_trace_data(k), vessel_props['traces'][k]) + np.testing.assert_array_equal(vs_in.get_vessel_center_trace_data(k), vessel_props['center_traces'][k]) + + del vs_in + delete_files_silently([vs_out_file]) + + def test_ReadWriteVesselSetRbcVelocity(self): + vs_out_file = test_data_path + '/unit_test/output/test_readwrite_vesselset.isxd' + delete_files_silently([vs_out_file]) + + # create sample data that will be used to make the vessel set + vessel_props = write_sample_rbc_velocity_set(vs_out_file) + + # read the created vessel set file and confirm the correct values have been written + vs_in = isx.VesselSet.read(vs_out_file) + assert vs_in.num_vessels == vessel_props['num_vessels'] + assert vs_in.spacing == vessel_props['spacing'] + assert vs_in.timing == vessel_props['timing'] + valid_samples_mask = vs_in.timing.get_valid_samples_mask() + for k in range(vs_in.num_vessels): + # Note: Key difference, vessel sets only have one image, that being the first image + assert vs_in.get_vessel_name(k) == vessel_props['names'][k] + assert vs_in.get_vessel_status(k) == 'undecided' + + np.testing.assert_array_equal(vs_in.get_vessel_image_data(k), vessel_props['images'][0]) + np.testing.assert_array_equal(vs_in.get_vessel_line_data(k), vessel_props['lines'][k]) + np.testing.assert_array_equal(vs_in.get_vessel_trace_data(k), vessel_props['traces'][k]) + np.testing.assert_array_equal(vs_in.get_vessel_direction_trace_data(k), vessel_props['direction_traces'][k]) + + for t in range(vessel_props['timing'].num_samples): + np.testing.assert_array_equal(vs_in.get_vessel_correlations_data(k, t), vessel_props['correlations_traces'][k][t, :, :, :]) + + del vs_in + 
delete_files_silently([vs_out_file]) + + def test_VesselSetGetAcquisitionInfo(self): + vessel_set_file_path = test_data_path + '/unit_test/bloodflow/blood_flow_movie_1-VD_window2s_increment1s.isxd' + vessel_set = isx.VesselSet.read(vessel_set_file_path) + info = vessel_set.get_acquisition_info() + + # Assert against known values + assert info['Trace Units'] == 'microns' + assert info['Vessel Set Type'] == 'vessel diameter' + assert info['Time Increment (s)'] == 1 + assert info['Time Window (s)'] == 2 + + def test_AlignStartTimesInvalidRef(self): + test_dir = test_data_path + "/unit_test/" + ref_file_path = test_dir + "/imu/2020-02-13-18-43-21_video.imu" + align_file_path = test_dir + "/nVision/20220412-200447-camera-100.isxb" + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_file_path) + + assert 'Unsupported data type - only gpio files, isxd movies, and isxb movies are supported as a timing reference.' in str(error.value) + + def test_AlignStartTimesInvalidAlign(self): + test_dir = test_data_path + "/unit_test/" + ref_file_path = test_dir + "/gpio/2020-05-20-10-33-22_video.gpio" + align_file_path = test_dir + "/cell_metrics/cell_metrics_movie-PCA-ICA.isxd" + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_file_path) + + assert 'Unsupported data type - only isxd movies and isxb movies are supported as input files to align to a timing reference.' 
in str(error.value) + + @pytest.mark.skipif(not isx._is_with_algos, reason="Cannot run algo module with minimal api") + def test_AlignStartTimesInvalidNoFrameTimestamps(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + ref_file_path = test_dir + "/2022-06-08-23-53-41_video.gpio" + align_file_path = test_dir + "/2022-06-08-23-53-41_video.isxd" + processed_file_path = test_dir + "/2022-06-08-23-53-41_video-PP.isxd" + + delete_files_silently([processed_file_path]) + + # temporally downsample by 2x to strip movie of timestamps + isx.preprocess(align_file_path, processed_file_path, temporal_downsample_factor=2) + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=processed_file_path) + + delete_files_silently([processed_file_path]) + + assert 'Cannot get first tsc from movie with no frame timestamps.' in str(error.value) + + def test_AlignStartTimesInvalidNoUUID(self): + test_dir = test_data_path + "/unit_test/" + ref_file_path = test_dir + "/gpio/2020-05-20-10-33-22_video.gpio" + align_file_path = test_dir + "/cnmfe-cpp/movie_128x128x1000.isxd" + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_file_path) + + assert 'Cannot determine if files are paired and synchronized - no recording UUID in timing reference file metadata.' 
in str(error.value) + + def test_AlignStartTimesInvalidPairedUnsynced(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-unsynchronized" + ref_file_path = test_dir + "/2022-06-08-23-57-41_video.gpio" + align_file_path = test_dir + "/2022-06-08-23-57-43-camera-1.isxb" + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_file_path) + + assert 'Files are not paired and synchronized - recording UUID of align file (AC-00111111-l4R4GRt28o-1654732663355) does not match recording UUID of timing reference file (AC-00111111-l4R4GRt28o-1654732661796).' in str(error.value) + + def test_AlignStartTimesInvalidStandalone(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID" + ref_file_path = test_dir + "/standalone-miniscope/2022-06-08-23-58-43_video.gpio" + align_file_path = test_dir + "/standalone-behavior/2022-06-08-23-58-51-camera-1.isxb" + + with pytest.raises(Exception) as error: + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_file_path) + + assert 'Files are not paired and synchronized - recording UUID of align file (GA-21807233-0000000000-1654732731777) does not match recording UUID of timing reference file (AC-00111111-0000000000-1654732723918).' 
in str(error.value) + + def test_AlignStartTimesGpioRef(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + gpio_file_path = test_dir + "/2022-06-08-23-53-41_video.gpio" + isxd_file_path = test_dir + "/2022-06-08-23-53-41_video.isxd" + isxb_file_path = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + + # Copy test isxb and isxd files to modify + isxd_file_path_copy = test_dir + "/2022-06-08-23-53-41_video-mod.isxd" + isxb_file_path_copy = test_dir + "/2022-06-08-23-53-41_video-camera-1-mod.isxb" + + delete_files_silently([isxd_file_path_copy, isxb_file_path_copy]) + + shutil.copyfile(isxd_file_path, isxd_file_path_copy) + shutil.copyfile(isxb_file_path, isxb_file_path_copy) + + isx.align_start_times( + input_ref_file=gpio_file_path, + input_align_files=[isxd_file_path_copy, isxb_file_path_copy]) + + # Verify the modified isxd file + original_isxd_movie = isx.Movie.read(isxd_file_path) + modified_isxd_movie = isx.Movie.read(isxd_file_path_copy) + + # The calculated start time of an isxd file based on a gpio reference generally equals the start time + # stored in the original isxd file because the isxd and gpio file originate from the same hardware system. + # However this is not necessarily guaranteed to be the case which is why the isxd start time is recomputed just in case. + assert modified_isxd_movie.timing == original_isxd_movie.timing + + # Ensure the frame data and json metadata in the file is not corrupted due to the operation. 
+ for i in range(original_isxd_movie.timing.num_samples): + original_movie_frame = original_isxd_movie.get_frame_data(i) + modified_movie_frame = modified_isxd_movie.get_frame_data(i) + np.testing.assert_array_equal(modified_movie_frame, original_movie_frame) + assert modified_isxd_movie.get_acquisition_info() == original_isxd_movie.get_acquisition_info() + + # Verify the modified isxb file + original_isxb_movie = isx.Movie.read(isxb_file_path) + modified_isxb_movie = isx.Movie.read(isxb_file_path_copy) + + # The recomputed start time is 541 ms greater than the start time in the original isxb file + # Generally there is a greater delay in the start of isxb recording because it's on a separate hardware system from the gpio and isxd files + original_isxb_ti = original_isxb_movie.timing + exp_isxb_timing = isx.Timing( + start=isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(original_isxb_ti.start._impl.secs_since_epoch.num + 541), original_isxb_ti.start._impl.utc_offset), + period=original_isxb_ti.period, + num_samples=original_isxb_ti.num_samples, + dropped=original_isxb_ti.dropped, + cropped=original_isxb_ti.cropped, + blank=original_isxb_ti.blank + ) + assert modified_isxb_movie.timing == exp_isxb_timing + + # Ensure the frame data and json metadata in the file is not corrupted due to the operation. 
+ for i in range(original_isxb_movie.timing.num_samples): + original_movie_frame = original_isxb_movie.get_frame_data(i) + modified_movie_frame = modified_isxb_movie.get_frame_data(i) + np.testing.assert_array_equal(modified_movie_frame, original_movie_frame) + assert modified_isxb_movie.get_acquisition_info() == original_isxb_movie.get_acquisition_info() + + del modified_isxd_movie + del modified_isxb_movie + delete_files_silently([isxd_file_path_copy, isxb_file_path_copy]) + + def test_AlignStartTimesIsxdRef(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxd_file_path = test_dir + "/2022-06-08-23-53-41_video.isxd" + isxb_file_path = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + + # Copy test isxb and isxd files to modify + isxb_file_path_copy = test_dir + "/2022-06-08-23-53-41_video-camera-1-mod.isxb" + + delete_files_silently([isxb_file_path_copy]) + + shutil.copyfile(isxb_file_path, isxb_file_path_copy) + + isx.align_start_times( + input_ref_file=isxd_file_path, + input_align_files=[isxb_file_path_copy]) + + # Verify the modified isxb file + original_isxb_movie = isx.Movie.read(isxb_file_path) + modified_isxb_movie = isx.Movie.read(isxb_file_path_copy) + + # The recomputed start time is 541 ms greater than the start time in the original isxb file + original_isxb_ti = original_isxb_movie.timing + exp_isxb_timing = isx.Timing( + start=isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(original_isxb_ti.start._impl.secs_since_epoch.num + 541), original_isxb_ti.start._impl.utc_offset), + period=original_isxb_ti.period, + num_samples=original_isxb_ti.num_samples, + dropped=original_isxb_ti.dropped, + cropped=original_isxb_ti.cropped, + blank=original_isxb_ti.blank + ) + + assert modified_isxb_movie.timing == exp_isxb_timing + + del modified_isxb_movie + delete_files_silently([isxb_file_path_copy]) + + def test_AlignStartTimesSeries(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/scheduled" + ref_file_path = 
test_dir + "/2022-06-09-12-33-38_video_sched_0.gpio" + align_file_paths = [ + test_dir + "/2022-06-09-12-33-38_video_sched_0-camera-1.isxb", + test_dir + "/2022-06-09-12-33-38_video_sched_1-camera-1.isxb", + test_dir + "/2022-06-09-12-33-38_video_sched_2-camera-1.isxb" + ] + + # Copy test isxb and isxd files to modify + align_copy_file_paths = [ + test_dir + "/2022-06-09-12-33-38_video_sched_0-camera-1-mod.isxb", + test_dir + "/2022-06-09-12-33-38_video_sched_1-camera-1-mod.isxb", + test_dir + "/2022-06-09-12-33-38_video_sched_2-camera-1-mod.isxb" + ] + + delete_files_silently(align_copy_file_paths) + + for i in range(3): + shutil.copyfile(align_file_paths[i], align_copy_file_paths[i]) + + isx.align_start_times( + input_ref_file=ref_file_path, + input_align_files=align_copy_file_paths) + + exp_ts_diffs = [524, 10428, 20433] + + for i in range(3): + # Verify the modified isxb file + original_isxb_movie = isx.Movie.read(align_file_paths[i]) + modified_isxb_movie = isx.Movie.read(align_copy_file_paths[i]) + + original_isxb_ti = original_isxb_movie.timing + exp_isxb_timing = isx.Timing( + start=isx.Time._from_secs_since_epoch(isx.Duration.from_msecs(original_isxb_ti.start._impl.secs_since_epoch.num + exp_ts_diffs[i]), original_isxb_ti.start._impl.utc_offset), + period=original_isxb_ti.period, + num_samples=original_isxb_ti.num_samples, + dropped=original_isxb_ti.dropped, + cropped=original_isxb_ti.cropped, + blank=original_isxb_ti.blank + ) + + assert modified_isxb_movie.timing == exp_isxb_timing + + del modified_isxb_movie + + delete_files_silently(align_copy_file_paths) + + def test_ExportAlignedTimestamps(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + gpio_file_path = test_dir + "/2022-06-08-23-53-41_video.gpio" + isxd_file_path = test_dir + "/2022-06-08-23-53-41_video.isxd" + isxb_file_path = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + output_file_path = test_dir + "/output.csv" + + delete_files_silently([output_file_path]) + + 
isx.export_aligned_timestamps( + input_ref_file=gpio_file_path, + input_align_files=[isxb_file_path, isxd_file_path], + input_ref_name="gpio", + input_align_names=["isxb", "isxd"], + output_csv_file=output_file_path, + time_ref='start') + + df = pd.read_csv(output_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'gpio Timestamp (s)' : [0.000000], + 'gpio Channel' : ['Digital GPI 0'], + 'isxb Timestamp (s)' : [0.282199], + 'isxd Timestamp (s)' : [0.052248]} + )).all(axis=None) + + df.fillna(-1, inplace=True) + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'gpio Timestamp (s)' : [1.951800], + 'gpio Channel' : ['BNC Trigger Input'], + 'isxb Timestamp (s)' : [-1], + 'isxd Timestamp (s)' : [-1]} + )).all(axis=None) + + delete_files_silently([output_file_path]) + + def test_ExportAlignedTimestamps_Unix(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxd_file_path = test_dir + "/2022-06-08-23-53-41_video.isxd" + isxb_file_path = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + output_file_path = test_dir + "/output.csv" + + delete_files_silently([output_file_path]) + + isx.export_aligned_timestamps( + input_ref_file=isxd_file_path, + input_align_files=[isxb_file_path], + input_ref_name="isxd", + input_align_names=["isxb"], + output_csv_file=output_file_path, + time_ref='unix') + + df = pd.read_csv(output_file_path, dtype=str) + assert (df.iloc[0] == pd.DataFrame({ + 'isxd Timestamp (s)' : ['1654732421.888000'], + 'isxb Timestamp (s)' : ['1654732422.117951']} + )).all(axis=None) + + df.fillna('None', inplace=True) + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'isxd Timestamp (s)' : ['None'], + 'isxb Timestamp (s)' : ['1654732423.853928']} + )).all(axis=None) + + delete_files_silently([output_file_path]) + + def test_ExportAlignedTimestamps_Tsc(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxd_file_path = test_dir + 
"/2022-06-08-23-53-41_video.isxd" + isxb_file_path = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + output_file_path = test_dir + "/output.csv" + + delete_files_silently([output_file_path]) + + isx.export_aligned_timestamps( + input_ref_file=isxd_file_path, + input_align_files=[isxb_file_path], + input_ref_name="isxd", + input_align_names=["isxb"], + output_csv_file=output_file_path, + time_ref='tsc') + + df = pd.read_csv(output_file_path) + assert (df.iloc[0] == pd.DataFrame({ + 'isxd Timestamp (s)' : [459472532939], + 'isxb Timestamp (s)' : [459472762890]} + )).all(axis=None) + + df.fillna(-1, inplace=True) + assert (df.iloc[len(df.index) - 1] == pd.DataFrame({ + 'isxd Timestamp (s)' : [-1], + 'isxb Timestamp (s)' : [459474498867]} + )).all(axis=None) + + delete_files_silently([output_file_path]) + + def test_ExportEthovisionDataWithIsxbTimestamps(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='tsc' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (us)' : [459472762890], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : [40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb 
Frame Timestamp (us)' : [459474498867], + 'Trial time' : [1.733], + 'Recording time' : [1.733], + 'X center' : [42.9174], + 'Y center' : [71.0059], + 'Area' : [3369.08], + 'Areachange' : [551.344], + 'Elongation' : [0.487716], + 'Distance moved' : [2.41289], + 'Velocity' : [72.3875], + 'Activity' : [0.250916], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_FrameOffByOne(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + original_ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial-mod.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file, ethovision_file]) + + ethovision_data = pd.read_excel(original_ethovision_file) + ethovision_data.drop(ethovision_data.index[-1], inplace=True) + ethovision_data.to_excel(ethovision_file, index=False) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='tsc' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + + # validate first row of csv + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (us)' : [459472798865], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : [40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0], + 'Activity state(Inactive)' : [0], + 'Result 1' : [1] + }) + ).all(axis=None) + + # validate last row of csv + assert 
(df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (us)' : [459474498867], + 'Trial time' : [1.7], + 'Recording time' : [1.7], + 'X center' : [40.5835], + 'Y center' : [70.3934], + 'Area' : [3172.29], + 'Areachange' : [594.905], + 'Elongation' : [0.565859], + 'Distance moved' : [1.82628], + 'Velocity' : [54.7891], + 'Activity' : [0.258729], + 'Activity state(Highly active)' : [0], + 'Activity state(Inactive)' : [1], + 'Result 1' : [1] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file, ethovision_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_FrameOffByMoreThanOne(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + original_ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial-mod.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file, ethovision_file]) + + ethovision_data = pd.read_excel(original_ethovision_file) + ethovision_data.drop(ethovision_data.index[-1], inplace=True) + ethovision_data.drop(ethovision_data.index[-1], inplace=True) + ethovision_data.to_excel(ethovision_file, index=False) + + with pytest.raises(ValueError) as error: + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='tsc' + ) + + assert str(error.value) == "Length of timestamps array (53) is not the same as (or within one) of the ethovision table (51)" + + delete_files_silently([output_csv_file, ethovision_file]) + + def test_ExportEthovisionDataWithIsxbTimestampsCsv(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + ethovision_file = test_data_path + 
"/unit_test/nVision/ethovision/ethovision_trial.csv" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='tsc' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (us)' : [459472762890], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : ['None'], + 'Y center' : ['None'], + 'Area' : ['None'], + 'Areachange' : ['None'], + 'Elongation' : ['None'], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'LED start' : [0.0], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Activity' : ['None'], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (us)' : [459474498867], + 'Trial time' : [2.08], + 'Recording time' : [2.08], + 'X center' : ['None'], + 'Y center' : ['None'], + 'Area' : ['None'], + 'Areachange' : ['None'], + 'Elongation' : ['None'], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'LED start' : [0.0], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Activity' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefStart(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + isxd_file = test_dir + "/2022-06-08-23-53-41_video.isxd" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + isx.export_ethovision_data_with_isxb_timestamps( + 
input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + input_ref_file=isxd_file, + time_ref='start' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [0.229951], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : [40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1.965928], + 'Trial time' : [1.733], + 'Recording time' : [1.733], + 'X center' : [42.9174], + 'Y center' : [71.0059], + 'Area' : [3369.08], + 'Areachange' : [551.344], + 'Elongation' : [0.487716], + 'Distance moved' : [2.41289], + 'Velocity' : [72.3875], + 'Activity' : [0.250916], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefUnix(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + original_isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1-mod.isxb" + isxd_file = test_dir + "/2022-06-08-23-53-41_video.isxd" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file, isxb_file]) + shutil.copyfile(original_isxb_file, isxb_file) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + 
input_ref_file=isxd_file, + time_ref='unix' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1654732422.118], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : [40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1654732423.853977], + 'Trial time' : [1.733], + 'Recording time' : [1.733], + 'X center' : [42.9174], + 'Y center' : [71.0059], + 'Area' : [3369.08], + 'Areachange' : [551.344], + 'Elongation' : [0.487716], + 'Distance moved' : [2.41289], + 'Velocity' : [72.3875], + 'Activity' : [0.250916], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file, isxb_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefStart_RefIsxb(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + input_ref_file=isxb_file, + time_ref='start' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [0.0], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : 
[40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1.735977], + 'Trial time' : [1.733], + 'Recording time' : [1.733], + 'X center' : [42.9174], + 'Y center' : [71.0059], + 'Area' : [3369.08], + 'Areachange' : [551.344], + 'Elongation' : [0.487716], + 'Distance moved' : [2.41289], + 'Velocity' : [72.3875], + 'Activity' : [0.250916], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefUnix_RefIsxb(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + original_isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1-mod.isxb" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file, isxb_file]) + shutil.copyfile(original_isxb_file, isxb_file) + + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + input_ref_file=isxb_file, + time_ref='unix' + ) + + df = pd.read_csv(output_csv_file) + df.fillna('None', inplace=True) + assert (df.iloc[0] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1654732421.577], + 'Trial time' : [0.0], + 'Recording time' : [0.0], + 'X center' : [40.7033], + 'Y center' : [71.2558], + 'Area' : [3010.72], + 'Areachange' : [0.0], + 'Elongation' : [0.553722], + 'Distance moved' : ['None'], + 
'Velocity' : ['None'], + 'Activity' : ['None'], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [0.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + assert (df.iloc[-1] == + pd.DataFrame({ + 'isxb Frame Timestamp (s)' : [1654732423.312977], + 'Trial time' : [1.733], + 'Recording time' : [1.733], + 'X center' : [42.9174], + 'Y center' : [71.0059], + 'Area' : [3369.08], + 'Areachange' : [551.344], + 'Elongation' : [0.487716], + 'Distance moved' : [2.41289], + 'Velocity' : [72.3875], + 'Activity' : [0.250916], + 'Activity state(Highly active)' : [0.0], + 'Activity state(Inactive)' : [1.0], + 'Result 1' : [1.0] + }) + ).all(axis=None) + + delete_files_silently([output_csv_file, isxb_file]) + + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefStart_NoRef(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + with pytest.raises(ValueError) as error: + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='start' + ) + assert str(error.value) == "An input reference file is required for time_ref = 'start' or time_ref = 'unix'." 
+ + def test_ExportEthovisionDataWithIsxbTimestamps_TimeRefUnix_NoRef(self): + test_dir = test_data_path + "/unit_test/nVision/recordingUUID/paired-synchronized/manual" + isxb_file = test_dir + "/2022-06-08-23-53-41_video-camera-1.isxb" + ethovision_file = test_data_path + "/unit_test/nVision/ethovision/ethovision_trial.xlsx" + output_csv_file = test_dir + "/output.csv" + + delete_files_silently([output_csv_file]) + + with pytest.raises(ValueError) as error: + isx.export_ethovision_data_with_isxb_timestamps( + input_ethovision_file=ethovision_file, + input_isxb_file=isxb_file, + output_csv_file=output_csv_file, + time_ref='unix' + ) + assert str(error.value) == "An input reference file is required for time_ref = 'start' or time_ref = 'unix'." diff --git a/isx/test/test_file_metadata.py b/isx/test/test_file_metadata.py new file mode 100644 index 0000000..2e85600 --- /dev/null +++ b/isx/test/test_file_metadata.py @@ -0,0 +1,93 @@ +import os + +from test.utilities.setup import delete_files_silently, test_data_path + +import pytest + +import isx + +data_expected = {'Acquisition SW Version': '1.2.0', + 'Animal Date of Birth': '2018, early foggy morning in the mid November', + 'Animal Description': 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.', + 'Animal ID': 'Algernon XII Junior', + 'Animal Sex': 'm', + 'Animal Species': 'Lorem ipsum dolor sit amet', + 'Animal Weight': 100500, + 'Experimenter Name': 'John James "Jimmy" O\'Grady', + 'Exposure Time (ms)': 17, + 'Microscope EX LED Power (mw/mm^2)': 0.7, + 'Microscope Focus': 500, + 'Microscope Gain': 1, + 'Microscope OG LED Power (mw/mm^2)': 5, + 'Microscope Serial Number': 11094105, + 'Microscope Type': 'NVoke2', + 'Probe Diameter (mm)': 0.5, + 'Probe Flip': 'none', + 'Probe Length (mm)': 8.4, + 'Probe Pitch': 2, + 'Probe Rotation (degrees)': 0, + 'Probe Type': 'Straight Lens', + 'Session Name': 'Session 20181120-182836' + } +input_file = test_data_path + r'/unit_test/hub/2018-11-20-18-35-03/' + '2018-11-20-18-35-03_video.isxd' +output_dir = test_data_path + r'/unit_test/output' + + +class TestNV3FileMetadataMovie: + def test_nv3_isxd_movie_captured(self): + metadata = isx.Movie.read(input_file).get_acquisition_info() + assert metadata == data_expected + + @pytest.mark.skipif(not isx._is_with_algos, reason="Only for algo tests") + @pytest.mark.parametrize('algo', [] if not isx._is_with_algos else [isx.preprocess, isx.spatial_filter, isx.dff]) + def test_nv3_isxd_movie_processed(self, algo): + output_file = output_dir + r'/' + algo.__name__ + '.isxd' + delete_files_silently(output_file) + + algo(input_file, output_file) + metadata = isx.Movie.read(output_file).get_acquisition_info() + assert metadata == data_expected + delete_files_silently(output_file) + + @pytest.mark.skipif(not isx._is_with_algos, reason="Only for algo tests") + def test_nv3_isxd_movie_processed_mc(self): + output_file = output_dir + r'/' + 'motion-correct' + '.isxd' + delete_files_silently(output_file) + + data_expected['Motion correction padding'] = False + + isx.motion_correct(input_file, output_file) + metadata = isx.Movie.read(output_file).get_acquisition_info() + assert 
metadata == data_expected + delete_files_silently(output_file) + del data_expected['Motion correction padding'] + + @pytest.mark.skipif(not isx._is_with_algos, reason="Only for algo tests") + def test_nv3_isxd_cellset_pcaica(self): + output_file = output_dir + r'/' + 'pca-ica' + '.isxd' + delete_files_silently(output_file) + + data_expected['Cell Identification Method'] = 'pca-ica' + data_expected['Trace Units'] = 'dF over F' + + isx.pca_ica(input_file, output_file, 8, 7) + metadata = isx.CellSet.read(output_file).get_acquisition_info() + assert metadata == data_expected + delete_files_silently(output_file) + + @pytest.mark.skipif(not isx._is_with_algos, reason="Only for algo tests") + def test_nv3_isxd_eventset(self): + cellset_path = output_dir + r'/' + 'pca-ica-for-ed' + '.isxd' + delete_files_silently(cellset_path) + + data_expected['Cell Identification Method'] = 'pca-ica' + data_expected['Trace Units'] = 'dF over F' + + isx.pca_ica(input_file, cellset_path, 8, 7) + output_eventset = output_dir + r'/' + 'event_detection' + '.isxd' + delete_files_silently(output_eventset) + isx.event_detection(cellset_path, output_eventset) + metadata = isx.EventSet.read(output_eventset).get_acquisition_info() + assert metadata == data_expected + delete_files_silently(cellset_path) + delete_files_silently(output_eventset) diff --git a/isx/test/utilities/__init__.py b/isx/test/utilities/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/isx/test/utilities/create_sample_data.py b/isx/test/utilities/create_sample_data.py new file mode 100644 index 0000000..e676d8f --- /dev/null +++ b/isx/test/utilities/create_sample_data.py @@ -0,0 +1,79 @@ +import numpy as np + +import isx + + +def write_sample_cellset(cs_out_file): + + # create sample data that will be used to make the cell set + num_cells = 5 + timing = isx.Timing(num_samples=14, period=isx.Duration.from_msecs(10), dropped=[3, 11], cropped=[[4, 6], [8, 9]]) + spacing = isx.Spacing(num_pixels=(3, 5)) + names = 
['C{}'.format(k) for k in range(num_cells)] + images = np.random.randn(*[num_cells, *spacing.num_pixels]).astype(np.float32) + traces = np.random.randn(*[num_cells, timing.num_samples]).astype(np.float32) + + cs_out = isx.CellSet.write(cs_out_file, timing, spacing) + for k in range(num_cells): + cs_out.set_cell_data(k, images[k, :, :], traces[k, :], name=names[k]) + cs_out.flush() + + return {'num_cells' : num_cells, + 'spacing' : spacing, + 'timing' : timing, + 'names' : names, + 'traces' : traces, + 'images' : images} + +def write_sample_vessel_diameter_set(vs_out_file): + + # create sample data that will be used to make the vessel set + num_vessels = 5 + timing = isx.Timing(num_samples=14, period=isx.Duration.from_msecs(10)) + spacing = isx.Spacing(num_pixels=(3, 5)) + names = ['V{}'.format(k) for k in range(num_vessels)] + images = np.random.randn(*[num_vessels, *spacing.num_pixels]).astype(np.float32) + lines = np.random.randint(0, min(spacing.num_pixels), (num_vessels, 2, 2)) + traces = np.random.randn(*[num_vessels, timing.num_samples]).astype(np.float32) + cen_traces = np.random.randn(*[num_vessels, timing.num_samples]).astype(np.float32) + vs_out = isx.VesselSet.write(vs_out_file, timing, spacing, 'vessel diameter') + for k in range(num_vessels): + vs_out.set_vessel_diameter_data(k, images[k, :, :], lines[k, :, :], traces[k, :], cen_traces[k, :],name=names[k]) + vs_out.flush() + + return {'num_vessels' : num_vessels, + 'spacing' : spacing, + 'timing' : timing, + 'names' : names, + 'traces' : traces, + 'center_traces' : cen_traces, + 'lines' : lines, + 'images' : images} + +def write_sample_rbc_velocity_set(vs_out_file): + + # create sample data that will be used to make the vessel set + num_vessels = 5 + correlation_sizes = np.random.randint(2, 5, size=(num_vessels, 2)) + timing = isx.Timing(num_samples=14, period=isx.Duration.from_msecs(10)) + spacing = isx.Spacing(num_pixels=(3, 5)) + names = ['V{}'.format(k) for k in range(num_vessels)] + images = 
np.random.randn(*[num_vessels, *spacing.num_pixels]).astype(np.float32) + lines = np.random.randint(0, min(spacing.num_pixels), (num_vessels, 4, 2)) + traces = np.random.randn(*[num_vessels, timing.num_samples]).astype(np.float32) + dir_traces = np.random.randn(*[num_vessels, timing.num_samples]).astype(np.float32) + corr_traces = [np.random.randn(*[timing.num_samples, 3, correlation_sizes[i][0], correlation_sizes[i][1]]).astype(np.float32) for i in range(num_vessels)] + vs_out = isx.VesselSet.write(vs_out_file, timing, spacing, 'rbc velocity') + for k in range(num_vessels): + vs_out.set_rbc_velocity_data(k, images[k, :, :], lines[k, :, :], traces[k, :], dir_traces[k, :], corr_traces[k], name=names[k]) + vs_out.flush() + + return {'num_vessels' : num_vessels, + 'spacing' : spacing, + 'timing' : timing, + 'names' : names, + 'traces' : traces, + 'direction_traces' : dir_traces, + 'correlations_traces' : corr_traces, + 'lines' : lines, + 'images' : images} diff --git a/isx/test/utilities/setup.py b/isx/test/utilities/setup.py new file mode 100644 index 0000000..d8428df --- /dev/null +++ b/isx/test/utilities/setup.py @@ -0,0 +1,24 @@ +import os +import shutil +import isx + +test_data_path = os.environ['ISX_TEST_DATA_PATH'] + + +def delete_files_silently(files): + if isinstance(files, str): + files = [files] + for f in files: + if os.path.isfile(f): + os.remove(f) + +def delete_dirs_silently(dirs): + if isinstance(dirs, str): + dirs = [dirs] + for d in dirs: + if os.path.isdir(d): + shutil.rmtree(d, ignore_errors=True) + + +def is_file(f): + return os.path.isfile(f) diff --git a/isx/util.py b/isx/util.py new file mode 100644 index 0000000..b0fa888 --- /dev/null +++ b/isx/util.py @@ -0,0 +1,86 @@ +""" +The util module contains miscellanenous functions that would ideally +be built-in for Python or are specific to the isx package. 
+""" + +import os +import ctypes + +import isx._internal + +def get_file_stem(in_file): + """Get the file stem for file that can have multiple extensions.""" + in_file_stem = os.path.basename(in_file) + while '.' in in_file_stem: + in_file_stem = os.path.splitext(in_file_stem)[0] + return in_file_stem + +def get_file_extension(in_file): + """ Get full extension for file that can have multiple extensions.""" + basename = os.path.basename(in_file) + return basename[basename.index('.'):] + + +def make_output_file_path(in_file, out_dir, suffix=None, ext='isxd'): + """ + Make an output file path from an input path, output directory, suffix and extension. + + This is useful for generate output file paths for processing steps. + + Arguments + --------- + in_file : str + The input file path. + out_dir : str + The output directory path. + suffix : str + The suffix to append to the file stem with a '-'. + If left empty, no suffix is appended. + ext : 'isxd' + The output file extension, not including the '.'. + + Returns + ------- + str + The output file path. + + Examples + -------- + Make the output file path for a preprocessed recording. + + >>> make_output_file_path('in_dir/in_file.xml', 'out_dir', 'PP') + 'out_dir/in_file-PP.isxd' + """ + in_file_stem = get_file_stem(in_file) + + if suffix: + return os.path.join(out_dir, '{}-{}.{}'.format(in_file_stem, suffix, ext)) + else: + return os.path.join(out_dir, '{}.{}'.format(in_file_stem, ext)) + + +def make_output_file_paths(in_files, out_dir, suffix, ext='isxd'): + """ Like :func:`isx.make_output_file_path`, but for many files. + """ + return [make_output_file_path(f, out_dir, suffix, ext=ext) for f in in_files] + + +def verify_deinterleave(in_file, efocus): + """ + Verify if all frames from movie has the same efocus as provided. + + Arguments + --------- + in_file : str + The input file path. + efocus : int + The efocus value to be compared with. 
+ + Returns + ------- + bool + True if the movie is successfully verified, False otherwise. + """ + success = ctypes.c_int() + isx._internal.c_api.isx_movie_verify_deinterleave(in_file.encode('utf-8'), ctypes.c_uint16(efocus), ctypes.byref(success)) + return success.value > 0 diff --git a/isxcore b/isxcore new file mode 160000 index 0000000..b8116a3 --- /dev/null +++ b/isxcore @@ -0,0 +1 @@ +Subproject commit b8116a3777715e3d573b038d100d006bb22275ed diff --git a/pyproject.toml b/pyproject.toml index 660c8b6..8cf3256 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,63 +1,3 @@ - - -[tool.poetry] -name = "isx" -version = "0.0.0.dev" -description = "Python-based ISXD file reader" -authors = ["Inscopix, Inc. "] -maintainers = ["Inscopix, Inc. "] -readme = "README.md" -license = "CC-BY-NC-4.0" -homepage = "https://github.com/inscopix/py_isx" -repository = "https://github.com/inscopix/py_isx" -documentation = "https://inscopix.github.io/py_isx/" - - -[tool.poetry.dependencies] -python = ">=3.9,<4.0" -beartype = ">=0.15.0" -numpy = ">=1.26.2" -importlib-metadata = "^7.0.1" -mkdocs = { version = "^1.4.2", optional = true } -mkdocs-material-extensions = { version = "^1.1.1", optional =true } -mkdocs-material = { version = "^9.0.9", optional = true } -mkdocstrings = { version = "^0.24.0", optional = true } -mkdocstrings-python = { version = "^1.7.5", optional = true } -pytest = { version = ">=7.2.0", optional = true } -ipykernel = { version = ">=6.20.1", optional = true } -debugpy = { version = "1.6", optional = true } -matplotlib = { version = ">=3.8.2", optional = true } -poetry2setup = { version = "^1.1.0", optional = true } -requests = { version = "^2.31.0", optional = true } -mkdocs-git-revision-date-localized-plugin = { version = "^1.2.2", optional = true } -mkdocs-git-committers-plugin-2 = { version = "^2.2.3", optional = true } - - -[tool.poetry.extras] -test = ["pytest","requests","poetry2setup"] -dev = ["ipykernel", "debugpy","matplotlib","poetry2setup"] -docs 
= ["mkdocs", "mkdocs-material-extensions", "mkdocs-material", "mkdocstrings" ,"mkdocstrings-python","mkdocs-git-revision-date-localized-plugin","mkdocs-git-committers-plugin-2"] - [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - -[tool.black] -line-length = 79 -exclude = "setup.py" - -[tool.interrogate] -fail-under = 100 -exclude = ["setup.py", "docs", "build","tests"] -ignore-init-method = true -ignore-init-module = true -ignore-magic = true -ignore-semiprivate = true -ignore-private = true -ignore-property-decorators = true -ignore-module = true -ignore-nested-functions = true -ignore-nested-classes = true -ignore-setters = false -ignore-regex = ["^get$", "^mock_.*", ".*BaseClass.*"] -color = true \ No newline at end of file +requires = ["setuptools>=61.0"] +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/setup.py b/setup.py index 57ebd28..c30eaa9 100644 --- a/setup.py +++ b/setup.py @@ -1,48 +1,23 @@ -# -*- coding: utf-8 -*- -from setuptools import setup - -packages = \ -['isx'] - -package_data = \ -{'': ['*']} - -install_requires = \ -['beartype>=0.15.0', 'importlib-metadata>=7.0.1,<8.0.0', 'numpy>=1.26.2'] - -extras_require = \ -{'dev': ['ipykernel>=6.20.1', - 'debugpy==1.6', - 'matplotlib>=3.8.2', - 'poetry2setup>=1.1.0,<2.0.0'], - 'docs': ['mkdocs>=1.4.2,<2.0.0', - 'mkdocs-material-extensions>=1.1.1,<2.0.0', - 'mkdocs-material>=9.0.9,<10.0.0', - 'mkdocstrings>=0.24.0,<0.25.0', - 'mkdocstrings-python>=1.7.5,<2.0.0', - 'mkdocs-git-revision-date-localized-plugin>=1.2.2,<2.0.0', - 'mkdocs-git-committers-plugin-2>=2.2.3,<3.0.0'], - 'test': ['pytest>=7.2.0', - 'poetry2setup>=1.1.0,<2.0.0', - 'requests>=2.31.0,<3.0.0']} - -setup_kwargs = { - 'name': 'isx', - 'version': '0.0.0.dev0', - 'description': 'Python-based ISXD file reader', - 'long_description': '# isx: pure-python API to read Inscopix data\n\n![](https://github.com/inscopix/py_isx/actions/workflows/main.yml/badge.svg) 
\n![](https://img.shields.io/pypi/v/isx)\n\nThis is a pure-python API to read Inscopix ISXD files. \n\n\n## Documentation\n\n[Read the documentation](https://inscopix.github.io/py_isx/)\n\n## Support\n\n| File type | Support |\n| --------- | ------- |\n| ISXD CellSet | ✅ |\n| ISXD Movie | ✅ |\n| ISXD Movie (multi-plane) | ❌ |\n| ISXD Movie (dual-color) | ❌ |\n| GPIO data | ❌ |\n| ISXD Events | ❌ |\n| ISXD VesselSet | ❌ |\n\n\n## Install\n\n### Poetry\n\n```bash\npoetry add isx\n```\n\n### pip\n\n\n```bash\npip install isx\n```\n\n## Caution\n\nThis is a work in progress, and all reading functions in the IDPS Python API are not supported yet. \n\n\n## Testing\n\nThis code is tested using GitHub Actions on the following python\nversions:\n\n- 3.9\n- 3.10\n- 3.11\n- 3.12\n', - 'author': 'Inscopix, Inc.', - 'author_email': 'support@inscopix.com', - 'maintainer': 'Inscopix, Inc.', - 'maintainer_email': 'support@inscopix.com', - 'url': 'https://github.com/inscopix/py_isx', - 'packages': packages, - 'package_data': package_data, - 'install_requires': install_requires, - 'extras_require': extras_require, - 'python_requires': '>=3.9,<4.0', -} - - -setup(**setup_kwargs) - +from setuptools import setup, find_namespace_packages + +setup( + name='isx', + version='2.0.0', + packages=find_namespace_packages(), + python_requires='>=3.9,<3.13', + install_requires=[ + 'h5py>=2.8.0', + 'numpy>=1.14', + 'scipy>=1.0', + 'tifffile>=0.15.1', + 'pandas>=0.20.1', + 'pillow>=8.0.1', + 'openpyxl>=3.0.10', # Required for pandas Excel support + ], + include_package_data=True, + description="Inscopix Data Processing Software Python API", + url="https://www.inscopix.com/support", + author="Inscopix, Inc.", + author_email="support@inscopix.bruker.com", + has_ext_modules=lambda: True +) diff --git a/tests/data.py b/tests/data.py deleted file mode 100644 index 2555190..0000000 --- a/tests/data.py +++ /dev/null @@ -1,48 +0,0 @@ -"""this module contains helper code that downloads test -data from a 
github releases page""" - -import os -from pathlib import Path - -import requests -from beartype import beartype - -if os.path.exists("/ideas/data/") and os.access("/ideas/data/", os.W_OK): - data_root = "/ideas/data/" -elif os.path.exists("/tmp"): - # on a POSIX system, use /tmp - # this will work even if this repo is installed in - # "non-editable" mode inside a "site-packages" folder - # where you don't have write permissions - data_root = os.path.join("/tmp", "data") - if not os.path.isdir(data_root): - os.makedirs(data_root) - -else: - # we're on a windows system, or some other weird system - # attempt to use the install dir. this may fail - # if we don't have permissions to write here - data_root = os.path.join((Path(__file__).parent.parent), "data") - - if not os.path.isdir(data_root): - os.makedirs(data_root) - - -@beartype -def download(file_name: str) -> str: - """helper function that downloads test data (if needed) - and returns a path to the file on the local filesystem""" - - file_path = os.path.join(data_root, file_name) - - if os.path.exists(file_path): - return file_path - - response = requests.get( - f"https://github.com/inscopix/py_isx/releases/download/test-data/{file_name}" - ) - - with open(file_path, "wb") as file: - file.write(response.content) - - return file_path diff --git a/tests/test_isx.py b/tests/test_isx.py deleted file mode 100644 index 7258ed7..0000000 --- a/tests/test_isx.py +++ /dev/null @@ -1,222 +0,0 @@ -""" tests for the Python based ISX module""" - - -import isx -import numpy as np -import pytest -from data import download - -# information about each movie that we will check -movie_info = [ - dict( - name="movie_128x128x100_part1.isxd", - dtype=np.float32, - num_pixels=(128, 128), - num_samples=100, - frame_max=1146.0001, - frame_min=703.0001, - frame_sum=15429191.0, - ), - dict( - name="movie_longer_than_3_min.isxd", - dtype=np.uint16, - num_pixels=(33, 29), - num_samples=1248, - frame_max=2658, - frame_min=492, - 
frame_sum=1400150, - ), - dict( - name="movie_u8.isxd", - dtype=np.uint8, - num_pixels=(3, 4), - num_samples=5, - frame_max=11, - frame_min=0, - frame_sum=66, - ), -] - -cellset_info = [ - dict( - name="empty_cellset.isxd", - num_cells=0, - num_pixels=(4, 5), - num_samples=7, - n_accepted=0, - n_rejected=0, - ), - dict( - name="cellset.isxd", - num_cells=4, - num_pixels=(366, 398), - num_samples=5444, - n_accepted=3, - n_rejected=1, - ), - dict( - name="cellset_series_part1.isxd", - num_cells=6, - num_pixels=(21, 21), - num_samples=100, - n_accepted=0, - n_rejected=0, - ), -] - -# download files if needed and resolve to local File system -for item in movie_info: - item["name"] = download(item["name"]) -for item in cellset_info: - item["name"] = download(item["name"]) - - -cell_set_methods = [ - "get_cell_name", - "get_cell_image_data", - "get_cell_trace_data", - "get_cell_status", -] - - -def _read_all_status(cell_set: isx.CellSet) -> list[str]: - """helper function to read all status in cellset""" - cell_status = [] - - for i in range(cell_set.num_cells): - cell_status.append(cell_set.get_cell_status(i)) - - return cell_status - - -@pytest.mark.parametrize("item", cellset_info) -def test_cellset_status(item): - """check that we can read the number of samples correctly""" - - cell_set = isx.CellSet.read(item["name"]) - cell_status = _read_all_status(cell_set) - - assert ( - cell_status.count("accepted") == item["n_accepted"] - ), f"Could not read the number of accepted cells correctly for {item['name']}" - - assert ( - cell_status.count("rejected") == item["n_rejected"] - ), f"Could not read the number of accepted cells correctly for {item['name']}" - - -@pytest.mark.parametrize("item", cellset_info) -def test_cellset_num_samples(item): - """check that we can read the number of samples correctly""" - - cell_set = isx.CellSet.read(item["name"]) - - assert ( - cell_set.timing.num_samples == item["num_samples"] - ), f"Could not read the number of pixels correctly for 
{item['name']}" - - -@pytest.mark.parametrize("item", cellset_info) -def test_cellset_num_pixels(item): - """check that we can read the number of pixels correctly""" - - cell_set = isx.CellSet.read(item["name"]) - - assert ( - cell_set.spacing.num_pixels == item["num_pixels"] - ), f"Could not read the number of pixels correctly for {item['name']}" - - -@pytest.mark.parametrize("item", cellset_info) -def test_read_num_cells(item): - """check that we can read the number of cells in a cellset correctly""" - - cell_set = isx.CellSet.read(item["name"]) - - assert ( - cell_set.num_cells == item["num_cells"] - ), f"Could not read the number of cells correctly for {item['name']}" - - -@pytest.mark.parametrize("method", cell_set_methods) -@pytest.mark.parametrize("item", cellset_info) -def test_error_on_bad_cell_index(item, method): - """check that we get the correct error message when we try to read info from a cell that doesn't exist""" - - cell_set = isx.CellSet.read(item["name"]) - - with pytest.raises(IndexError, match="Cell ID must be >=0"): - getattr(cell_set, method)(-1) - - with pytest.raises(IndexError, match="Cannot access cell"): - getattr(cell_set, method)(cell_set.num_cells + 1) - - -@pytest.mark.parametrize("item", movie_info) -def test_isxd_type_movie(item): - """test that we can identify file types correctly""" - - movie_file = item["name"] - - assert ( - isx.isxd_type(movie_file) == "miniscope_movie" - ), f"Expected {movie_file} to be of type miniscope_movie" - - -@pytest.mark.parametrize("item", movie_info) -def test_movie_data_type(item): - """check that we can correctly identify movie data type""" - - movie_name = item["name"] - movie = isx.Movie.read(movie_name) - - assert ( - movie.data_type == item["dtype"] - ), f"Could not correctly read data type of movie {movie_name}" - - assert movie.data_type == movie.get_frame_data(0).dtype - - -@pytest.mark.parametrize("item", movie_info) -def test_movie_num_pixels(item): - """check that we can correctly read 
the frame size of each movie""" - - movie_name = item["name"] - movie = isx.Movie.read(movie_name) - - assert ( - movie.spacing.num_pixels == item["num_pixels"] - ), f"Could not correctly read num_pixels of movie {movie_name}" - - -@pytest.mark.parametrize("item", movie_info) -def test_movie_num_samples(item): - """check that we can correctly read the number of samples (number of frames) from the movie""" - - movie_name = item["name"] - movie = isx.Movie.read(movie_name) - - assert ( - movie.timing.num_samples == item["num_samples"] - ), f"Could not correctly read num_samples of movie {movie_name}" - - -@pytest.mark.parametrize("item", movie_info) -def test_movie_read_frame(item): - """check that we can correctly read the first frame of the movie by checking that frame's min, sum and max""" - - movie_name = item["name"] - movie = isx.Movie.read(movie_name) - frame = movie.get_frame_data(0) - - assert np.isclose( - frame.max(), item["frame_max"] - ), f"Could not correctly the first frame of {movie_name}" - - assert np.isclose( - frame.min(), item["frame_min"] - ), f"Could not correctly the first frame of {movie_name}" - - assert np.isclose( - frame.sum(), item["frame_sum"] - ), f"Could not correctly the first frame of {movie_name}"