fully working local version
dkazanc committed May 2, 2024
1 parent 7e88035 commit 73911e7
Showing 11 changed files with 917 additions and 720 deletions.
6 changes: 3 additions & 3 deletions httomolibgpu/__init__.py
@@ -3,12 +3,12 @@
from httomolibgpu.misc.rescale import rescale_to_int
from httomolibgpu.prep.alignment import distortion_correction_proj_discorpy
from httomolibgpu.prep.normalize import normalize
#from httomolibgpu.prep.phase import paganin_filter_savu, paganin_filter_tomopy
from httomolibgpu.prep.phase import paganin_filter_savu, paganin_filter_tomopy
from httomolibgpu.prep.stripe import (
remove_stripe_based_sorting,
remove_stripe_ti,
remove_all_stripe,
)

from httomolibgpu.recon.algorithm import FBP, SIRT, CGLS
#from httomolibgpu.recon.rotation import find_center_vo, find_center_360, find_center_pc
#from httomolibgpu.recon.rotation import find_center_vo
from httomolibgpu.recon.rotation import find_center_vo, find_center_360, find_center_pc
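These import changes put the phase-retrieval and rotation-centre functions back on the package's public surface. A minimal sketch of what that exposes, assuming a CUDA-capable GPU, an illustrative array, and the functions' default keyword arguments (taken from the library's docs, not from this diff):

import cupy as cp
from httomolibgpu import paganin_filter_tomopy, find_center_vo

# illustrative normalised projection data: (angles, detector_y, detector_x)
projdata = cp.random.random((180, 16, 256)).astype(cp.float32)

filtered = paganin_filter_tomopy(projdata)  # phase filter re-exported here
cor = find_center_vo(projdata)              # centre-of-rotation estimate re-exported here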
12 changes: 10 additions & 2 deletions httomolibgpu/cupywrapper.py
@@ -1,11 +1,19 @@
cupy_run = False
try:
import cupy as cp
import nvtx

try:
cp.cuda.Device(0).compute_capability
cupy_run = True
except cp.cuda.runtime.CUDARuntimeError:
print("CuPy library is a major dependency for HTTomolibgpu, please install")
import numpy as cp
except ImportError:
import numpy as cp
except ImportError as e:
print(
f"Failed to import module in {__file__} with error: {e}; defaulting to CPU-only mode"
)
from unittest.mock import Mock
import numpy as cp

nvtx = Mock()
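With the wrapper extended like this, downstream modules import cp and nvtx from cupywrapper rather than guarding the imports themselves, which is exactly what the remaining files in this commit do. A stripped-down sketch of a consuming module; the function name is invented for illustration:

from httomolibgpu import cupywrapper

cp = cupywrapper.cp        # CuPy when a GPU install is present, otherwise NumPy
nvtx = cupywrapper.nvtx    # real nvtx when present, otherwise a unittest.mock.Mock


def some_gpu_filter(data: cp.ndarray) -> cp.ndarray:
    # public entry point: run the GPU implementation only if CuPy actually imported
    if cupywrapper.cupy_run:
        return __some_gpu_filter(data)
    print("some_gpu_filter won't be executed because CuPy is not installed")
    return data


@nvtx.annotate()
def __some_gpu_filter(data: cp.ndarray) -> cp.ndarray:
    # GPU path; on CPU-only installs the mocked decorator replaces this function
    # with a Mock, but the public wrapper above never calls it in that case
    return cp.clip(data, 0, None)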
8 changes: 5 additions & 3 deletions httomolibgpu/misc/corr.py
@@ -22,17 +22,18 @@

import numpy as np
from httomolibgpu import cupywrapper

cp = cupywrapper.cp

nvtx = cupywrapper.nvtx
from numpy import float32
import nvtx

__all__ = [
"median_filter",
"remove_outlier",
]

@nvtx.annotate()

def median_filter(
data: cp.ndarray,
kernel_size: int = 3,
@@ -71,6 +72,7 @@ def median_filter(
return data


@nvtx.annotate()
def __median_filter(
data: cp.ndarray,
kernel_size: int = 3,
@@ -157,7 +159,7 @@ def __median_filter(
thresholding_kernel(data, float32(dif), output)
return output

@nvtx.annotate()

def remove_outlier(
data: cp.ndarray, kernel_size: int = 3, axis: int = 0, dif: float = 0.1
) -> cp.ndarray:
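For context, the public functions touched in this file keep the signatures shown above; a hedged usage sketch with illustrative shapes and the default values visible in the diff:

import cupy as cp
from httomolibgpu.misc.corr import median_filter, remove_outlier

data = cp.random.random((128, 16, 160)).astype(cp.float32)  # illustrative 3D volume

smoothed = median_filter(data, kernel_size=3)
despeckled = remove_outlier(data, kernel_size=3, axis=0, dif=0.1)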
13 changes: 8 additions & 5 deletions httomolibgpu/misc/morph.py
@@ -22,17 +22,18 @@

import numpy as np
from httomolibgpu import cupywrapper

cp = cupywrapper.cp

import nvtx
nvtx = cupywrapper.nvtx
from typing import Literal

__all__ = [
"sino_360_to_180",
"data_resampler",
]

@nvtx.annotate()

def sino_360_to_180(
data: cp.ndarray, overlap: int = 0, rotation: Literal["left", "right"] = "left"
) -> cp.ndarray:
@@ -62,6 +63,7 @@ def sino_360_to_180(
return data


@nvtx.annotate()
def __sino_360_to_180(
data: cp.ndarray, overlap: int = 0, rotation: Literal["left", "right"] = "left"
) -> cp.ndarray:
@@ -103,7 +105,6 @@ def __sino_360_to_180(
return out


@nvtx.annotate()
def data_resampler(
data: cp.ndarray, newshape: list, axis: int = 1, interpolation: str = "linear"
) -> cp.ndarray:
@@ -123,17 +124,19 @@
Returns:
cp.ndarray: Up/Down-scaled 3D cupy array
"""
"""
if cupywrapper.cupy_run:
return __data_resampler(data, newshape, axis, interpolation)
else:
print("data_resampler won't be executed because CuPy is not installed")
return data


@nvtx.annotate()
def __data_resampler(
data: cp.ndarray, newshape: list, axis: int = 1, interpolation: str = "linear"
) -> cp.ndarray:

from cupyx.scipy.interpolate import interpn

if data.ndim != 3:
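The data_resampler docstring above promises an up/down-scaled 3D CuPy array; a short usage sketch, where the shapes and values are illustrative and newshape is assumed to give the new size of the two dimensions orthogonal to axis:

import cupy as cp
from httomolibgpu.misc.morph import data_resampler, sino_360_to_180

data = cp.random.random((180, 10, 160)).astype(cp.float32)  # illustrative projection stack

# assumption: newshape lists the new size of the two dimensions orthogonal to `axis`
resampled = data_resampler(data, newshape=[256, 256], axis=1, interpolation="linear")

# stitch a 360-degree scan into 180-degree form; the overlap value is illustrative
stitched = sino_360_to_180(data, overlap=32, rotation="left")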
10 changes: 7 additions & 3 deletions httomolibgpu/misc/rescale.py
@@ -21,15 +21,17 @@

import numpy as np
from httomolibgpu import cupywrapper

cp = cupywrapper.cp

import nvtx
nvtx = cupywrapper.nvtx
from typing import Literal, Optional, Tuple, Union

__all__ = [
"rescale_to_int",
]
@nvtx.annotate()


def rescale_to_int(
data: cp.ndarray,
perc_range_min: float = 0.0,
@@ -72,8 +74,10 @@ def rescale_to_int(
return __rescale_to_int(data, perc_range_min, perc_range_max, bits, glob_stats)
else:
print("rescale_to_int won't be executed because CuPy is not installed")
return data
return data


@nvtx.annotate()
def __rescale_to_int(
data: cp.ndarray,
perc_range_min: float = 0.0,
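This hunk shows the dispatch shape used throughout the commit: the public rescale_to_int checks cupywrapper.cupy_run and either delegates to the private, nvtx-annotated __rescale_to_int or hands the data back untouched. A hedged call sketch; the percentile values are illustrative and the bits default is assumed from the library's docs, not this diff:

import cupy as cp
from httomolibgpu.misc.rescale import rescale_to_int

data = cp.random.random((180, 16, 160)).astype(cp.float32)  # illustrative float data

# clip to the 5th-95th percentile range and rescale into an 8-bit integer range
scaled = rescale_to_int(data, perc_range_min=5.0, perc_range_max=95.0, bits=8)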
104 changes: 70 additions & 34 deletions httomolibgpu/prep/alignment.py
@@ -21,26 +21,12 @@
"""Modules for data correction"""

import numpy as np
cupy_run = False
try:
import cupy as xp

try:
xp.cuda.Device(0).compute_capability
cupy_run = True
except xp.cuda.runtime.CUDARuntimeError:
print("CuPy library is a major dependency for HTTomolibgpu, please install")
import numpy as xp
except ImportError:
import numpy as xp
from httomolibgpu import cupywrapper

from typing import Dict, List
import nvtx
cp = cupywrapper.cp
nvtx = cupywrapper.nvtx

if cupy_run:
from cupyx.scipy.ndimage import map_coordinates
else:
from scipy.ndimage import map_coordinates
from typing import Dict, List

__all__ = [
"distortion_correction_proj_discorpy",
@@ -52,9 +38,56 @@
# (which is the same as the TomoPy version
# https://github.com/tomopy/tomopy/blob/c236a2969074f5fc70189fb5545f0a165924f916/source/tomopy/prep/alignment.py#L950-L981
# but with the additional params `order` and `mode`).
@nvtx.annotate()
def distortion_correction_proj_discorpy(
data: xp.ndarray,
data: cp.ndarray,
metadata_path: str,
preview: Dict[str, List[int]],
order: int = 1,
mode: str = "reflect",
):
"""Unwarp a stack of images using a backward model.
Parameters
----------
data : cp.ndarray
3D array.
metadata_path : str
The path to the file containing the distortion coefficients for the
data.
preview : Dict[str, List[int]]
A dict containing three key-value pairs:
- a list containing the `start` value of each dimension
- a list containing the `stop` value of each dimension
- a list containing the `step` value of each dimension
order : int, optional.
The order of the spline interpolation.
mode : {'reflect', 'grid-mirror', 'constant', 'grid-constant', 'nearest',
'mirror', 'grid-wrap', 'wrap'}, optional
To determine how to handle image boundaries.
Returns
-------
cp.ndarray
3D array. Distortion-corrected image(s).
"""
if cupywrapper.cupy_run:
return __distortion_correction_proj_discorpy(
data, metadata_path, preview, order, mode
)
else:
print(
"distortion_correction_proj_discorpy won't be executed because CuPy is not installed"
)
return data


@nvtx.annotate()
def __distortion_correction_proj_discorpy(
data: cp.ndarray,
metadata_path: str,
preview: Dict[str, List[int]],
order: int = 1,
@@ -89,9 +122,12 @@ def distortion_correction_proj_discorpy(
cp.ndarray
3D array. Distortion-corrected image(s).
"""

from cupyx.scipy.ndimage import map_coordinates

# Check if it's a stack of 2D images, or only a single 2D image
if len(data.shape) == 2:
data = xp.expand_dims(data, axis=0)
data = cp.expand_dims(data, axis=0)

# Get info from metadata txt file
xcenter, ycenter, list_fact = _load_metadata_txt(metadata_path)
Expand All @@ -118,26 +154,26 @@ def distortion_correction_proj_discorpy(
ycenter = ycenter - y_offset

height, width = data.shape[y_dim + 1], data.shape[x_dim + 1]
xu_list = xp.arange(width) - xcenter
yu_list = xp.arange(height) - ycenter
xu_mat, yu_mat = xp.meshgrid(xu_list, yu_list)
ru_mat = xp.sqrt(xu_mat**2 + yu_mat**2)
fact_mat = xp.sum(
xp.asarray([factor * ru_mat**i for i, factor in enumerate(list_fact)]), axis=0
xu_list = cp.arange(width) - xcenter
yu_list = cp.arange(height) - ycenter
xu_mat, yu_mat = cp.meshgrid(xu_list, yu_list)
ru_mat = cp.sqrt(xu_mat**2 + yu_mat**2)
fact_mat = cp.sum(
cp.asarray([factor * ru_mat**i for i, factor in enumerate(list_fact)]), axis=0
)
xd_mat = xp.asarray(
xp.clip(xcenter + fact_mat * xu_mat, 0, width - 1), dtype=xp.float32
xd_mat = cp.asarray(
cp.clip(xcenter + fact_mat * xu_mat, 0, width - 1), dtype=cp.float32
)
yd_mat = xp.asarray(
xp.clip(ycenter + fact_mat * yu_mat, 0, height - 1), dtype=xp.float32
yd_mat = cp.asarray(
cp.clip(ycenter + fact_mat * yu_mat, 0, height - 1), dtype=cp.float32
)
indices = [xp.reshape(yd_mat, (-1, 1)), xp.reshape(xd_mat, (-1, 1))]
indices = xp.asarray(indices, dtype=xp.float32)
indices = [cp.reshape(yd_mat, (-1, 1)), cp.reshape(xd_mat, (-1, 1))]
indices = cp.asarray(indices, dtype=cp.float32)

# Loop over images and unwarp them
for i in range(data.shape[0]):
mat = map_coordinates(data[i], indices, order=order, mode=mode)
mat = xp.reshape(mat, (height, width))
mat = cp.reshape(mat, (height, width))
data[i] = mat

return data
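A hedged call sketch for the wrapper above. The metadata path and the preview key names (starts/stops/steps, matching the start/stop/step lists the docstring describes) are assumptions for illustration, not taken from this diff:

import cupy as cp
from httomolibgpu.prep.alignment import distortion_correction_proj_discorpy

proj = cp.random.random((180, 128, 160)).astype(cp.float32)  # illustrative projection stack

# assumed key names; the docstring only specifies start/stop/step lists per dimension
preview = {"starts": [0, 0, 0], "stops": [180, 128, 160], "steps": [1, 1, 1]}

corrected = distortion_correction_proj_discorpy(
    proj,
    metadata_path="/path/to/distortion_coeffs.txt",  # hypothetical coefficients file
    preview=preview,
    order=1,
    mode="reflect",
)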