Skip to content

Commit

Permalink
Added better support for forward and backward derivatives
Browse files Browse the repository at this point in the history
  • Loading branch information
david-zwicker committed May 9, 2024
1 parent 931b674 commit 002dc32
Show file tree
Hide file tree
Showing 7 changed files with 157 additions and 40 deletions.
132 changes: 106 additions & 26 deletions pde/grids/operators/cartesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -515,22 +515,36 @@ def make_laplace(
return laplace


def _make_gradient_scipy_nd(grid: CartesianGrid) -> OperatorType:
def _make_gradient_scipy_nd(
grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central"
) -> OperatorType:
"""make a gradient operator using the scipy module
Args:
grid (:class:`~pde.grids.cartesian.CartesianGrid`):
The grid for which the operator is created
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
"""
from scipy import ndimage

scaling = 0.5 / grid.discretization
scaling = 1 / grid.discretization
dim = grid.dim
shape_out = (dim,) + grid.shape

if method == "central":
stencil = [-0.5, 0, 0.5]
elif method == "forward":
stencil = [0, -1, 1]
elif method == "backward":
stencil = [-1, 1, 0]
else:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 546 in pde/grids/operators/cartesian.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/cartesian.py#L546

Added line #L546 was not covered by tests

def gradient(arr: np.ndarray, out: np.ndarray) -> None:
"""apply gradient operator to array `arr`"""
assert arr.shape == grid._shape_full
Expand All @@ -543,45 +557,68 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None:
with np.errstate(all="ignore"):
# some errors can happen for ghost cells
for i in range(dim):
out[i] = ndimage.convolve1d(arr, [1, 0, -1], axis=i)[valid] * scaling[i]
out[i] = ndimage.correlate1d(arr, stencil, axis=i)[valid] * scaling[i]

return gradient


def _make_gradient_numba_1d(grid: CartesianGrid) -> OperatorType:
def _make_gradient_numba_1d(
grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central"
) -> OperatorType:
"""make a 1d gradient operator using numba compilation
Args:
grid (:class:`~pde.grids.cartesian.CartesianGrid`):
The grid for which the operator is created
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
"""
if method not in {"central", "forward", "backward"}:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 581 in pde/grids/operators/cartesian.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/cartesian.py#L581

Added line #L581 was not covered by tests

dim_x = grid.shape[0]
scale = 0.5 / grid.discretization[0]
dx = grid.discretization[0]

@jit
def gradient(arr: np.ndarray, out: np.ndarray) -> None:
"""apply gradient operator to array `arr`"""
for i in range(1, dim_x + 1):
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale
if method == "central":
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) / (2 * dx)
elif method == "forward":
out[0, i - 1] = (arr[i + 1] - arr[i]) / dx
elif method == "backward":
out[0, i - 1] = (arr[i] - arr[i - 1]) / dx

return gradient # type: ignore


def _make_gradient_numba_2d(grid: CartesianGrid) -> OperatorType:
def _make_gradient_numba_2d(
grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central"
) -> OperatorType:
"""make a 2d gradient operator using numba compilation
Args:
grid (:class:`~pde.grids.cartesian.CartesianGrid`):
The grid for which the operator is created
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
"""
dim_x, dim_y = grid.shape
scale_x, scale_y = 0.5 / grid.discretization
if method == "central":
scale_x, scale_y = 0.5 / grid.discretization
elif method in {"forward", "backward"}:
scale_x, scale_y = 1 / grid.discretization
else:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 621 in pde/grids/operators/cartesian.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/cartesian.py#L621

Added line #L621 was not covered by tests

# use parallel processing for large enough arrays
parallel = dim_x * dim_y >= config["numba.multithreading_threshold"]
Expand All @@ -591,24 +628,41 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None:
"""apply gradient operator to array `arr`"""
for i in nb.prange(1, dim_x + 1):
for j in range(1, dim_y + 1):
out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i - 1, j]) * scale_x
out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j - 1]) * scale_y
if method == "central":
out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i - 1, j]) * scale_x
out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j - 1]) * scale_y
elif method == "forward":
out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i, j]) * scale_x
out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j]) * scale_y
elif method == "backward":
out[0, i - 1, j - 1] = (arr[i, j] - arr[i - 1, j]) * scale_x
out[1, i - 1, j - 1] = (arr[i, j] - arr[i, j - 1]) * scale_y

return gradient # type: ignore


def _make_gradient_numba_3d(grid: CartesianGrid) -> OperatorType:
def _make_gradient_numba_3d(
grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central"
) -> OperatorType:
"""make a 3d gradient operator using numba compilation
Args:
grid (:class:`~pde.grids.cartesian.CartesianGrid`):
The grid for which the operator is created
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
"""
dim_x, dim_y, dim_z = grid.shape
scale_x, scale_y, scale_z = 0.5 / grid.discretization
if method == "central":
scale_x, scale_y, scale_z = 0.5 / grid.discretization
elif method in {"forward", "backward"}:
scale_x, scale_y, scale_z = 1 / grid.discretization
else:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 665 in pde/grids/operators/cartesian.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/cartesian.py#L665

Added line #L665 was not covered by tests

# use parallel processing for large enough arrays
parallel = dim_x * dim_y * dim_z >= config["numba.multithreading_threshold"]
Expand All @@ -619,22 +673,45 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None:
for i in nb.prange(1, dim_x + 1):
for j in range(1, dim_y + 1):
for k in range(1, dim_z + 1):
out[0, i - 1, j - 1, k - 1] = (
arr[i + 1, j, k] - arr[i - 1, j, k]
) * scale_x
out[1, i - 1, j - 1, k - 1] = (
arr[i, j + 1, k] - arr[i, j - 1, k]
) * scale_y
out[2, i - 1, j - 1, k - 1] = (
arr[i, j, k + 1] - arr[i, j, k - 1]
) * scale_z
if method == "central":
out[0, i - 1, j - 1, k - 1] = (
arr[i + 1, j, k] - arr[i - 1, j, k]
) * scale_x
out[1, i - 1, j - 1, k - 1] = (
arr[i, j + 1, k] - arr[i, j - 1, k]
) * scale_y
out[2, i - 1, j - 1, k - 1] = (
arr[i, j, k + 1] - arr[i, j, k - 1]
) * scale_z
elif method == "forward":
out[0, i - 1, j - 1, k - 1] = (
arr[i + 1, j, k] - arr[i, j, k]
) * scale_x
out[1, i - 1, j - 1, k - 1] = (
arr[i, j + 1, k] - arr[i, j, k]
) * scale_y
out[2, i - 1, j - 1, k - 1] = (
arr[i, j, k + 1] - arr[i, j, k]
) * scale_z
elif method == "backward":
out[0, i - 1, j - 1, k - 1] = (
arr[i, j, k] - arr[i - 1, j, k]
) * scale_x
out[1, i - 1, j - 1, k - 1] = (
arr[i, j, k] - arr[i, j - 1, k]
) * scale_y
out[2, i - 1, j - 1, k - 1] = (
arr[i, j, k] - arr[i, j, k - 1]
) * scale_z

return gradient # type: ignore


@CartesianGrid.register_operator("gradient", rank_in=0, rank_out=1)
def make_gradient(
grid: CartesianGrid, backend: Literal["auto", "numba", "scipy"] = "auto"
grid: CartesianGrid,
backend: Literal["auto", "numba", "scipy"] = "auto",
method: Literal["central", "forward", "backward"] = "central",
) -> OperatorType:
"""make a gradient operator on a Cartesian grid
Expand All @@ -644,6 +721,9 @@ def make_gradient(
backend (str):
Backend used for calculating the gradient operator.
If backend='auto', a suitable backend is chosen automatically.
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
Expand All @@ -659,18 +739,18 @@ def make_gradient(

if backend == "numba":
if dim == 1:
gradient = _make_gradient_numba_1d(grid)
gradient = _make_gradient_numba_1d(grid, method=method)
elif dim == 2:
gradient = _make_gradient_numba_2d(grid)
gradient = _make_gradient_numba_2d(grid, method=method)
elif dim == 3:
gradient = _make_gradient_numba_3d(grid)
gradient = _make_gradient_numba_3d(grid, method=method)
else:
raise NotImplementedError(
f"Numba gradient operator not implemented for dimension {dim}"
)

elif backend == "scipy":
gradient = _make_gradient_scipy_nd(grid)
gradient = _make_gradient_scipy_nd(grid, method=method)

else:
raise ValueError(f"Backend `{backend}` is not defined")
Expand Down
4 changes: 2 additions & 2 deletions pde/grids/operators/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@ def make_derivative(
axis (int):
The axis along which the derivative will be taken
method (str):
The method for calculating the derivative. Possible values are
'central', 'forward', and 'backward'.
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an full array of values including those at
Expand Down
22 changes: 18 additions & 4 deletions pde/grids/operators/polar_sym.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,14 +62,19 @@ def laplace(arr: np.ndarray, out: np.ndarray) -> None:

@PolarSymGrid.register_operator("gradient", rank_in=0, rank_out=1)
@fill_in_docstring
def make_gradient(grid: PolarSymGrid) -> OperatorType:
def make_gradient(
grid: PolarSymGrid, method: Literal["central", "forward", "backward"] = "central"
) -> OperatorType:
"""make a discretized gradient operator for a polar grid
{DESCR_POLAR_GRID}
Args:
grid (:class:`~pde.grids.spherical.PolarSymGrid`):
The polar grid for which this operator will be defined
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
Expand All @@ -78,14 +83,23 @@ def make_gradient(grid: PolarSymGrid) -> OperatorType:

# calculate preliminary quantities
dim_r = grid.shape[0]
dr = grid.discretization[0]
scale_r = 1 / (2 * dr)
if method == "central":
scale_r = 0.5 / grid.discretization[0]
elif method in {"forward", "backward"}:
scale_r = 1 / grid.discretization[0]
else:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 91 in pde/grids/operators/polar_sym.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/polar_sym.py#L91

Added line #L91 was not covered by tests

@jit
def gradient(arr: np.ndarray, out: np.ndarray) -> None:
"""apply gradient operator to array `arr`"""
for i in range(1, dim_r + 1): # iterate inner radial points
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r
if method == "central":
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r
elif method == "forward":
out[0, i - 1] = (arr[i + 1] - arr[i]) * scale_r
elif method == "backward":
out[0, i - 1] = (arr[i] - arr[i - 1]) * scale_r
out[1, i - 1] = 0 # no angular dependence by definition

return gradient # type: ignore
Expand Down
24 changes: 19 additions & 5 deletions pde/grids/operators/spherical_sym.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,14 +87,20 @@ def laplace(arr: np.ndarray, out: np.ndarray) -> None:

@SphericalSymGrid.register_operator("gradient", rank_in=0, rank_out=1)
@fill_in_docstring
def make_gradient(grid: SphericalSymGrid) -> OperatorType:
def make_gradient(
grid: SphericalSymGrid,
method: Literal["central", "forward", "backward"] = "central",
) -> OperatorType:
"""make a discretized gradient operator for a spherical grid
{DESCR_SPHERICAL_GRID}
Args:
grid (:class:`~pde.grids.spherical.SphericalSymGrid`):
The polar grid for which this operator will be defined
method (str):
The method for calculating the derivative. Possible values are 'central',
'forward', and 'backward'.
Returns:
A function that can be applied to an array of values
Expand All @@ -103,15 +109,23 @@ def make_gradient(grid: SphericalSymGrid) -> OperatorType:

# calculate preliminary quantities
dim_r = grid.shape[0]
dr = grid.discretization[0]

scale_r = 1 / (2 * dr)
if method == "central":
scale_r = 0.5 / grid.discretization[0]
elif method in {"forward", "backward"}:
scale_r = 1 / grid.discretization[0]
else:
raise ValueError(f"Unknown derivative type `{method}`")

Check warning on line 117 in pde/grids/operators/spherical_sym.py

View check run for this annotation

Codecov / codecov/patch

pde/grids/operators/spherical_sym.py#L117

Added line #L117 was not covered by tests

@jit
def gradient(arr: np.ndarray, out: np.ndarray) -> None:
"""apply gradient operator to array `arr`"""
for i in range(1, dim_r + 1): # iterate inner radial points
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r
if method == "central":
out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r
elif method == "forward":
out[0, i - 1] = (arr[i + 1] - arr[i]) * scale_r
elif method == "backward":
out[0, i - 1] = (arr[i] - arr[i - 1]) * scale_r
out[1, i - 1] = out[2, i - 1] = 0 # no angular dependence by definition

return gradient # type: ignore
Expand Down
7 changes: 4 additions & 3 deletions tests/grids/operators/test_cartesian_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,13 +168,14 @@ def test_gradient_1d():


@pytest.mark.parametrize("ndim", [1, 2, 3])
@pytest.mark.parametrize("method", ["central", "forward", "backward"])
@pytest.mark.parametrize("periodic", [True, False])
def test_gradient_cart(ndim, method, periodic, rng):
    """test different gradient operators"""
    bcs = _get_random_grid_bcs(ndim, dx="uniform", periodic=periodic)
    field = ScalarField.random_uniform(bcs.grid, rng=rng)
    # the scipy and numba backends must agree for every derivative method
    grad_scipy = field.gradient(bcs, backend="scipy", method=method).data
    grad_numba = field.gradient(bcs, backend="numba", method=method).data
    expected_shape = (ndim,) + bcs.grid.shape
    assert grad_scipy.shape == expected_shape
    np.testing.assert_allclose(grad_scipy, grad_numba)

Expand Down
4 changes: 4 additions & 0 deletions tests/grids/operators/test_polar_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ def test_findiff_polar():
np.testing.assert_allclose(grad.data[0, :], [1, 3, -6])
grad = s.gradient(bc="derivative")
np.testing.assert_allclose(grad.data[0, :], [1, 3, 2])
grad = s.gradient(bc="derivative", method="forward")
np.testing.assert_allclose(grad.data[0, :], [2, 4, 0])
grad = s.gradient(bc="derivative", method="backward")
np.testing.assert_allclose(grad.data[0, :], [0, 2, 4])

# test divergence
div = v.divergence(bc=["derivative", "value"])
Expand Down
4 changes: 4 additions & 0 deletions tests/grids/operators/test_spherical_operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ def test_findiff_sph():
np.testing.assert_allclose(grad.data[0, :], [1, 3, -6])
grad = s.gradient(bc="derivative")
np.testing.assert_allclose(grad.data[0, :], [1, 3, 2])
grad = s.gradient(bc="derivative", method="forward")
np.testing.assert_allclose(grad.data[0, :], [2, 4, 0])
grad = s.gradient(bc="derivative", method="backward")
np.testing.assert_allclose(grad.data[0, :], [0, 2, 4])

# test divergence
div = v.divergence(bc=["derivative", "value"], conservative=False)
Expand Down

0 comments on commit 002dc32

Please sign in to comment.