From 002dc32d4379ecfcd1a55aab79393bc921428d55 Mon Sep 17 00:00:00 2001 From: David Zwicker Date: Thu, 9 May 2024 10:55:08 +0200 Subject: [PATCH] Added better support for forward and backward derivatives --- pde/grids/operators/cartesian.py | 132 ++++++++++++++---- pde/grids/operators/common.py | 4 +- pde/grids/operators/polar_sym.py | 22 ++- pde/grids/operators/spherical_sym.py | 24 +++- .../operators/test_cartesian_operators.py | 7 +- tests/grids/operators/test_polar_operators.py | 4 + .../operators/test_spherical_operators.py | 4 + 7 files changed, 157 insertions(+), 40 deletions(-) diff --git a/pde/grids/operators/cartesian.py b/pde/grids/operators/cartesian.py index 8696d851..0b8b6097 100644 --- a/pde/grids/operators/cartesian.py +++ b/pde/grids/operators/cartesian.py @@ -515,22 +515,36 @@ def make_laplace( return laplace -def _make_gradient_scipy_nd(grid: CartesianGrid) -> OperatorType: +def _make_gradient_scipy_nd( + grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central" +) -> OperatorType: """make a gradient operator using the scipy module Args: grid (:class:`~pde.grids.cartesian.CartesianGrid`): The grid for which the operator is created + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. Returns: A function that can be applied to an array of values """ from scipy import ndimage - scaling = 0.5 / grid.discretization + scaling = 1 / grid.discretization dim = grid.dim shape_out = (dim,) + grid.shape + if method == "central": + stencil = [-0.5, 0, 0.5] + elif method == "forward": + stencil = [0, -1, 1] + elif method == "backward": + stencil = [-1, 1, 0] + else: + raise ValueError(f"Unknown derivative type `{method}`") + def gradient(arr: np.ndarray, out: np.ndarray) -> None: """apply gradient operator to array `arr`""" assert arr.shape == grid._shape_full @@ -543,45 +557,68 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None: with np.errstate(all="ignore"): # some errors can happen for ghost cells for i in range(dim): - out[i] = ndimage.convolve1d(arr, [1, 0, -1], axis=i)[valid] * scaling[i] + out[i] = ndimage.correlate1d(arr, stencil, axis=i)[valid] * scaling[i] return gradient -def _make_gradient_numba_1d(grid: CartesianGrid) -> OperatorType: +def _make_gradient_numba_1d( + grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central" +) -> OperatorType: """make a 1d gradient operator using numba compilation Args: grid (:class:`~pde.grids.cartesian.CartesianGrid`): The grid for which the operator is created + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. 
Returns: A function that can be applied to an array of values """ + if method not in {"central", "forward", "backward"}: + raise ValueError(f"Unknown derivative type `{method}`") + dim_x = grid.shape[0] - scale = 0.5 / grid.discretization[0] + dx = grid.discretization[0] @jit def gradient(arr: np.ndarray, out: np.ndarray) -> None: """apply gradient operator to array `arr`""" for i in range(1, dim_x + 1): - out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale + if method == "central": + out[0, i - 1] = (arr[i + 1] - arr[i - 1]) / (2 * dx) + elif method == "forward": + out[0, i - 1] = (arr[i + 1] - arr[i]) / dx + elif method == "backward": + out[0, i - 1] = (arr[i] - arr[i - 1]) / dx return gradient # type: ignore -def _make_gradient_numba_2d(grid: CartesianGrid) -> OperatorType: +def _make_gradient_numba_2d( + grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central" +) -> OperatorType: """make a 2d gradient operator using numba compilation Args: grid (:class:`~pde.grids.cartesian.CartesianGrid`): The grid for which the operator is created + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. Returns: A function that can be applied to an array of values """ dim_x, dim_y = grid.shape - scale_x, scale_y = 0.5 / grid.discretization + if method == "central": + scale_x, scale_y = 0.5 / grid.discretization + elif method in {"forward", "backward"}: + scale_x, scale_y = 1 / grid.discretization + else: + raise ValueError(f"Unknown derivative type `{method}`") # use parallel processing for large enough arrays parallel = dim_x * dim_y >= config["numba.multithreading_threshold"] @@ -591,24 +628,41 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None: """apply gradient operator to array `arr`""" for i in nb.prange(1, dim_x + 1): for j in range(1, dim_y + 1): - out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i - 1, j]) * scale_x - out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j - 1]) * scale_y + if method == "central": + out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i - 1, j]) * scale_x + out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j - 1]) * scale_y + elif method == "forward": + out[0, i - 1, j - 1] = (arr[i + 1, j] - arr[i, j]) * scale_x + out[1, i - 1, j - 1] = (arr[i, j + 1] - arr[i, j]) * scale_y + elif method == "backward": + out[0, i - 1, j - 1] = (arr[i, j] - arr[i - 1, j]) * scale_x + out[1, i - 1, j - 1] = (arr[i, j] - arr[i, j - 1]) * scale_y return gradient # type: ignore -def _make_gradient_numba_3d(grid: CartesianGrid) -> OperatorType: +def _make_gradient_numba_3d( + grid: CartesianGrid, method: Literal["central", "forward", "backward"] = "central" +) -> OperatorType: """make a 3d gradient operator using numba compilation Args: grid (:class:`~pde.grids.cartesian.CartesianGrid`): The grid for which the operator is created + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. 
Returns: A function that can be applied to an array of values """ dim_x, dim_y, dim_z = grid.shape - scale_x, scale_y, scale_z = 0.5 / grid.discretization + if method == "central": + scale_x, scale_y, scale_z = 0.5 / grid.discretization + elif method in {"forward", "backward"}: + scale_x, scale_y, scale_z = 1 / grid.discretization + else: + raise ValueError(f"Unknown derivative type `{method}`") # use parallel processing for large enough arrays parallel = dim_x * dim_y * dim_z >= config["numba.multithreading_threshold"] @@ -619,22 +673,45 @@ def gradient(arr: np.ndarray, out: np.ndarray) -> None: for i in nb.prange(1, dim_x + 1): for j in range(1, dim_y + 1): for k in range(1, dim_z + 1): - out[0, i - 1, j - 1, k - 1] = ( - arr[i + 1, j, k] - arr[i - 1, j, k] - ) * scale_x - out[1, i - 1, j - 1, k - 1] = ( - arr[i, j + 1, k] - arr[i, j - 1, k] - ) * scale_y - out[2, i - 1, j - 1, k - 1] = ( - arr[i, j, k + 1] - arr[i, j, k - 1] - ) * scale_z + if method == "central": + out[0, i - 1, j - 1, k - 1] = ( + arr[i + 1, j, k] - arr[i - 1, j, k] + ) * scale_x + out[1, i - 1, j - 1, k - 1] = ( + arr[i, j + 1, k] - arr[i, j - 1, k] + ) * scale_y + out[2, i - 1, j - 1, k - 1] = ( + arr[i, j, k + 1] - arr[i, j, k - 1] + ) * scale_z + elif method == "forward": + out[0, i - 1, j - 1, k - 1] = ( + arr[i + 1, j, k] - arr[i, j, k] + ) * scale_x + out[1, i - 1, j - 1, k - 1] = ( + arr[i, j + 1, k] - arr[i, j, k] + ) * scale_y + out[2, i - 1, j - 1, k - 1] = ( + arr[i, j, k + 1] - arr[i, j, k] + ) * scale_z + elif method == "backward": + out[0, i - 1, j - 1, k - 1] = ( + arr[i, j, k] - arr[i - 1, j, k] + ) * scale_x + out[1, i - 1, j - 1, k - 1] = ( + arr[i, j, k] - arr[i, j - 1, k] + ) * scale_y + out[2, i - 1, j - 1, k - 1] = ( + arr[i, j, k] - arr[i, j, k - 1] + ) * scale_z return gradient # type: ignore @CartesianGrid.register_operator("gradient", rank_in=0, rank_out=1) def make_gradient( - grid: CartesianGrid, backend: Literal["auto", "numba", "scipy"] = "auto" + grid: CartesianGrid, + backend: Literal["auto", "numba", "scipy"] = "auto", + method: Literal["central", "forward", "backward"] = "central", ) -> OperatorType: """make a gradient operator on a Cartesian grid @@ -644,6 +721,9 @@ def make_gradient( backend (str): Backend used for calculating the gradient operator. If backend='auto', a suitable backend is chosen automatically. + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. 
Returns: A function that can be applied to an array of values @@ -659,18 +739,18 @@ def make_gradient( if backend == "numba": if dim == 1: - gradient = _make_gradient_numba_1d(grid) + gradient = _make_gradient_numba_1d(grid, method=method) elif dim == 2: - gradient = _make_gradient_numba_2d(grid) + gradient = _make_gradient_numba_2d(grid, method=method) elif dim == 3: - gradient = _make_gradient_numba_3d(grid) + gradient = _make_gradient_numba_3d(grid, method=method) else: raise NotImplementedError( f"Numba gradient operator not implemented for dimension {dim}" ) elif backend == "scipy": - gradient = _make_gradient_scipy_nd(grid) + gradient = _make_gradient_scipy_nd(grid, method=method) else: raise ValueError(f"Backend `{backend}` is not defined") diff --git a/pde/grids/operators/common.py b/pde/grids/operators/common.py index 04d17c81..b2ba3bec 100644 --- a/pde/grids/operators/common.py +++ b/pde/grids/operators/common.py @@ -32,8 +32,8 @@ def make_derivative( axis (int): The axis along which the derivative will be taken method (str): - The method for calculating the derivative. Possible values are - 'central', 'forward', and 'backward'. + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. Returns: A function that can be applied to an full array of values including those at diff --git a/pde/grids/operators/polar_sym.py b/pde/grids/operators/polar_sym.py index d461492f..63095f84 100644 --- a/pde/grids/operators/polar_sym.py +++ b/pde/grids/operators/polar_sym.py @@ -62,7 +62,9 @@ def laplace(arr: np.ndarray, out: np.ndarray) -> None: @PolarSymGrid.register_operator("gradient", rank_in=0, rank_out=1) @fill_in_docstring -def make_gradient(grid: PolarSymGrid) -> OperatorType: +def make_gradient( + grid: PolarSymGrid, method: Literal["central", "forward", "backward"] = "central" +) -> OperatorType: """make a discretized gradient operator for a polar grid {DESCR_POLAR_GRID} @@ -70,6 +72,9 @@ def make_gradient(grid: PolarSymGrid) -> OperatorType: Args: grid (:class:`~pde.grids.spherical.PolarSymGrid`): The polar grid for which this operator will be defined + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. 
Returns: A function that can be applied to an array of values @@ -78,14 +83,23 @@ def make_gradient(grid: PolarSymGrid) -> OperatorType: # calculate preliminary quantities dim_r = grid.shape[0] - dr = grid.discretization[0] - scale_r = 1 / (2 * dr) + if method == "central": + scale_r = 0.5 / grid.discretization[0] + elif method in {"forward", "backward"}: + scale_r = 1 / grid.discretization[0] + else: + raise ValueError(f"Unknown derivative type `{method}`") @jit def gradient(arr: np.ndarray, out: np.ndarray) -> None: """apply gradient operator to array `arr`""" for i in range(1, dim_r + 1): # iterate inner radial points - out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r + if method == "central": + out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r + elif method == "forward": + out[0, i - 1] = (arr[i + 1] - arr[i]) * scale_r + elif method == "backward": + out[0, i - 1] = (arr[i] - arr[i - 1]) * scale_r out[1, i - 1] = 0 # no angular dependence by definition return gradient # type: ignore diff --git a/pde/grids/operators/spherical_sym.py b/pde/grids/operators/spherical_sym.py index b0d25a45..26e9313f 100644 --- a/pde/grids/operators/spherical_sym.py +++ b/pde/grids/operators/spherical_sym.py @@ -87,7 +87,10 @@ def laplace(arr: np.ndarray, out: np.ndarray) -> None: @SphericalSymGrid.register_operator("gradient", rank_in=0, rank_out=1) @fill_in_docstring -def make_gradient(grid: SphericalSymGrid) -> OperatorType: +def make_gradient( + grid: SphericalSymGrid, + method: Literal["central", "forward", "backward"] = "central", +) -> OperatorType: """make a discretized gradient operator for a spherical grid {DESCR_SPHERICAL_GRID} @@ -95,6 +98,9 @@ def make_gradient(grid: SphericalSymGrid) -> OperatorType: Args: grid (:class:`~pde.grids.spherical.SphericalSymGrid`): The polar grid for which this operator will be defined + method (str): + The method for calculating the derivative. Possible values are 'central', + 'forward', and 'backward'. 
Returns: A function that can be applied to an array of values @@ -103,15 +109,23 @@ def make_gradient(grid: SphericalSymGrid) -> OperatorType: # calculate preliminary quantities dim_r = grid.shape[0] - dr = grid.discretization[0] - - scale_r = 1 / (2 * dr) + if method == "central": + scale_r = 0.5 / grid.discretization[0] + elif method in {"forward", "backward"}: + scale_r = 1 / grid.discretization[0] + else: + raise ValueError(f"Unknown derivative type `{method}`") @jit def gradient(arr: np.ndarray, out: np.ndarray) -> None: """apply gradient operator to array `arr`""" for i in range(1, dim_r + 1): # iterate inner radial points - out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r + if method == "central": + out[0, i - 1] = (arr[i + 1] - arr[i - 1]) * scale_r + elif method == "forward": + out[0, i - 1] = (arr[i + 1] - arr[i]) * scale_r + elif method == "backward": + out[0, i - 1] = (arr[i] - arr[i - 1]) * scale_r out[1, i - 1] = out[2, i - 1] = 0 # no angular dependence by definition return gradient # type: ignore diff --git a/tests/grids/operators/test_cartesian_operators.py b/tests/grids/operators/test_cartesian_operators.py index 1ad231be..603480ab 100644 --- a/tests/grids/operators/test_cartesian_operators.py +++ b/tests/grids/operators/test_cartesian_operators.py @@ -168,13 +168,14 @@ def test_gradient_1d(): @pytest.mark.parametrize("ndim", [1, 2, 3]) +@pytest.mark.parametrize("method", ["central", "forward", "backward"]) @pytest.mark.parametrize("periodic", [True, False]) -def test_gradient_cart(ndim, periodic, rng): +def test_gradient_cart(ndim, method, periodic, rng): """test different gradient operators""" bcs = _get_random_grid_bcs(ndim, dx="uniform", periodic=periodic) field = ScalarField.random_uniform(bcs.grid, rng=rng) - res1 = field.gradient(bcs, backend="scipy").data - res2 = field.gradient(bcs, backend="numba").data + res1 = field.gradient(bcs, backend="scipy", method=method).data + res2 = field.gradient(bcs, backend="numba", method=method).data assert res1.shape == (ndim,) + bcs.grid.shape np.testing.assert_allclose(res1, res2) diff --git a/tests/grids/operators/test_polar_operators.py b/tests/grids/operators/test_polar_operators.py index 67ccb5e1..a32b5bc1 100644 --- a/tests/grids/operators/test_polar_operators.py +++ b/tests/grids/operators/test_polar_operators.py @@ -30,6 +30,10 @@ def test_findiff_polar(): np.testing.assert_allclose(grad.data[0, :], [1, 3, -6]) grad = s.gradient(bc="derivative") np.testing.assert_allclose(grad.data[0, :], [1, 3, 2]) + grad = s.gradient(bc="derivative", method="forward") + np.testing.assert_allclose(grad.data[0, :], [2, 4, 0]) + grad = s.gradient(bc="derivative", method="backward") + np.testing.assert_allclose(grad.data[0, :], [0, 2, 4]) # test divergence div = v.divergence(bc=["derivative", "value"]) diff --git a/tests/grids/operators/test_spherical_operators.py b/tests/grids/operators/test_spherical_operators.py index b17dac66..3083d0e1 100644 --- a/tests/grids/operators/test_spherical_operators.py +++ b/tests/grids/operators/test_spherical_operators.py @@ -30,6 +30,10 @@ def test_findiff_sph(): np.testing.assert_allclose(grad.data[0, :], [1, 3, -6]) grad = s.gradient(bc="derivative") np.testing.assert_allclose(grad.data[0, :], [1, 3, 2]) + grad = s.gradient(bc="derivative", method="forward") + np.testing.assert_allclose(grad.data[0, :], [2, 4, 0]) + grad = s.gradient(bc="derivative", method="backward") + np.testing.assert_allclose(grad.data[0, :], [0, 2, 4]) # test divergence div = v.divergence(bc=["derivative", "value"], 
conservative=False)
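
For reference, a minimal usage sketch of the new `method` argument, mirroring the calls
exercised in the tests above. The grid, boundary condition, and test field below are
illustrative assumptions and not part of the patch:

    from pde import CartesianGrid, ScalarField

    # simple 1d grid and a smooth test field (illustrative choices)
    grid = CartesianGrid([[0, 1]], 8)
    field = ScalarField.from_expression(grid, "x**2")

    # `method` selects the finite-difference stencil; "central" (the default)
    # keeps the previous behavior
    grad_central = field.gradient(bc="derivative", method="central")
    grad_forward = field.gradient(bc="derivative", method="forward")
    grad_backward = field.gradient(bc="derivative", method="backward")

    # forward/backward differences are one-sided and first-order accurate, so for
    # this convex field they bracket the second-order central result in the interior
    print(grad_central.data[0])
    print(grad_forward.data[0])
    print(grad_backward.data[0])

The same keyword is accepted by the Cartesian, polar, and spherical gradient operators;
in the scipy backend it selects the stencils [-0.5, 0, 0.5], [0, -1, 1], and [-1, 1, 0]
for 'central', 'forward', and 'backward', respectively.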