Support writing complex valued functions (#31)
* Support writing complex valued functions

* Flake8

* Various extra dtype additions to the tests. Currently failing on elements requiring permutations

* Various updates to testing to try to avoid MPI communicator duplication

* Fix parallel tests by copying data when passing it to the current finite element. The finite element should be templated over the value type.

* Fix legacy reader and split coverage runs into two steps in serial and parallel

* Flake8

* Add file

* Dtype mypy + import sort
jorgensd authored Sep 21, 2023
1 parent 08739b9 commit 7bae60b
Showing 9 changed files with 354 additions and 187 deletions.
13 changes: 8 additions & 5 deletions .github/workflows/test_package.yml
@@ -47,12 +47,15 @@ jobs:
- name: Install package
run: python3 -m pip install .[test]

- name: Run tests
run: coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/

- name: Run tests in parallel
run: mpirun -n 2 coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/
- name: Run tests (in two steps to avoid too many mpi communicator duplications)
run: |
coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/ --ignore ./tests/test_checkpointing_vector.py
coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/test_checkpointing_vector.py
- name: Run tests in parallel (in two steps to avoid too many mpi comm duplications)
run: |
mpirun -n 2 coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/ --ignore ./tests/test_checkpointing_vector.py
mpirun -n 2 coverage run --rcfile=.coveragerc -m mpi4py -m pytest -xvs ./tests/test_checkpointing_vector.py
- name: Combine coverage reports
run: |
39 changes: 24 additions & 15 deletions src/adios4dolfinx/adios2_helpers.py
@@ -1,21 +1,28 @@
from mpi4py import MPI
import pathlib
import numpy as np
import numpy.typing as npt
import adios2
from .utils import compute_local_range
from typing import Tuple
import dolfinx.graph

import adios2
import dolfinx.cpp.graph
import dolfinx.graph
import numpy as np
import numpy.typing as npt
from mpi4py import MPI

from .utils import compute_local_range, valid_function_types

"""
Helpers reading/writing data with ADIOS2
"""

__all__ = ["read_array", "read_dofmap", "read_cell_perms"]
__all__ = ["read_array", "read_dofmap", "read_cell_perms", "adios_to_numpy_dtype"]

adios_to_numpy_dtype = {"float": np.float32, "double": np.float64,
"float complex": np.complex64, "double complex": np.complex128,
"uint32_t": np.uint32}


def read_cell_perms(
adios: adios2.adios2.ADIOS,
comm: MPI.Intracomm,
filename: pathlib.Path,
variable: str,
@@ -27,6 +34,7 @@ def read_cell_perms(
Split in continuous chunks based on number of cells in the mesh (global).
Args:
adios: The ADIOS instance
comm: The MPI communicator used to read the data
filename: Path to input file
variable: Name of cell-permutation variable
@@ -42,7 +50,6 @@ def read_cell_perms(

# Open ADIOS engine
io_name = f"{variable=}_reader"
adios = adios2.ADIOS(comm)
io = adios.DeclareIO(io_name)
io.SetEngine(engine)
infile = io.Open(str(filename), adios2.Mode.Read)
@@ -67,7 +74,7 @@ def read_cell_perms(
[[local_cell_range[0]], [local_cell_range[1] - local_cell_range[0]]]
)
in_perm = np.empty(
local_cell_range[1] - local_cell_range[0], dtype=perm_var.Type().strip("_t")
local_cell_range[1] - local_cell_range[0], dtype=adios_to_numpy_dtype[perm_var.Type()]
)
infile.Get(perm_var, in_perm, adios2.Mode.Sync)
infile.EndStep()
@@ -78,6 +85,7 @@


def read_dofmap(
adios: adios2.adios2.ADIOS,
comm: MPI.Intracomm,
filename: pathlib.Path,
dofmap: str,
@@ -90,6 +98,7 @@ def read_dofmap(
split in continuous chunks based on number of cells in the mesh (global).
Args:
adios: The ADIOS instance
comm: The MPI communicator used to read the data
filename: Path to input file
dofmap: Name of variable containing dofmap
@@ -107,7 +116,6 @@ def read_dofmap(

# Open ADIOS engine
io_name = f"{dofmap=}_reader"
adios = adios2.ADIOS(comm)
io = adios.DeclareIO(io_name)
io.SetEngine(engine)
infile = io.Open(str(filename), adios2.Mode.Read)
@@ -157,20 +165,21 @@ def read_dofmap(


def read_array(
filename: pathlib.Path, array_name: str, engine: str, comm: MPI.Intracomm
) -> Tuple[npt.NDArray[np.float64], int]:
adios: adios2.adios2.ADIOS,
filename: pathlib.Path, array_name: str, engine: str, comm: MPI.Intracomm
) -> Tuple[npt.NDArray[valid_function_types], int]:
"""
Read an array from file, return the global starting position of the local array
Args:
adios: The ADIOS instance
filename: Path to file to read array from
array_name: Name of array in file
engine: Name of engine to use to read file
comm: MPI communicator used for reading the data
Returns:
Local part of array and its global starting position
"""
adios = adios2.ADIOS(comm)
io = adios.DeclareIO("ArrayReader")
io.SetEngine(engine)
infile = io.Open(str(filename), adios2.Mode.Read)
@@ -190,10 +199,10 @@ def read_array(

if len(arr_shape) == 1:
arr.SetSelection([[arr_range[0]], [arr_range[1] - arr_range[0]]])
vals = np.empty(arr_range[1] - arr_range[0], dtype=np.dtype(arr.Type().strip("_t")))
vals = np.empty(arr_range[1] - arr_range[0], dtype=adios_to_numpy_dtype[arr.Type()])
else:
arr.SetSelection([[arr_range[0], 0], [arr_range[1] - arr_range[0], arr_shape[1]]])
vals = np.empty((arr_range[1] - arr_range[0], arr_shape[1]), dtype=np.dtype(arr.Type().strip("_t")))
vals = np.empty((arr_range[1] - arr_range[0], arr_shape[1]), dtype=adios_to_numpy_dtype[arr.Type()])

infile.Get(arr, vals, adios2.Mode.Sync)
infile.EndStep()
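
The signature changes above all take an existing adios2.ADIOS object instead of constructing a new one per call; creating an ADIOS object typically duplicates the MPI communicator handed to it, so sharing one instance keeps the number of live communicators bounded (the same concern behind the two-step test runs). A minimal sketch of the intended call pattern, with an illustrative file path and cell count:

import pathlib

import adios2
from mpi4py import MPI

from adios4dolfinx.adios2_helpers import read_array, read_cell_perms

comm = MPI.COMM_WORLD
adios = adios2.ADIOS(comm)                     # one instance, reused by every helper call
checkpoint = pathlib.Path("checkpoint.bp")     # illustrative path
num_cells_global = 128                         # illustrative; normally taken from the mesh

values, start = read_array(adios, checkpoint, "Values", "BP4", comm)
perms = read_cell_perms(adios, comm, checkpoint, "CellPermutations", num_cells_global, "BP4")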
50 changes: 30 additions & 20 deletions src/adios4dolfinx/checkpointing.py
@@ -5,20 +5,20 @@
# SPDX-License-Identifier: MIT

from pathlib import Path

import adios2
import basix
import dolfinx
import numpy as np
import ufl
from mpi4py import MPI

from .comm_helpers import (
send_and_recv_cell_perm,
send_dofs_and_recv_values,
send_dofmap_and_recv_values,
)
from .utils import compute_local_range, index_owner, compute_dofmap_pos
from .adios2_helpers import read_array, read_cell_perms, read_dofmap
from .adios2_helpers import (adios_to_numpy_dtype, read_array, read_cell_perms,
read_dofmap)
from .comm_helpers import (send_and_recv_cell_perm,
send_dofmap_and_recv_values,
send_dofs_and_recv_values)
from .utils import compute_dofmap_pos, compute_local_range, index_owner

__all__ = [
"read_mesh",
@@ -164,7 +164,7 @@ def read_function(u: dolfinx.fem.Function, filename: Path, engine: str = "BP4"):
"""
mesh = u.function_space.mesh
comm = mesh.comm

adios = adios2.ADIOS(comm)
# ----------------------Step 1---------------------------------
# Compute index of input cells and get cell permutation
num_owned_cells = mesh.topology.index_map(mesh.topology.dim).size_local
@@ -188,7 +188,7 @@ def read_function(u: dolfinx.fem.Function, filename: Path, engine: str = "BP4"):
dofmap_path = "Dofmap"
xdofmap_path = "XDofmap"
input_dofmap = read_dofmap(
comm, filename, dofmap_path, xdofmap_path, num_cells_global, engine
adios, comm, filename, dofmap_path, xdofmap_path, num_cells_global, engine
)
# Compute owner of dofs in dofmap
num_dofs_global = (
@@ -200,7 +200,7 @@ def read_function(u: dolfinx.fem.Function, filename: Path, engine: str = "BP4"):
# --------------------Step 4-----------------------------------
# Read array from file and communicate them to input dofmap process
array_path = "Values"
input_array, starting_pos = read_array(filename, array_path, engine, comm)
input_array, starting_pos = read_array(adios, filename, array_path, engine, comm)
recv_array = send_dofs_and_recv_values(
input_dofmap.array, dof_owner, comm, input_array, starting_pos
)
@@ -210,24 +210,35 @@ def read_function(u: dolfinx.fem.Function, filename: Path, engine: str = "BP4"):
# Then apply current permutation to the local data
element = u.function_space.element
if element.needs_dof_transformations:
is_real = np.isrealobj(recv_array)
bs = u.function_space.dofmap.bs

# Read input cell permutations on dofmap process
local_input_range = compute_local_range(comm, num_cells_global)
input_local_cell_index = inc_cells - local_input_range[0]
input_perms = read_cell_perms(
comm, filename, "CellPermutations", num_cells_global, engine
adios, comm, filename, "CellPermutations", num_cells_global, engine
)

# First invert input data to reference element then transform to current mesh
for i, l_cell in enumerate(input_local_cell_index):
start, end = input_dofmap.offsets[l_cell], input_dofmap.offsets[l_cell + 1]
element.apply_transpose_dof_transformation(
recv_array[start:end], input_perms[l_cell], bs
)
element.apply_inverse_transpose_dof_transformation(
recv_array[start:end], inc_perms[i], bs
)
if is_real:
element.apply_transpose_dof_transformation(
recv_array[start:end], input_perms[l_cell], bs
)
element.apply_inverse_transpose_dof_transformation(
recv_array[start:end], inc_perms[i], bs
)
else:
# NOTE: dolfinx.fem.FiniteElement is only templated over scalar precision, not complex types
parts = [recv_array[start:end].real.copy(), recv_array[start:end].imag.copy()]
for part in parts:
element.apply_transpose_dof_transformation(
part, input_perms[l_cell], bs)
element.apply_inverse_transpose_dof_transformation(
part, inc_perms[i], bs)
recv_array[start:end] = parts[0] + 1j*parts[1]

# ------------------Step 6----------------------------------------
# For each dof owned by a process, find the local position in the dofmap.
@@ -303,10 +314,9 @@ def read_mesh(
[[geometry_range[0], 0], [geometry_range[1] - geometry_range[0], x_shape[1]]]
)
mesh_geometry = np.empty(
(geometry_range[1] - geometry_range[0], x_shape[1]), dtype=np.float64
(geometry_range[1] - geometry_range[0], x_shape[1]), dtype=adios_to_numpy_dtype[geometry.Type()]
)
infile.Get(geometry, mesh_geometry, adios2.Mode.Deferred)

# Get mesh topology (distributed)
if "Topology" not in io.AvailableVariables().keys():
raise KeyError("Mesh topology not found at Topology'")
@@ -372,7 +382,7 @@ def write_function(
outfile.BeginStep()
val_var = io.DefineVariable(
"Values",
np.zeros(num_dofs_local, dtype=np.float64),
np.zeros(num_dofs_local, dtype=u.dtype),
shape=[num_dofs_global],
start=[local_start],
count=[num_dofs_local],
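
Taken together, these changes let a complex-valued Function round-trip through a checkpoint. A minimal usage sketch under a few assumptions: that write_mesh is the companion mesh writer exported by this package, that read_mesh takes (comm, filename, engine, ghost_mode), and that the installed DOLFINx accepts a dtype argument to Function; the mesh, space and file name are illustrative.

from pathlib import Path

import dolfinx
import numpy as np
from mpi4py import MPI

import adios4dolfinx

mesh = dolfinx.mesh.create_unit_square(MPI.COMM_WORLD, 4, 4)
V = dolfinx.fem.FunctionSpace(mesh, ("Lagrange", 1))
u = dolfinx.fem.Function(V, dtype=np.complex128)
u.interpolate(lambda x: x[0] + 1j * x[1])

filename = Path("u_checkpoint.bp")             # illustrative path
adios4dolfinx.write_mesh(mesh, filename)       # assumed companion mesh writer
adios4dolfinx.write_function(u, filename)

# Read back: reload the mesh, build a matching space, fill the function in place
in_mesh = adios4dolfinx.read_mesh(MPI.COMM_WORLD, filename, "BP4",
                                  dolfinx.mesh.GhostMode.shared_facet)
V_in = dolfinx.fem.FunctionSpace(in_mesh, ("Lagrange", 1))
u_in = dolfinx.fem.Function(V_in, dtype=np.complex128)
adios4dolfinx.read_function(u_in, filename)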
32 changes: 18 additions & 14 deletions src/adios4dolfinx/comm_helpers.py
@@ -4,7 +4,8 @@
import numpy.typing as npt
from mpi4py import MPI

from .utils import compute_local_range, find_first
from .utils import compute_local_range, find_first, valid_function_types


__all__ = [
"send_dofmap_and_recv_values",
@@ -15,6 +16,9 @@
Helpers for sending and receiving values for checkpointing
"""

numpy_to_mpi = {np.float64: MPI.DOUBLE, np.float32: MPI.FLOAT,
np.complex64: MPI.COMPLEX, np.complex128: MPI.DOUBLE_COMPLEX}
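
A small runnable sketch of how this lookup feeds the neighbourhood collectives below; the single-rank self-neighbourhood and the payload are purely illustrative, and numpy_to_mpi is assumed to be in scope from this module.

import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
# Trivial neighbourhood where each rank only talks to itself, just to exercise the lookup
graph_comm = comm.Create_dist_graph_adjacent([comm.rank], [comm.rank], reorder=False)

send_buf = np.arange(4, dtype=np.complex128) * (1 + 1j)   # illustrative complex payload
recv_buf = np.zeros_like(send_buf)
counts = np.array([send_buf.size], dtype=np.int32)

datatype = numpy_to_mpi[send_buf.dtype.type]               # MPI.DOUBLE_COMPLEX in this case
graph_comm.Neighbor_alltoallv([send_buf, counts, datatype],
                              [recv_buf, counts, datatype])
graph_comm.Free()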


def send_dofmap_and_recv_values(
comm: MPI.Intracomm,
@@ -24,9 +28,9 @@ def send_dofmap_and_recv_values(
input_cells: npt.NDArray[np.int64],
dofmap_pos: npt.NDArray[np.int32],
num_cells_global: np.int64,
values: npt.NDArray[np.float64],
values: npt.NDArray[valid_function_types],
dofmap_offsets: npt.NDArray[np.int32],
) -> npt.NDArray[np.float64]:
) -> npt.NDArray[valid_function_types]:
"""
Given a set of positions in input dofmap, give the global input index of this dofmap entry
in input file.
@@ -102,7 +106,7 @@ def send_dofmap_and_recv_values(
mesh_to_data_comm.Neighbor_alltoallv(s_msg, r_msg)

local_input_range = compute_local_range(comm, num_cells_global)
values_to_distribute = np.zeros_like(inc_pos, dtype=np.float64)
values_to_distribute = np.zeros_like(inc_pos, dtype=values.dtype)
for i, (cell, pos) in enumerate(zip(inc_cells, inc_pos)):
l_cell = cell - local_input_range[0]
values_to_distribute[i] = values[dofmap_offsets[l_cell] + pos]
@@ -112,13 +116,13 @@ def send_dofmap_and_recv_values(
dest_ranks.tolist(), source_ranks.tolist(), reorder=False
)

incoming_global_dofs = np.zeros(sum(out_size), dtype=np.float64)
s_msg = [values_to_distribute, recv_size, MPI.DOUBLE]
r_msg = [incoming_global_dofs, out_size, MPI.DOUBLE]
incoming_global_dofs = np.zeros(sum(out_size), dtype=values.dtype)
s_msg = [values_to_distribute, recv_size, numpy_to_mpi[values.dtype.type]]
r_msg = [incoming_global_dofs, out_size, numpy_to_mpi[values.dtype.type]]
data_to_mesh_comm.Neighbor_alltoallv(s_msg, r_msg)

# Sort incoming global dofs as they were inputted
sorted_global_dofs = np.zeros_like(incoming_global_dofs, dtype=np.float64)
sorted_global_dofs = np.zeros_like(incoming_global_dofs, dtype=values.dtype)
assert len(incoming_global_dofs) == len(input_cells)
for i in range(len(dest_ranks)):
for j in range(out_size[i]):
@@ -203,7 +207,7 @@ def send_dofs_and_recv_values(
input_dofmap: npt.NDArray[np.int64],
dofmap_owners: npt.NDArray[np.int32],
comm: MPI.Intracomm,
input_array: npt.NDArray[np.float64],
input_array: npt.NDArray[valid_function_types],
array_start: int,
):
"""
@@ -255,18 +259,18 @@ def send_dofs_and_recv_values(
dofmap_to_values.Neighbor_alltoallv(s_msg, r_msg)

# Send back appropriate input values
sending_values = np.zeros(len(inc_dofs), dtype=np.float64)
sending_values = np.zeros(len(inc_dofs), dtype=input_array.dtype)
for i, dof in enumerate(inc_dofs):
sending_values[i] = input_array[dof - array_start]

values_to_dofmap = comm.Create_dist_graph_adjacent(dest, source, reorder=False)
inc_values = np.zeros_like(out_dofs, dtype=np.float64)
s_msg_rev = [sending_values, recv_size, MPI.DOUBLE]
r_msg_rev = [inc_values, out_size, MPI.DOUBLE]
inc_values = np.zeros_like(out_dofs, dtype=input_array.dtype)
s_msg_rev = [sending_values, recv_size, numpy_to_mpi[input_array.dtype.type]]
r_msg_rev = [inc_values, out_size, numpy_to_mpi[input_array.dtype.type]]
values_to_dofmap.Neighbor_alltoallv(s_msg_rev, r_msg_rev)

# Sort inputs according to local dof number (input process)
values = np.empty_like(inc_values, dtype=np.float64)
values = np.empty_like(inc_values, dtype=input_array.dtype)

for i in range(len(dest_ranks)):
for j in range(out_size[i]):
18 changes: 7 additions & 11 deletions src/adios4dolfinx/legacy_readers.py
@@ -15,9 +15,10 @@
import ufl
from mpi4py import MPI

from .adios2_helpers import read_array
from .adios2_helpers import adios_to_numpy_dtype, read_array
from .comm_helpers import send_dofs_and_recv_values
from .utils import compute_dofmap_pos, compute_local_range, find_first, index_owner
from .utils import (compute_dofmap_pos, compute_local_range, find_first,
index_owner)

__all__ = [
"read_mesh_from_legacy_h5",
@@ -256,8 +257,8 @@ def read_mesh_geometry(io: adios2.ADIOS, infile: adios2.Engine, group: str):
[[local_range[0], 0], [local_range[1] - local_range[0], shape[1]]]
)
mesh_geometry = np.empty(
(local_range[1] - local_range[0], shape[1]), dtype=np.float64
)
(local_range[1] - local_range[0], shape[1]), dtype=adios_to_numpy_dtype[geometry.Type()])

infile.Get(geometry, mesh_geometry, adios2.Mode.Sync)
return mesh_geometry

@@ -409,13 +410,8 @@ def read_function_from_legacy_h5(
# NOTE: USE NBX in C++

# Read input data
local_array, starting_pos = read_array(filename, f"/{group}/{vector_group}", "HDF5", comm)

unique_dof_owners = np.unique(dof_owner)
mesh_to_dof_comm = mesh.comm.Create_dist_graph(
[mesh.comm.rank], [len(unique_dof_owners)], unique_dof_owners, reorder=False
)
dof_source, dof_dest, _ = mesh_to_dof_comm.Get_dist_neighbors()
adios = adios2.ADIOS(comm)
local_array, starting_pos = read_array(adios, filename, f"/{group}/{vector_group}", "HDF5", comm)

# Send global dof indices to correct input process, and receive value of given dof
local_values = send_dofs_and_recv_values(