Add ParallelPointCloudDomain
Added ParallelPointCloudDomain for specialized dispatch of MPI and non-MPI solvers. Added a redirect that automatically swaps to ParallelPointCloudDomain when mpi_isparallel() is true. Still need to add MPICache instantiation, which will be based on the RBFD MPI code, and then specialize the solver to an MPI version.
jarias9 committed Jun 6, 2024
1 parent f1060fe commit 47d2207
Showing 8 changed files with 275 additions and 91 deletions.
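The redirect described in the commit message can be pictured with a small standalone sketch (toy types and a hand-rolled parallel check; the package's actual implementation lives in PointCloudDomain.jl below):

using MPI

struct ToySerialDomain end
struct ToyParallelDomain end

# True when running under MPI with more than one rank
toy_mpi_isparallel() = MPI.Initialized() && MPI.Comm_size(MPI.COMM_WORLD) > 1

# Single front-end constructor; callers never pick the parallel type themselves
make_domain() = toy_mpi_isparallel() ? ToyParallelDomain() : ToySerialDomain()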
12 changes: 6 additions & 6 deletions src/MeshfreeTrixi.jl
@@ -11,11 +11,11 @@ module MeshfreeTrixi
# Include other packages that are used in MeshfreeTrixi.jl
# (standard library packages first, other packages next, all of them sorted alphabetically)

-# using LinearAlgebra: LinearAlgebra, Diagonal, diag, dot, mul!, norm, cross, normalize, I,
-#                      UniformScaling, det
-# using Printf: @printf, @sprintf, println
-# using SparseArrays: AbstractSparseMatrix, AbstractSparseMatrixCSC, sparse, droptol!,
-#                     rowvals, nzrange, nonzeros, spzeros
+using LinearAlgebra: LinearAlgebra, Diagonal, diag, dot, mul!, norm, cross, normalize, I,
+                     UniformScaling, det
+using Printf: @printf, @sprintf, println
+using SparseArrays: AbstractSparseMatrix, AbstractSparseMatrixCSC, sparse, droptol!,
+                    rowvals, nzrange, nonzeros, spzeros

# import @reexport now to make it available for further imports/exports
using Reexport: @reexport
@@ -116,7 +116,7 @@ using TimerOutputs: TimerOutputs, @notimeit, TimerOutput, print_timer, reset_timer!
@reexport using SimpleUnPack: @unpack
using SimpleUnPack: @pack!
using NaNMath
-using Printf: @printf, @sprintf
+# using Printf: @printf, @sprintf, println
using WriteVTK: vtk_grid, MeshCell, VTKCellTypes, paraview_collection, vtk_save
using IterativeSolvers
import Base: *
1 change: 1 addition & 0 deletions src/callbacks_step/info.jl
@@ -86,6 +86,7 @@ function squeeze(message, max_width; filler::Char = '…')
end

# Print a summary with a box around it with a given heading and a setup of key=>value pairs
+# Replace with original Trixi def that is MPI-aware
function summary_box(io::IO, heading, setup = [])
summary_header(io, heading)
for (key, value) in setup
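For reference, one way the MPI-aware replacement could look — a sketch assuming Trixi.jl-style helpers (mpi_isroot(), summary_line, summary_footer), not the upstream definition:

function summary_box(io::IO, heading, setup = [])
    mpi_isroot() || return nothing # only rank 0 prints; other ranks stay silent
    summary_header(io, heading)
    for (key, value) in setup
        summary_line(io, key, value)
    end
    summary_footer(io)
end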
104 changes: 104 additions & 0 deletions src/domains/PointCloudDomain/ParallelPointCloud.jl
@@ -0,0 +1,104 @@
# everything related to an MPI-parallel PointCloudDomain
# MPICache below is adapted from Trixi.jl's parallel DG code and still carries
# some DG-specific fields (e.g., mortars)

# TODO: MPI dimension agnostic
mutable struct MPICache{uEltype <: Real}
mpi_neighbor_ranks::Vector{Int}
mpi_neighbor_interfaces::Vector{Vector{Int}}
mpi_neighbor_mortars::Vector{Vector{Int}}
mpi_send_buffers::Vector{Vector{uEltype}}
mpi_recv_buffers::Vector{Vector{uEltype}}
mpi_send_requests::Vector{MPI.Request}
mpi_recv_requests::Vector{MPI.Request}
n_elements_by_rank::OffsetArray{Int, 1, Array{Int, 1}}
n_elements_global::Int
first_element_global_id::Int
end

function MPICache(uEltype)
# MPI communication "just works" for bitstypes only
if !isbitstype(uEltype)
throw(ArgumentError("MPICache only supports bitstypes, $uEltype is not a bitstype."))
end
mpi_neighbor_ranks = Vector{Int}(undef, 0)
mpi_neighbor_interfaces = Vector{Vector{Int}}(undef, 0)
mpi_neighbor_mortars = Vector{Vector{Int}}(undef, 0)
mpi_send_buffers = Vector{Vector{uEltype}}(undef, 0)
mpi_recv_buffers = Vector{Vector{uEltype}}(undef, 0)
mpi_send_requests = Vector{MPI.Request}(undef, 0)
mpi_recv_requests = Vector{MPI.Request}(undef, 0)
n_elements_by_rank = OffsetArray(Vector{Int}(undef, 0), 0:-1)
n_elements_global = 0
first_element_global_id = 0

MPICache{uEltype}(mpi_neighbor_ranks, mpi_neighbor_interfaces, mpi_neighbor_mortars,
mpi_send_buffers, mpi_recv_buffers,
mpi_send_requests, mpi_recv_requests,
n_elements_by_rank, n_elements_global,
first_element_global_id)
end
@inline Base.eltype(::MPICache{uEltype}) where {uEltype} = uEltype

"""
ParallelPointCloudDomain(NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
- `pd` contains point data structure.
- `boundary tags` dictionary of all boundary tags and associated point indices.
"""
# struct ParallelPointCloudDomain{Dim, Tv, Ti}
# pd::PointData{Dim, Tv, Ti} # Encapsulates points and neighbors
# boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}} # Boundary data
# end
### Actual ParallelPointCloudDomain for dispatching problems with
struct ParallelPointCloudDomain{NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
pd::PointDataT
boundary_tags::BoundaryFaceT
mpi::MPICache
unsaved_changes::Bool # Required for SaveSolutionCallback
end

# Workaround so other calls to ParallelPointCloudDomain will still work
function ParallelPointCloudDomain(pd::PointData{NDIMS, Tv, Ti},
                                  boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {
                                                                                            NDIMS,
                                                                                            Tv,
                                                                                            Ti
                                                                                            }
    # Use an empty MPICache until proper instantiation is in place
    return ParallelPointCloudDomain{NDIMS, PointData{NDIMS, Tv, Ti},
                                    Dict{Symbol, BoundaryData{Ti, Tv}}}(pd, boundary_tags,
                                                                        MPICache(Float64),
                                                                        false)
end

function ParallelPointCloudDomain(points::Vector{Tv}, neighbors::Vector{Vector{Ti}},
boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {
N,
Tv <:
SVector{N,
Float64},
Ti
}
    # NOTE: relies on the two-argument PointData constructor, which is currently
    # commented out in geometry_primatives.jl
    pointData = PointData(points, neighbors) # Create an instance of PointData
    # Dispatches to the two-argument method above, which fills in the MPI cache
    return ParallelPointCloudDomain(pointData, boundary_tags)
end

# Main function for instantiating all the necessary data for a ParallelPointCloudDomain
function ParallelPointCloudDomain(basis::RefPointData{NDIMS},
points::Vector{SVector{NDIMS, Float64}},
boundary_idxs::Vector{Vector{Int}},
boundary_normals::Vector{Vector{SVector{NDIMS, Float64}}},
boundary_names_dict::Dict{Symbol, Int}) where {NDIMS}
    pd = PointData(points, basis)
    boundary_tags = Dict(name => BoundaryData(boundary_idxs[idx], boundary_normals[idx])
                         for (name, idx) in boundary_names_dict)
    # TODO: construct a fully populated MPICache (to be based on the RBFD MPI code);
    # an empty cache keeps the struct well-formed in the meantime
    mpi = MPICache(Float64)
    return ParallelPointCloudDomain(pd, boundary_tags, mpi, false)
end

Base.ndims(::ParallelPointCloudDomain{NDIMS}) where {NDIMS} = NDIMS
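A quick illustration of the cache contract (assumes MPI.jl and OffsetArrays are loaded, as in the package): construction only checks the element type and yields empty buffers, to be populated once halo exchange is set up.

cache = MPICache(Float64)
eltype(cache)                    # Float64
isempty(cache.mpi_send_buffers)  # true; filled during MPI setup
# MPICache(BigFloat) throws an ArgumentError, since BigFloat is not a bitstype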
126 changes: 48 additions & 78 deletions src/domains/PointCloudDomain/PointCloudDomain.jl
@@ -1,101 +1,71 @@
-# Similar to StartUpDG.MeshData
-# This is the underlying data structure for PointCloudDomain
-# Mostly reworked
-struct PointData{Dim, Tv, Ti}
-    points::Vector{Tv} # Point coordinates
-    neighbors::Vector{Vector{Ti}} # Neighbors for each point
-    num_points::Int # Number of points
-    num_neighbors::Int # Number of neighbors lists
-    dx_min::Float64 # Minimum distance between points
-    dx_avg::Float64 # Average distance between points
-end
-
-## We need to calculate dx_min at runtime so we cannot use this constructor
-# function PointData(points::Vector{Tv},
-#                    neighbors::Vector{Vector{Ti}}) where {
-#                                                          Dim,
-#                                                          Tv <: SVector{Dim, Float64},
-#                                                          Ti
-#                                                          }
-#     PointData{Dim, Tv, Ti}(points, neighbors, length(points), length(neighbors[1]))
-# end
-
-"""
-    PointData(medusa_data::Vector{Tv}, basis) where {Tv <: SVector{Dim, Float64}}
-
-- `medusa_data` contains point positions for entire point cloud.
-- `basis` contains all basis information including the number of required neighbors to support required order of accuracy.
-"""
-function PointData(medusa_data::Vector{Tv},
-                   basis::RefPointData) where {Dim, Tv <: SVector{Dim, Float64}}
-    nv = basis.nv # The number of neighbors
-
-    # Calculate neighbor list for all points
-    kdtree = KDTree(medusa_data)
-    n_idxs, n_dists = knn(kdtree, medusa_data, nv, true)
-    dx_min = minimum(n_dists)
-
-    # Calculate avg and min distances between points
-    idxs_y, dists_y = knn(kdtree, medusa_data, 2, true)
-    dx_avg = mean(dists_y)[2]
-    dx_min = minimum(dists_y)[2]
-
-    # Instantiate PointData with the points and neighbors. The num_points and num_neighbors are automatically computed.
-    return PointData{Dim, Tv, Int}(medusa_data, n_idxs, length(medusa_data), nv, dx_min,
-                                   dx_avg)
-end
-
-struct BoundaryData{Ti <: Integer, Tv <: SVector{N, T} where {N, T <: Number}}
-    idx::Vector{Ti} # Indices of boundary points
-    normals::Vector{Tv} # Normals at boundary points
-end
-
-# struct PointCloudDomain{Dim, Tv, Ti}
-#     pd::PointData{Dim, Tv, Ti} # Encapsulates points and neighbors
-#     boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}} # Boundary data
-# end
-### Actual PointCloudDomain for dispatching problems with
-struct PointCloudDomain{NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
-    pd::PointDataT
-    boundary_tags::BoundaryFaceT
-    unsaved_changes::Bool # Required for SaveSolutionCallback
-end
-
-# Workaround so other calls to PointCloudDomain will still work
-function PointCloudDomain(pd::PointData{NDIMS, Tv, Ti},
-                          boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {NDIMS,
-                                                                                    Tv, Ti}
-    return PointCloudDomain{NDIMS, PointData{NDIMS, Tv, Ti},
-                            Dict{Symbol, BoundaryData{Ti, Tv}}}(pd, boundary_tags, false)
-end
-
-function PointCloudDomain(points::Vector{Tv}, neighbors::Vector{Vector{Ti}},
-                          boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {
-                                                                                    N,
-                                                                                    Tv <:
-                                                                                    SVector{N,
-                                                                                            Float64},
-                                                                                    Ti
-                                                                                    }
-    pointData = PointData(points, neighbors) # Create an instance of PointData
-    return PointCloudDomain(pointData,
-                            boundary_tags, false)
-end
-
-Base.ndims(::PointCloudDomain{NDIMS}) where {NDIMS} = NDIMS
-
-function Base.show(io::IO, mesh::PointCloudDomain{Dim, Tv, Ti}) where {Dim, Tv, Ti}
+"""
+    PointCloudDomain{NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
+
+Domain specification containing point cloud data structure and boundary tags
+for a point cloud domain. Includes specialization for MPI parallelism.
+"""
+# struct PointCloudDomain{NDIMS, domain_type, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
+#     domain_type::DomainType
+#     pd::PointDataT
+#     boundary_tags::BoundaryFaceT
+#     unsaved_changes::Bool # Required for SaveSolutionCallback
+# end
+
+include("SerialPointCloud.jl")
+include("ParallelPointCloud.jl")
+
+# const SerialPointCloudDomain{NDIMS} = PointCloudDomain{NDIMS, <:SerialPointCloud{NDIMS}}
+# const ParallelPointCloudDomain{NDIMS} = PointCloudDomain{NDIMS, <:ParallelPointCloud{NDIMS}}
+
+@inline mpi_parallel(mesh::PointCloudDomain) = False()
+# @inline mpi_parallel(mesh::SerialPointCloudDomain) = False()
+@inline mpi_parallel(mesh::ParallelPointCloudDomain) = True()
+
+# Primary constructor for MPI-aware PointCloudDomain
+function PointCloudDomain(basis::RefPointData{NDIMS},
+                          points::Vector{SVector{NDIMS, Float64}},
+                          boundary_idxs::Vector{Vector{Int}},
+                          boundary_normals::Vector{Vector{SVector{NDIMS, Float64}}},
+                          boundary_names_dict::Dict{Symbol, Int}) where {NDIMS}
+    # TODO: MPI, create nice interface for a parallel tree/mesh
+    if mpi_isparallel()
+        # TreeType = ParallelTree{NDIMS}
+        return ParallelPointCloudDomain(basis, points, boundary_idxs, boundary_normals,
+                                        boundary_names_dict)
+    else
+        # TreeType = SerialTree{NDIMS}
+        return SerialPointCloudDomain(basis, points, boundary_idxs, boundary_normals,
+                                      boundary_names_dict)
+    end
+
+    # Create mesh
+    # mesh = @trixi_timeit timer() "creation" TreeMesh{NDIMS, TreeType}(n_cells_max,
+    #                                                                   domain_center,
+    #                                                                   domain_length,
+    #                                                                   periodicity)
+
+    # # Initialize mesh
+    # initialize!(mesh, initial_refinement_level, refinement_patches, coarsening_patches)
+
+    # return mesh
+end
+
+# function Base.show(io::IO,
+#                    mesh::PointCloudDomain{NDIMS, MeshType}) where {NDIMS, MeshType}
+#     @nospecialize mesh # reduce precompilation time
+#     print(io, "$MeshType PointCloudDomain with NDIMS = $NDIMS.")
+# end
+
+function Base.show(io::IO,
+                   mesh::Union{PointCloudDomain{Dim, Tv, Ti},
+                               ParallelPointCloudDomain{Dim, Tv, Ti}}) where {Dim, Tv, Ti}
    print(io, "PointCloudDomain with NDIMS = $Dim")
end

function Base.show(io::IO, ::MIME"text/plain",
-                   mesh::PointCloudDomain{Dim, Tv, Ti}) where {Dim, Tv, Ti}
+                   mesh::Union{PointCloudDomain{Dim, Tv, Ti},
+                               ParallelPointCloudDomain{Dim, Tv, Ti}}) where {Dim, Tv, Ti}
    if get(io, :compact, false)
        show(io, mesh)
    else
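The mpi_parallel trait above enables compile-time branching in solver code. A standalone toy version of the pattern (Trixi.jl provides its own True/False singletons; they are re-declared here only to keep the sketch self-contained):

struct True end
struct False end

struct ToySerialMesh end
struct ToyParallelMesh end

mpi_parallel(::ToySerialMesh) = False()
mpi_parallel(::ToyParallelMesh) = True()

# Solver entry point forwards to a trait-specialized method
rhs_path(mesh) = rhs_path(mpi_parallel(mesh), mesh)
rhs_path(::False, mesh) = "local-only RHS evaluation"
rhs_path(::True, mesh) = "RHS with MPI halo exchange first"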
54 changes: 54 additions & 0 deletions src/domains/PointCloudDomain/SerialPointCloud.jl
@@ -0,0 +1,54 @@
"""
PointCloudDomain(NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
- `pd` contains point data structure.
- `boundary tags` dictionary of all boundary tags and associated point indices.
"""
# struct PointCloudDomain{Dim, Tv, Ti}
# pd::PointData{Dim, Tv, Ti} # Encapsulates points and neighbors
# boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}} # Boundary data
# end
### Actual PointCloudDomain for dispatching problems with
struct PointCloudDomain{NDIMS, PointDataT <: PointData{NDIMS}, BoundaryFaceT}
pd::PointDataT
boundary_tags::BoundaryFaceT
unsaved_changes::Bool # Required for SaveSolutionCallback
end

# Workaround so other calls to PointCloudDomain will still work
function PointCloudDomain(pd::PointData{NDIMS, Tv, Ti},
boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {NDIMS,
Tv, Ti}
return PointCloudDomain{NDIMS, PointData{NDIMS, Tv, Ti},
Dict{Symbol, BoundaryData{Ti, Tv}}}(pd, boundary_tags, false)
end

function PointCloudDomain(points::Vector{Tv}, neighbors::Vector{Vector{Ti}},
boundary_tags::Dict{Symbol, BoundaryData{Ti, Tv}}) where {
N,
Tv <:
SVector{N,
Float64},
Ti
}
    # NOTE: relies on the two-argument PointData constructor, which is currently
    # commented out in geometry_primatives.jl
    pointData = PointData(points, neighbors) # Create an instance of PointData
return PointCloudDomain(pointData,
boundary_tags, false)
end

# Main function for instantiating all the necessary data for a SerialPointCloudDomain
function SerialPointCloudDomain(basis::RefPointData{NDIMS},
points::Vector{SVector{NDIMS, Float64}},
boundary_idxs::Vector{Vector{Int}},
boundary_normals::Vector{Vector{SVector{NDIMS,
Float64}}},
boundary_names_dict::Dict{Symbol, Int}) where {NDIMS}
    pd = PointData(points, basis)
boundary_tags = Dict(name => BoundaryData(boundary_idxs[idx], boundary_normals[idx])
for (name, idx) in boundary_names_dict)
return PointCloudDomain(pd,
boundary_tags, false)
end

Base.ndims(::PointCloudDomain{NDIMS}) where {NDIMS} = NDIMS
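A hypothetical end-to-end construction through the workaround path, using the inner constructors directly (values are made up; assumes StaticArrays' SVector is in scope):

pts = [SVector(0.0, 0.0), SVector(1.0, 0.0), SVector(0.0, 1.0), SVector(1.0, 1.0)]
nbrs = [[1, 2], [2, 1], [3, 4], [4, 3]]
pd = PointData{2, SVector{2, Float64}, Int}(pts, nbrs, length(pts), 2, 1.0, 1.0)
tags = Dict(:wall => BoundaryData([1, 2], [SVector(0.0, -1.0), SVector(0.0, -1.0)]))
domain = PointCloudDomain(pd, tags) # two-argument workaround; unsaved_changes = false
ndims(domain)                       # 2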
52 changes: 52 additions & 0 deletions src/domains/PointCloudDomain/geometry_primatives.jl
@@ -290,3 +290,55 @@ function poly_basis(elem::AbstractElemShape{3},
poly = monomials([x, y, z], 0:N)
return poly
end

# Similar to StartUpDG.MeshData
# This is the underlying data structure for PointCloudDomain
# Mostly reworked
struct PointData{Dim, Tv, Ti}
points::Vector{Tv} # Point coordinates
neighbors::Vector{Vector{Ti}} # Neighbors for each point
num_points::Int # Number of points
num_neighbors::Int # Number of neighbors per point
dx_min::Float64 # Minimum distance between points
dx_avg::Float64 # Average distance between points
end

## We need to calculate dx_min at runtime so we cannot use this constructor
# function PointData(points::Vector{Tv},
# neighbors::Vector{Vector{Ti}}) where {
# Dim,
# Tv <: SVector{Dim, Float64},
# Ti
# }
# PointData{Dim, Tv, Ti}(points, neighbors, length(points), length(neighbors[1]))
# end

"""
PointData(medusa_data::Vector{Tv}, basis) where {Tv <: SVector{Dim, Float64}}
- `medusa_data` contains point positions for entire point cloud.
- `basis` contains all basis information including the number of required neighbors to support required order of accuracy.
"""
function PointData(medusa_data::Vector{Tv},
basis::RefPointData) where {Dim, Tv <: SVector{Dim, Float64}}
nv = basis.nv # The number of neighbors

# Calculate neighbor list for all points
kdtree = KDTree(medusa_data)
n_idxs, n_dists = knn(kdtree, medusa_data, nv, true)

# Calculate avg and min distances between points; with k = 2 each point's
# list holds itself (distance 0) followed by its nearest neighbor
idxs_y, dists_y = knn(kdtree, medusa_data, 2, true)
dx_avg = mean(d[2] for d in dists_y)
dx_min = minimum(d[2] for d in dists_y)

# Instantiate PointData with the points and neighbors. The num_points and num_neighbors are automatically computed.
return PointData{Dim, Tv, Int}(medusa_data, n_idxs, length(medusa_data), nv, dx_min,
dx_avg)
end

struct BoundaryData{Ti <: Integer, Tv <: SVector{N, T} where {N, T <: Number}}
idx::Vector{Ti} # Indices of boundary points
normals::Vector{Tv} # Normals at boundary points
end
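The distance bookkeeping in PointData above can be checked in isolation with NearestNeighbors.jl (already a dependency here); a small sketch with five colinear points of spacing 1.0:

using StaticArrays, NearestNeighbors, Statistics

pts = [SVector(Float64(i), 0.0) for i in 1:5]
kdtree = KDTree(pts)
# k = 2 with sorted results: each list holds the point itself (distance 0)
# followed by its nearest neighbor
idxs, dists = knn(kdtree, pts, 2, true)
dx_avg = mean(d[2] for d in dists)    # 1.0
dx_min = minimum(d[2] for d in dists) # 1.0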
