Allow for MKArray to not be an AbstractArray
This is a bit of a faff, because arrays in post-processing are `Array`,
not `MKArray`, so any function that we might want to use in
post-processing has to allow for either type.
johnomotani committed Jan 27, 2025
1 parent aa13082 commit 81af1a9
Showing 27 changed files with 450 additions and 393 deletions.
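For context, here is a minimal sketch of the union-alias idea the diffs below rely on. The real definitions live in `moment_kinetics/src/type_definitions.jl` (hence the switch to a plain `using ..type_definitions` in each module); every name in this snippet is a hypothetical stand-in, not the package's own:

```julia
# Hypothetical sketch of the union-alias idea; not the package's definitions.
struct ToyMKArray{T,N}                 # stand-in for MKArray: deliberately NOT
    a::Array{T,N}                      # a subtype of AbstractArray
end
Base.getindex(x::ToyMKArray, i::Int...) = x.a[i...]

# Union aliases in the spirit of MKOrAbstractArray / MKOrAbstractVector let one
# method accept MKArray in the main code and plain Array in post-processing:
const ToyMKOrAbstractArray{T,N} = Union{ToyMKArray{T,N}, AbstractArray{T,N}}
const ToyMKOrAbstractVector{T} = ToyMKOrAbstractArray{T,1}

first_element(v::ToyMKOrAbstractVector) = v[1]

first_element([1.0, 2.0])              # post-processing path: Array
first_element(ToyMKArray([1.0, 2.0]))  # main-code path: wrapper
```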
1 change: 1 addition & 0 deletions moment_kinetics/Project.toml
@@ -52,6 +52,7 @@ manufactured_solns_ext = ["Symbolics", "IfElse"]

[compat]
julia = "1.9.0"
+InboundsArrays = "0.2.1"

[extras]
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
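Note: with Pkg's default caret semantics, the `[compat]` bound `InboundsArrays = "0.2.1"` admits versions from 0.2.1 up to, but not including, 0.3.0.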
12 changes: 6 additions & 6 deletions moment_kinetics/src/analysis.jl
@@ -15,7 +15,7 @@ using ..interpolation: interpolate_to_grid_1d
using ..load_data: open_readonly_output_file, get_nranks, load_pdf_data, load_rank_data
using ..load_data: load_distributed_ion_pdf_slice
using ..looping
-using ..type_definitions: mk_int, mk_float
+using ..type_definitions
using ..velocity_moments: integrate_over_vspace

using FFTW
@@ -690,7 +690,7 @@ function steady_state_square_residuals(variable, variable_at_previous_time, dt,
end
variable_max = MPI.Allreduce(local_max, max, comm_world)
end
-if isa(dt, AbstractVector)
+if isa(dt, MKOrAbstractVector)
reshaped_dt = reshape(dt, tuple((1 for _ ∈ 1:t_dim-1)..., size(dt)...))
else
reshaped_dt = dt
@@ -699,9 +699,9 @@ function steady_state_square_residuals(variable, variable_at_previous_time, dt,
if size(dt) == ()
# local_max_absolute should always be at least a 1d array of size 1, not
# a 0d array, so that the MPI.Gather() below works correctly.
-local_max_absolute = zeros(1)
+local_max_absolute = mk_zeros(1)
else
-local_max_absolute = zeros(size(dt))
+local_max_absolute = mk_zeros(size(dt))
end
else
if size(dt) == ()
Expand Down Expand Up @@ -732,7 +732,7 @@ function steady_state_square_residuals(variable, variable_at_previous_time, dt,
# have size 1.
this_dims = tuple((1:t_dim-3)...)
if this_dims === ()
-local_max_absolute = max.(local_max_absolute, [absolute_residual])
+local_max_absolute[1] = max(local_max_absolute[1], absolute_residual[])
else
local_max_absolute = max.(local_max_absolute,
vec(maximum(absolute_residual,
@@ -996,7 +996,7 @@ phi_fit_result struct whose fields are:
"""
function fit_delta_phi_mode(t, z, delta_phi)
# First fit a cosine to each time slice
-results = allocate_float(3, size(delta_phi)[2])
+results = zeros(mk_float, 3, size(delta_phi)[2])
amplitude_guess = 1.0
offset_guess = 0.0
for (i, phi_z) in enumerate(eachcol(delta_phi))
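An aside on the changes above: once `MKArray` need not be an `AbstractArray`, this code can no longer assume `Base` broadcasting works on it, so allocation presumably goes through the MK-aware `mk_zeros` and the running maximum is updated with plain indexing instead of `max.` over a one-element array. The `absolute_residual[]` form reads what appears to be a zero-dimensional array. A minimal sketch of that last idiom (plain `zeros` is used here only to keep the snippet self-contained):

```julia
absolute_residual = zeros()       # 0-dimensional Array{Float64,0}
absolute_residual[] = 3.5         # `[]` writes (and reads) its single element
local_max_absolute = zeros(1)     # the real code calls mk_zeros(1) here
local_max_absolute[1] = max(local_max_absolute[1], absolute_residual[])
```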
36 changes: 18 additions & 18 deletions moment_kinetics/src/boundary_conditions.jl
@@ -14,7 +14,7 @@ using ..interpolation: interpolate_to_grid_1d!
using ..looping
using ..timer_utils
using ..moment_kinetics_structs: scratch_pdf, em_fields_struct
-using ..type_definitions: mk_float, mk_int
+using ..type_definitions
using ..velocity_moments: integrate_over_vspace, integrate_over_neutral_vspace,
integrate_over_positive_vz, integrate_over_negative_vz

@@ -75,10 +75,10 @@ end
"""
enforce boundary conditions on f in r
"""
-function enforce_r_boundary_condition!(f::AbstractArray{mk_float,5}, f_r_bc, bc::String,
-adv, vpa, vperp, z, r, composition, end1::AbstractArray{mk_float,4},
-end2::AbstractArray{mk_float,4}, buffer1::AbstractArray{mk_float,4},
-buffer2::AbstractArray{mk_float,4}, r_diffusion::Bool)
+function enforce_r_boundary_condition!(f::MKOrAbstractArray{mk_float,5}, f_r_bc, bc::String,
+adv, vpa, vperp, z, r, composition, end1::MKOrAbstractArray{mk_float,4},
+end2::MKOrAbstractArray{mk_float,4}, buffer1::MKOrAbstractArray{mk_float,4},
+buffer2::MKOrAbstractArray{mk_float,4}, r_diffusion::Bool)

nr = r.n

@@ -125,9 +125,9 @@ end
enforce boundary conditions on ion particle f in z
"""
function enforce_z_boundary_condition!(pdf, density, upar, ppar, phi, moments, bc::String, adv,
-z, vperp, vpa, composition, end1::AbstractArray{mk_float,4},
-end2::AbstractArray{mk_float,4}, buffer1::AbstractArray{mk_float,4},
-buffer2::AbstractArray{mk_float,4})
+z, vperp, vpa, composition, end1::MKOrAbstractArray{mk_float,4},
+end2::MKOrAbstractArray{mk_float,4}, buffer1::MKOrAbstractArray{mk_float,4},
+buffer2::MKOrAbstractArray{mk_float,4})
# this block ensures periodic BC can be supported with distributed memory MPI
if z.nelement_global > z.nelement_local
# reconcile internal element boundaries across processes
@@ -281,10 +281,10 @@ enforce boundary conditions on neutral particle distribution function
end
end

-function enforce_neutral_r_boundary_condition!(f::AbstractArray{mk_float,6},
-f_r_bc::AbstractArray{mk_float,6}, adv, vz, vr, vzeta, z, r, composition,
-end1::AbstractArray{mk_float,5}, end2::AbstractArray{mk_float,5},
-buffer1::AbstractArray{mk_float,5}, buffer2::AbstractArray{mk_float,5},
+function enforce_neutral_r_boundary_condition!(f::MKOrAbstractArray{mk_float,6},
+f_r_bc::MKOrAbstractArray{mk_float,6}, adv, vz, vr, vzeta, z, r, composition,
+end1::MKOrAbstractArray{mk_float,5}, end2::MKOrAbstractArray{mk_float,5},
+buffer1::MKOrAbstractArray{mk_float,5}, buffer2::MKOrAbstractArray{mk_float,5},
r_diffusion) #f_initial,

bc = r.bc
@@ -334,8 +334,8 @@ enforce boundary conditions on neutral particle f in z
function enforce_neutral_z_boundary_condition!(pdf, density, uz, pz, moments, density_ion,
upar_ion, Er, boundary_distributions, adv,
z, vzeta, vr, vz, composition, geometry,
-end1::AbstractArray{mk_float,5}, end2::AbstractArray{mk_float,5},
-buffer1::AbstractArray{mk_float,5}, buffer2::AbstractArray{mk_float,5})
+end1::MKOrAbstractArray{mk_float,5}, end2::MKOrAbstractArray{mk_float,5},
+buffer1::MKOrAbstractArray{mk_float,5}, buffer2::MKOrAbstractArray{mk_float,5})


if z.nelement_global > z.nelement_local
@@ -794,7 +794,7 @@ function enforce_neutral_wall_bc!(pdf, z, vzeta, vr, vz, pz, uz, density, wall_f
zero_vz_ind = 0
for ivz ∈ 1:vz.n
if vz.scratch2[ivz] <= -zero
-pdf[ivz,:,:,1] .= N_in*pdf[ivz,:,:,1]
+pdf[ivz,:,:,1] .= N_in .* pdf[ivz,:,:,1]
else
zero_vz_ind = ivz
if abs(vz.scratch2[ivz]) < zero
@@ -1046,22 +1046,22 @@ enforce zero boundary condition at vperp -> infinity
"""
function enforce_vperp_boundary_condition! end

-function enforce_vperp_boundary_condition!(f::AbstractArray{mk_float,5}, bc, vperp, vperp_spectral, vperp_advect, diffusion)
+function enforce_vperp_boundary_condition!(f::MKOrAbstractArray{mk_float,5}, bc, vperp, vperp_spectral, vperp_advect, diffusion)
@loop_s is begin
@views enforce_vperp_boundary_condition!(f[:,:,:,:,is], bc, vperp, vperp_spectral, vperp_advect[is], diffusion)
end
return nothing
end

-function enforce_vperp_boundary_condition!(f::AbstractArray{mk_float,4}, bc, vperp, vperp_spectral, vperp_advect, diffusion)
+function enforce_vperp_boundary_condition!(f::MKOrAbstractArray{mk_float,4}, bc, vperp, vperp_spectral, vperp_advect, diffusion)
@loop_r ir begin
@views enforce_vperp_boundary_condition!(f[:,:,:,ir], bc, vperp, vperp_spectral,
vperp_advect, diffusion, ir)
end
return nothing
end

-function enforce_vperp_boundary_condition!(f::AbstractArray{mk_float,3}, bc, vperp,
+function enforce_vperp_boundary_condition!(f::MKOrAbstractArray{mk_float,3}, bc, vperp,
vperp_spectral, vperp_advect, diffusion, ir)
if bc == "zero" || bc == "zero-impose-regularity"
nvperp = vperp.n
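Two details in this file are worth spelling out. First, every method in the `enforce_vperp_boundary_condition!` chain is widened, not just the outermost one: each level slices with `@views` and calls the next, so if a view of an `MKArray` is itself an `MKArray` rather than an `AbstractArray` (an assumption here), an inner method still annotated `AbstractArray` would throw a `MethodError`. Second, `N_in*pdf[ivz,:,:,1]` becomes `N_in .* pdf[ivz,:,:,1]`: plain scalar-times-array `*` is an `AbstractArray` method that a non-`AbstractArray` wrapper does not inherit, while the dotted form goes through broadcasting that the wrapper can opt into. A toy, self-contained sketch of the first point (all names hypothetical):

```julia
struct Wrap{T,N}                        # hypothetical non-AbstractArray wrapper
    a::Array{T,N}
end
const WrapOrAbstract{T,N} = Union{Wrap{T,N}, AbstractArray{T,N}}

# Slicing that preserves the wrapper type, standing in for @views on MKArray:
slice3(f::Wrap{T,4}) where T = Wrap(f.a[:, :, :, 1])
slice3(f::AbstractArray{T,4}) where T = view(f, :, :, :, 1)

inner!(f::WrapOrAbstract{Float64,3}) = nothing  # widened: accepts both paths
# Had inner! been declared f::AbstractArray{Float64,3}, the wrapper call below
# would throw a MethodError, because Wrap is not an AbstractArray.
outer!(f::WrapOrAbstract{Float64,4}) = inner!(slice3(f))

outer!(zeros(2, 2, 2, 2))               # Array path (post-processing)
outer!(Wrap(zeros(2, 2, 2, 2)))         # wrapper path (main code)
```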
60 changes: 30 additions & 30 deletions moment_kinetics/src/calculus.jl
@@ -9,7 +9,7 @@ export integral
using ..moment_kinetics_structs: discretization_info, null_spatial_dimension_info,
null_velocity_dimension_info, weak_discretization_info
using ..timer_utils
-using ..type_definitions: mk_float, mk_int
+using ..type_definitions
using MPI
using ..communication: block_rank
using ..communication: _block_synchronize
@@ -272,7 +272,7 @@ end

"""
"""
-function derivative_elements_to_full_grid!(df1d, df2d, coord, adv_fac::AbstractArray{mk_float,1})
+function derivative_elements_to_full_grid!(df1d, df2d, coord, adv_fac::MKOrAbstractVector{mk_float})
# no changes need to be made for the derivative at points away from element boundaries
elements_to_full_grid_interior_pts!(df1d, df2d, coord)
# resolve the multi-valued nature of the derivative at element boundaries
@@ -321,7 +321,7 @@ df is multi-valued at the overlapping point at the boundary
between neighboring elements.
here we choose to use the value of df from the upwind element.
"""
-function reconcile_element_boundaries_upwind!(df1d, df2d, coord, adv_fac::AbstractArray{mk_float,1})
+function reconcile_element_boundaries_upwind!(df1d, df2d, coord, adv_fac::MKOrAbstractVector{mk_float})
# note that the first ngrid points are classified as belonging to the first element
# and the next ngrid-1 points belonging to second element, etc.

@@ -426,8 +426,8 @@ updated to include each physical dimension required
in the main code
"""

-function assign_endpoint!(df1d::AbstractArray{mk_float,Ndims},
-receive_buffer::AbstractArray{mk_float,Mdims},key::String,coord) where {Ndims,Mdims}
+function assign_endpoint!(df1d::MKOrAbstractArray{mk_float,Ndims},
+receive_buffer::MKOrAbstractArray{mk_float,Mdims},key::String,coord) where {Ndims,Mdims}
if key == "lower"
j = 1
elseif key == "upper"
@@ -480,11 +480,11 @@ function assign_endpoint!(df1d::AbstractArray{mk_float,Ndims},
end

@timeit_debug global_timer reconcile_element_boundaries_MPI!(
-df1d::AbstractArray{mk_float,Ndims},
-dfdx_lower_endpoints::AbstractArray{mk_float,Mdims},
-dfdx_upper_endpoints::AbstractArray{mk_float,Mdims},
-receive_buffer1::AbstractArray{mk_float,Mdims},
-receive_buffer2::AbstractArray{mk_float,Mdims},
+df1d::MKOrAbstractArray{mk_float,Ndims},
+dfdx_lower_endpoints::MKOrAbstractArray{mk_float,Mdims},
+dfdx_upper_endpoints::MKOrAbstractArray{mk_float,Mdims},
+receive_buffer1::MKOrAbstractArray{mk_float,Mdims},
+receive_buffer2::MKOrAbstractArray{mk_float,Mdims},
coord) where {Ndims,Mdims} = begin

# synchronize buffers
@@ -556,7 +556,7 @@ end
_block_synchronize()
end

-function apply_adv_fac!(buffer::AbstractArray{mk_float,Ndims},adv_fac::AbstractArray{mk_float,Ndims},endpoints::AbstractArray{mk_float,Ndims},sgn::mk_int) where Ndims
+function apply_adv_fac!(buffer::MKOrAbstractArray{mk_float,Ndims},adv_fac::MKOrAbstractArray{mk_float,Ndims},endpoints::MKOrAbstractArray{mk_float,Ndims},sgn::mk_int) where Ndims
#buffer contains off-process endpoint
#adv_fac < 0 is positive advection speed
#adv_fac > 0 is negative advection speed
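The comments above fix the sign convention for `apply_adv_fac!`. One plausible reading of the reconciliation it performs, as a self-contained sketch (an illustration of the upwind idea under that convention, not the package's exact implementation):

```julia
# Upwind reconciliation sketch: after the MPI exchange, `buffer` holds the
# off-process endpoint derivative and `endpoints` the local one; keep whichever
# is upwind. Assumed convention, following the comments above: adv_fac < 0
# means positive advection speed, and sgn (±1) distinguishes the two ends.
function apply_adv_fac_sketch!(buffer::Vector{Float64}, adv_fac::Vector{Float64},
                               endpoints::Vector{Float64}, sgn::Int)
    @inbounds for i in eachindex(buffer)
        if sgn * adv_fac[i] > 0.0
            buffer[i] = endpoints[i]                      # local side is upwind
        elseif sgn * adv_fac[i] == 0.0
            buffer[i] = 0.5 * (buffer[i] + endpoints[i])  # no advection: average
        end        # otherwise the off-process side is upwind; buffer keeps it
    end
    return nothing
end
```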
@@ -578,13 +578,13 @@ function apply_adv_fac!(buffer::AbstractArray{mk_float,Ndims},adv_fac::AbstractA
end

@timeit_debug global_timer reconcile_element_boundaries_MPI!(
-df1d::AbstractArray{mk_float,Ndims},
-adv_fac_lower_endpoints::AbstractArray{mk_float,Mdims},
-adv_fac_upper_endpoints::AbstractArray{mk_float,Mdims},
-dfdx_lower_endpoints::AbstractArray{mk_float,Mdims},
-dfdx_upper_endpoints::AbstractArray{mk_float,Mdims},
-receive_buffer1::AbstractArray{mk_float,Mdims},
-receive_buffer2::AbstractArray{mk_float,Mdims},
+df1d::MKOrAbstractArray{mk_float,Ndims},
+adv_fac_lower_endpoints::MKOrAbstractArray{mk_float,Mdims},
+adv_fac_upper_endpoints::MKOrAbstractArray{mk_float,Mdims},
+dfdx_lower_endpoints::MKOrAbstractArray{mk_float,Mdims},
+dfdx_upper_endpoints::MKOrAbstractArray{mk_float,Mdims},
+receive_buffer1::MKOrAbstractArray{mk_float,Mdims},
+receive_buffer2::MKOrAbstractArray{mk_float,Mdims},
coord) where {Ndims,Mdims} = begin

# synchronize buffers
@@ -660,11 +660,11 @@ end
# Special version for pdf_electron with no r-dimension, which has the same number of
# dimensions as an ion/neutral moment variable, but different dimensions.
@timeit_debug global_timer reconcile_element_boundaries_MPI_z_pdf_vpavperpz!(
-df1d::AbstractArray{mk_float,3},
-dfdx_lower_endpoints::AbstractArray{mk_float,2},
-dfdx_upper_endpoints::AbstractArray{mk_float,2},
-receive_buffer1::AbstractArray{mk_float,2},
-receive_buffer2::AbstractArray{mk_float,2}, coord) = begin
+df1d::MKOrAbstractArray{mk_float,3},
+dfdx_lower_endpoints::MKOrAbstractArray{mk_float,2},
+dfdx_upper_endpoints::MKOrAbstractArray{mk_float,2},
+receive_buffer1::MKOrAbstractArray{mk_float,2},
+receive_buffer2::MKOrAbstractArray{mk_float,2}, coord) = begin

# synchronize buffers
# -- this all-to-all block communicate here requires that this function is NOT called from within a parallelised loop
@@ -738,13 +738,13 @@ end
# Special version for pdf_electron with no r-dimension, which has the same number of
# dimensions as an ion/neutral moment variable, but different dimensions.
@timeit_debug global_timer reconcile_element_boundaries_MPI_z_pdf_vpavperpz!(
-df1d::AbstractArray{mk_float,3},
-adv_fac_lower_endpoints::AbstractArray{mk_float,2},
-adv_fac_upper_endpoints::AbstractArray{mk_float,2},
-dfdx_lower_endpoints::AbstractArray{mk_float,2},
-dfdx_upper_endpoints::AbstractArray{mk_float,2},
-receive_buffer1::AbstractArray{mk_float,2},
-receive_buffer2::AbstractArray{mk_float,2}, coord) = begin
+df1d::MKOrAbstractArray{mk_float,3},
+adv_fac_lower_endpoints::MKOrAbstractArray{mk_float,2},
+adv_fac_upper_endpoints::MKOrAbstractArray{mk_float,2},
+dfdx_lower_endpoints::MKOrAbstractArray{mk_float,2},
+dfdx_upper_endpoints::MKOrAbstractArray{mk_float,2},
+receive_buffer1::MKOrAbstractArray{mk_float,2},
+receive_buffer2::MKOrAbstractArray{mk_float,2}, coord) = begin

# synchronize buffers
# -- this all-to-all block communicate here requires that this function is NOT called from within a parallelised loop
(The remaining 23 changed files are not shown here.)
