From 11f3eeedd98f83963ff4bfb054d49e6576026bf4 Mon Sep 17 00:00:00 2001 From: Jorn Bruggeman Date: Mon, 22 Apr 2024 20:23:01 +0100 Subject: [PATCH] allow horizontal APIs to be called for all-masked points (#68) * use catalog index to access catalog%xx_sources * correct masked value of conserved quantities (not used, just for documentation) * allow horizontal APIs to be called for all-masked points --- include/fabm_private.h | 27 +++++++------ src/fabm.F90 | 78 ++++++++++++++++++++++--------------- src/fabm_work.F90 | 32 ++++++++++++++++ src/test/host.F90 | 87 +++++++++++++++++++++++++++--------------- 4 files changed, 152 insertions(+), 72 deletions(-) diff --git a/include/fabm_private.h b/include/fabm_private.h index e6780ca4..e69454b2 100644 --- a/include/fabm_private.h +++ b/include/fabm_private.h @@ -495,7 +495,6 @@ #ifdef _HAS_MASK_ # define _PACK_GLOBAL_(in,out,i,cache) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_SLICE_PLUS_1_(i) = in _INDEX_GLOBAL_INTERIOR_(cache%ipack(_I_));_LOOP_END_ -# define _PACK_GLOBAL_PLUS_1_(in,i,out,j,cache) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_INTERIOR_PLUS_1_(cache%ipack(_I_),i);_LOOP_END_ # define _UNPACK_(in,i,out,cache,missing) _DO_CONCURRENT_(_I_,_START_,_STOP_);out(_I_) = in(cache%iunpack(_I_),i);end do # define _UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) _DO_CONCURRENT_(_I_,_START_,_STOP_);out(_I_,j) = in(cache%iunpack(_I_),i);end do # define _UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) _DO_CONCURRENT_(_I_,_START_,_STOP_);out(_I_,j) = out(_I_,j) + in(cache%iunpack(_I_),i);end do @@ -503,7 +502,6 @@ # define _UNPACK_TO_GLOBAL_PLUS_1_(in,i,out,j,cache,missing) _DO_CONCURRENT_(_I_,_START_,_STOP_);out _INDEX_GLOBAL_INTERIOR_PLUS_1_(_I_,j) = in(cache%iunpack(_I_),i);end do #else # define _PACK_GLOBAL_(in,out,i,cache) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_SLICE_PLUS_1_(i) = in _INDEX_GLOBAL_INTERIOR_(_START_+_I_-1);_LOOP_END_ -# define _PACK_GLOBAL_PLUS_1_(in,i,out,j,cache) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_INTERIOR_PLUS_1_(_START_+_I_-1,i);_LOOP_END_ # define _UNPACK_(in,i,out,cache,missing) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_SLICE_ = in _INDEX_SLICE_PLUS_1_(i);_LOOP_END_ # define _UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_SLICE_PLUS_1_(j) = in _INDEX_SLICE_PLUS_1_(i);_LOOP_END_ # define _UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_SLICE_PLUS_1_(j) = out _INDEX_EXT_SLICE_PLUS_1_(j) + in _INDEX_SLICE_PLUS_1_(i);_LOOP_END_ @@ -511,17 +509,24 @@ # define _UNPACK_TO_GLOBAL_PLUS_1_(in,i,out,j,cache,missing) _CONCURRENT_LOOP_BEGIN_EX_(cache);out _INDEX_GLOBAL_INTERIOR_PLUS_1_(_START_+_I_-1,j) = in _INDEX_SLICE_PLUS_1_(i);_LOOP_END_ #endif -#if defined(_HORIZONTAL_IS_VECTORIZED_)&&defined(_HAS_MASK_) -# define _HORIZONTAL_PACK_GLOBAL_(in,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_(cache%ipack(_J_));_HORIZONTAL_LOOP_END_ -# define _HORIZONTAL_PACK_GLOBAL_PLUS_1_(in,i,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(cache%ipack(_J_),i);_HORIZONTAL_LOOP_END_ -# define _HORIZONTAL_UNPACK_(in,i,out,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_) = in(cache%iunpack(_J_),i);end do -# define _HORIZONTAL_UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) 
_DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_,j) = in(cache%iunpack(_J_),i);end do -# define _HORIZONTAL_UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) _DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_,j) = out(_J_,j) + in(cache%iunpack(_J_),i);end do -# define _HORIZONTAL_UNPACK_TO_GLOBAL_(in,i,out,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out _INDEX_GLOBAL_HORIZONTAL_(_J_) = in(cache%iunpack(_J_),i);end do -# define _HORIZONTAL_UNPACK_TO_GLOBAL_PLUS_1_(in,i,out,j,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(_J_,j) = in(cache%iunpack(_J_),i);end do +#ifdef _HAS_MASK_ +# ifdef _HORIZONTAL_IS_VECTORIZED_ +# define _HORIZONTAL_PACK_GLOBAL_(in,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_(cache%ipack(_J_));_HORIZONTAL_LOOP_END_ +# define _HORIZONTAL_UNPACK_(in,i,out,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_) = in(cache%iunpack(_J_),i);end do +# define _HORIZONTAL_UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_,j) = in(cache%iunpack(_J_),i);end do +# define _HORIZONTAL_UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) _DO_CONCURRENT_(_J_,_START_,_STOP_);out(_J_,j) = out(_J_,j) + in(cache%iunpack(_J_),i);end do +# define _HORIZONTAL_UNPACK_TO_GLOBAL_(in,i,out,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out _INDEX_GLOBAL_HORIZONTAL_(_J_) = in(cache%iunpack(_J_),i);end do +# define _HORIZONTAL_UNPACK_TO_GLOBAL_PLUS_1_(in,i,out,j,cache,missing) _DO_CONCURRENT_(_J_,_START_,_STOP_);out _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(_J_,j) = in(cache%iunpack(_J_),i);end do +# else +# define _HORIZONTAL_PACK_GLOBAL_(in,out,j,cache) out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_(_START_+_J_-1) +# define _HORIZONTAL_UNPACK_(in,i,out,cache,missing) if (cache%n == 0) then;out _INDEX_EXT_HORIZONTAL_SLICE_ = missing;else;out _INDEX_EXT_HORIZONTAL_SLICE_ = in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);end if +# define _HORIZONTAL_UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) if (cache%n == 0) then;out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) = missing;else;out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);end if +# define _HORIZONTAL_UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) if (cache%n /= 0) then;out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) = out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) + in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);end if +# define _HORIZONTAL_UNPACK_TO_GLOBAL_(in,i,out,cache,missing) if (cache%n == 0) then;out _INDEX_GLOBAL_HORIZONTAL_(_START_+_J_-1) = missing;else;out _INDEX_GLOBAL_HORIZONTAL_(_START_+_J_-1) = in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);end if +# define _HORIZONTAL_UNPACK_TO_GLOBAL_PLUS_1_(in,i,out,j,cache,missing) if (cache%n == 0) then;out _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(_START_+_J_-1,j) = missing;else;out _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(_START_+_J_-1,j) = in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);end if +# endif #else # define _HORIZONTAL_PACK_GLOBAL_(in,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_(_START_+_J_-1);_HORIZONTAL_LOOP_END_ -# define _HORIZONTAL_PACK_GLOBAL_PLUS_1_(in,i,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_GLOBAL_HORIZONTAL_PLUS_1_(_START_+_J_-1,i);_HORIZONTAL_LOOP_END_ # define _HORIZONTAL_UNPACK_(in,i,out,cache,missing) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_HORIZONTAL_SLICE_ = in 
_INDEX_HORIZONTAL_SLICE_PLUS_1_(i);_HORIZONTAL_LOOP_END_ # define _HORIZONTAL_UNPACK_TO_PLUS_1_(in,i,out,j,cache,missing) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) = in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);_HORIZONTAL_LOOP_END_ # define _HORIZONTAL_UNPACK_AND_ADD_TO_PLUS_1_(in,i,out,j,cache) _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(cache);out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) = out _INDEX_EXT_HORIZONTAL_SLICE_PLUS_1_(j) + in _INDEX_HORIZONTAL_SLICE_PLUS_1_(i);_HORIZONTAL_LOOP_END_ diff --git a/src/fabm.F90 b/src/fabm.F90 index 4c741481..f74f1491 100644 --- a/src/fabm.F90 +++ b/src/fabm.F90 @@ -1461,7 +1461,7 @@ subroutine initialize_interior_state(self _POSTARG_INTERIOR_IN_) class (type_fabm_model), intent(inout) :: self _DECLARE_ARGUMENTS_INTERIOR_IN_ - integer :: ivar, read_index, icall + integer :: ivar, read_index, catalog_index, icall _DECLARE_INTERIOR_INDICES_ #ifndef NDEBUG @@ -1485,9 +1485,10 @@ subroutine initialize_interior_state(self _POSTARG_INTERIOR_IN_) ! Copy from cache back to global data store [NB variable values have been set in the *read* cache]. do ivar = 1, size(self%interior_state_variables) - read_index = self%interior_state_variables(ivar)%target%read_indices%value - if (self%catalog%interior_sources(read_index) == data_source_fabm) then - _UNPACK_TO_GLOBAL_(self%cache_int%read, read_index, self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p, self%cache_int, self%interior_state_variables(ivar)%missing_value) + catalog_index = self%interior_state_variables(ivar)%target%catalog_index + if (self%catalog%interior_sources(catalog_index) == data_source_fabm) then + read_index = self%interior_state_variables(ivar)%target%read_indices%value + _UNPACK_TO_GLOBAL_(self%cache_int%read, read_index, self%catalog%interior(catalog_index)%p, self%cache_int, self%interior_state_variables(ivar)%missing_value) end if end do @@ -1498,7 +1499,7 @@ subroutine initialize_bottom_state(self _POSTARG_HORIZONTAL_IN_) class (type_fabm_model), intent(inout) :: self _DECLARE_ARGUMENTS_HORIZONTAL_IN_ - integer :: icall, ivar, read_index + integer :: icall, ivar, read_index, catalog_index _DECLARE_HORIZONTAL_INDICES_ #ifndef NDEBUG @@ -1522,9 +1523,10 @@ subroutine initialize_bottom_state(self _POSTARG_HORIZONTAL_IN_) ! Copy from cache back to global data store [NB variable values have been set in the *read* cache]. 
do ivar = 1, size(self%bottom_state_variables) - read_index = self%bottom_state_variables(ivar)%target%read_indices%value - if (self%catalog%horizontal_sources(read_index) == data_source_fabm) then - _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(self%bottom_state_variables(ivar)%target%catalog_index)%p, self%cache_hz, self%bottom_state_variables(ivar)%missing_value) + catalog_index = self%bottom_state_variables(ivar)%target%catalog_index + if (self%catalog%horizontal_sources(catalog_index) == data_source_fabm) then + read_index = self%bottom_state_variables(ivar)%target%read_indices%value + _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(catalog_index)%p, self%cache_hz, self%bottom_state_variables(ivar)%missing_value) end if end do @@ -1535,7 +1537,7 @@ subroutine initialize_surface_state(self _POSTARG_HORIZONTAL_IN_) class (type_fabm_model), intent(inout) :: self _DECLARE_ARGUMENTS_HORIZONTAL_IN_ - integer :: icall, ivar, read_index + integer :: icall, ivar, read_index, catalog_index _DECLARE_HORIZONTAL_INDICES_ #ifndef NDEBUG @@ -1559,9 +1561,10 @@ subroutine initialize_surface_state(self _POSTARG_HORIZONTAL_IN_) ! Copy from cache back to global data store [NB variable values have been set in the *read* cache]. do ivar = 1, size(self%surface_state_variables) - read_index = self%surface_state_variables(ivar)%target%read_indices%value - if (self%catalog%horizontal_sources(read_index) == data_source_fabm) then - _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(self%surface_state_variables(ivar)%target%catalog_index)%p, self%cache_hz, self%surface_state_variables(ivar)%missing_value) + catalog_index = self%surface_state_variables(ivar)%target%catalog_index + if (self%catalog%horizontal_sources(catalog_index) == data_source_fabm) then + read_index = self%surface_state_variables(ivar)%target%read_indices%value + _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(catalog_index)%p, self%cache_hz, self%surface_state_variables(ivar)%missing_value) end if end do @@ -1640,7 +1643,7 @@ subroutine check_interior_state(self _POSTARG_INTERIOR_IN_, repair, valid) logical, intent(out) :: valid logical :: valid_ranges - integer :: ivar, read_index + integer :: ivar, read_index, catalog_index real(rki) :: value, minimum, maximum character(len=256) :: err _DECLARE_INTERIOR_INDICES_ @@ -1656,8 +1659,14 @@ subroutine check_interior_state(self _POSTARG_INTERIOR_IN_, repair, valid) call process_interior_slice(self%check_interior_state_job%first_task, self%domain, self%catalog, self%cache_fill_values, self%store, self%cache_int _POSTARG_INTERIOR_IN_) valid = self%cache_int%valid + + ! If any of the models' custom state checks failed, and repairing was not allowed, + ! no need to also apply our simple range checks - just return if (.not. (valid .or. repair)) return + ! If there are no unmasked points to process, we are done + if (self%cache_int%n == 0) return + ! Finally check whether all state variable values lie within their prescribed [constant] bounds. ! This is always done, independently of any model-specific checks that may have been called above. @@ -1710,9 +1719,10 @@ subroutine check_interior_state(self _POSTARG_INTERIOR_IN_, repair, valid) if (self%cache_int%set_interior .or. .not. 
valid_ranges) then do ivar = 1, size(self%interior_state_variables) - read_index = self%check_interior_state_data(ivar)%index - if (self%catalog%interior_sources(read_index) == data_source_fabm) then - _UNPACK_TO_GLOBAL_(self%cache_int%read, read_index, self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p, self%cache_int, self%interior_state_variables(ivar)%missing_value) + catalog_index = self%interior_state_variables(ivar)%target%catalog_index + if (self%catalog%interior_sources(catalog_index) == data_source_fabm) then + read_index = self%check_interior_state_data(ivar)%index + _UNPACK_TO_GLOBAL_(self%cache_int%read, read_index, self%catalog%interior(catalog_index)%p, self%cache_int, self%interior_state_variables(ivar)%missing_value) end if end do end if @@ -1755,7 +1765,7 @@ subroutine internal_check_horizontal_state(self, job _POSTARG_HORIZONTAL_IN_, ch logical, intent(out) :: valid logical :: valid_ranges - integer :: ivar, read_index + integer :: ivar, read_index, catalog_index real(rki) :: value, minimum, maximum character(len=256) :: err _DECLARE_HORIZONTAL_INDICES_ @@ -1771,8 +1781,14 @@ subroutine internal_check_horizontal_state(self, job _POSTARG_HORIZONTAL_IN_, ch call process_horizontal_slice(job%first_task, self%domain, self%catalog, self%cache_fill_values, self%store, self%cache_hz _POSTARG_HORIZONTAL_IN_) valid = self%cache_hz%valid + + ! If any of the models' custom state checks failed, and repairing was not allowed, + ! no need to also apply our simple range checks - just return if (.not. (valid .or. repair)) return + ! If there are no unmasked points to process, we are done + if (self%cache_hz%n == 0) return + ! Quick bounds check for the common case where all values are valid. valid_ranges = .true. do ivar = 1, size(check_state_data) @@ -1825,9 +1841,10 @@ subroutine internal_check_horizontal_state(self, job _POSTARG_HORIZONTAL_IN_, ch if (self%cache_hz%set_horizontal .or. .not. 
valid_ranges) then do ivar = 1, size(state_variables) - read_index = check_state_data(ivar)%index - if (self%catalog%horizontal_sources(read_index) == data_source_fabm) then - _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(state_variables(ivar)%target%catalog_index)%p, self%cache_hz, state_variables(ivar)%missing_value) + catalog_index = state_variables(ivar)%target%catalog_index + if (self%catalog%horizontal_sources(catalog_index) == data_source_fabm) then + read_index = check_state_data(ivar)%index + _HORIZONTAL_UNPACK_TO_GLOBAL_(self%cache_hz%read_hz, read_index, self%catalog%horizontal(catalog_index)%p, self%cache_hz, state_variables(ivar)%missing_value) end if end do end if @@ -1856,24 +1873,25 @@ subroutine internal_check_horizontal_state(self, job _POSTARG_HORIZONTAL_IN_, ch #endif do ivar = 1, size(self%interior_state_variables) - read_index = self%interior_state_variables(ivar)%target%read_indices%value - if (self%catalog%interior_sources(read_index) == data_source_fabm) then + catalog_index = self%interior_state_variables(ivar)%target%catalog_index + if (self%catalog%interior_sources(catalog_index) == data_source_fabm) then + read_index = self%interior_state_variables(ivar)%target%read_indices%value #if _FABM_BOTTOM_INDEX_==-1&&defined(_HORIZONTAL_IS_VECTORIZED_) if (flag == 1) then #endif #ifdef _HORIZONTAL_IS_VECTORIZED_ # ifdef _HAS_MASK_ - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p _INDEX_GLOBAL_INTERIOR_(self%cache_hz%ipack(1:self%cache_hz%n)) = self%cache_hz%read(1:self%cache_hz%n, read_index) + self%catalog%interior(catalog_index)%p _INDEX_GLOBAL_INTERIOR_(self%cache_hz%ipack(1:self%cache_hz%n)) = self%cache_hz%read(1:self%cache_hz%n, read_index) # else _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(self%cache_hz) - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p _INDEX_GLOBAL_INTERIOR_(_START_+_I_-1) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) + self%catalog%interior(catalog_index)%p _INDEX_GLOBAL_INTERIOR_(_START_+_I_-1) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) _HORIZONTAL_LOOP_END_ # endif #elif defined(_INTERIOR_IS_VECTORIZED_) - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p _INDEX_LOCATION_ = self%cache_hz%read(1,read_index) + self%catalog%interior(catalog_index)%p _INDEX_LOCATION_ = self%cache_hz%read(1,read_index) #else - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p _INDEX_LOCATION_ = self%cache_hz%read(read_index) + self%catalog%interior(catalog_index)%p _INDEX_LOCATION_ = self%cache_hz%read(read_index) #endif #if _FABM_BOTTOM_INDEX_==-1&&defined(_HORIZONTAL_IS_VECTORIZED_) @@ -1882,10 +1900,10 @@ subroutine internal_check_horizontal_state(self, job _POSTARG_HORIZONTAL_IN_, ch _CONCURRENT_HORIZONTAL_LOOP_BEGIN_EX_(self%cache_hz) # ifdef _HAS_MASK_ _VERTICAL_ITERATOR_ = self%domain%bottom_indices _INDEX_GLOBAL_HORIZONTAL_(self%cache_hz%ipack(_J_)) - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p _INDEX_GLOBAL_INTERIOR_(self%cache_hz%ipack(_J_)) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) + self%catalog%interior(catalog_index)%p _INDEX_GLOBAL_INTERIOR_(self%cache_hz%ipack(_J_)) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) # else _VERTICAL_ITERATOR_ = self%domain%bottom_indices _INDEX_GLOBAL_HORIZONTAL_(_START_+_J_-1) - self%catalog%interior(self%interior_state_variables(ivar)%target%catalog_index)%p 
_INDEX_GLOBAL_INTERIOR_(_START_+_I_-1) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) + self%catalog%interior(catalog_index)%p _INDEX_GLOBAL_INTERIOR_(_START_+_I_-1) = self%cache_hz%read _INDEX_SLICE_PLUS_1_(read_index) # endif _HORIZONTAL_LOOP_END_ end if @@ -2047,7 +2065,7 @@ subroutine get_interior_conserved_quantities(self _POSTARG_INTERIOR_IN_, sums) call process_interior_slice(self%get_interior_conserved_quantities_job%first_task, self%domain, self%catalog, self%cache_fill_values, self%store, self%cache_int _POSTARG_INTERIOR_IN_) do i = 1, size(self%conserved_quantities) - _UNPACK_TO_PLUS_1_(self%cache_int%write, self%conserved_quantities(i)%index, sums, i, self%cache_int, 0.0_rke) + _UNPACK_TO_PLUS_1_(self%cache_int%write, self%conserved_quantities(i)%index, sums, i, self%cache_int, self%conserved_quantities(i)%missing_value) end do end subroutine get_interior_conserved_quantities @@ -2071,7 +2089,7 @@ subroutine get_horizontal_conserved_quantities(self _POSTARG_HORIZONTAL_IN_, sum call process_horizontal_slice(self%get_horizontal_conserved_quantities_job%first_task, self%domain, self%catalog, self%cache_fill_values, self%store, self%cache_hz _POSTARG_HORIZONTAL_IN_) do i = 1, size(self%conserved_quantities) - _HORIZONTAL_UNPACK_TO_PLUS_1_(self%cache_hz%write_hz, self%conserved_quantities(i)%horizontal_index, sums, i, self%cache_hz, 0.0_rke) + _HORIZONTAL_UNPACK_TO_PLUS_1_(self%cache_hz%write_hz, self%conserved_quantities(i)%horizontal_index, sums, i, self%cache_hz, self%conserved_quantities(i)%missing_value) end do end subroutine get_horizontal_conserved_quantities diff --git a/src/fabm_work.F90 b/src/fabm_work.F90 index 7afaf505..1a6e434c 100644 --- a/src/fabm_work.F90 +++ b/src/fabm_work.F90 @@ -283,8 +283,12 @@ subroutine begin_interior_task(domain, catalog, fill_values, task, cache _POSTAR # else _N_ = _STOP_ - _START_ + 1 # endif +#else + _N_ = 1 #endif + if (_N_ == 0) return + _DO_CONCURRENT_(i, 1, size(task%load)) j = task%load(i) if (j /= 0) then @@ -343,8 +347,16 @@ subroutine begin_horizontal_task(domain, catalog, fill_values, task, cache _POST # else _N_ = _STOP_ - _START_ + 1 # endif +#else + if (_IS_UNMASKED_(domain%mask_hz _INDEX_HORIZONTAL_LOCATION_)) then + _N_ = 1 + else + _N_ = 0 + end if #endif + if (_N_ == 0) return + _DO_CONCURRENT_(i, 1, size(task%load_hz)) j = task%load_hz(i) if (j /= 0) then @@ -497,8 +509,16 @@ subroutine begin_vertical_task(domain, catalog, fill_values, task, cache _POSTAR # else _N_ = _VERTICAL_STOP_ - _VERTICAL_START_ + 1 # endif +#else + if (_IS_UNMASKED_(domain%mask_hz _INDEX_HORIZONTAL_LOCATION_)) then + _N_ = 1 + else + _N_ = 0 + end if #endif + if (_N_ == 0) return + _DO_CONCURRENT_(i, 1, size(task%load)) j = task%load(i) if (j /= 0) then @@ -653,6 +673,8 @@ subroutine process_interior_slice(task, domain, catalog, cache_fill_values, stor call cache_pack(domain, catalog, cache_fill_values, task, cache _POSTARG_INTERIOR_IN_) + if (_N_ /= 0) then + ncopy = 0 do icall = 1, size(task%calls) if (task%calls(icall)%active) then @@ -683,6 +705,8 @@ subroutine process_interior_slice(task, domain, catalog, cache_fill_values, stor ncopy = ncopy + task%calls(icall)%ncopy_int end do + end if + call cache_unpack(task, cache, store _POSTARG_INTERIOR_IN_) end subroutine process_interior_slice @@ -701,6 +725,8 @@ subroutine process_horizontal_slice(task, domain, catalog, cache_fill_values, st call cache_pack(domain, catalog, cache_fill_values, task, cache _POSTARG_HORIZONTAL_IN_) + if (_N_ /= 0) then + ncopy = 0 do icall = 1, size(task%calls) 
if (task%calls(icall)%active) then @@ -734,6 +760,8 @@ subroutine process_horizontal_slice(task, domain, catalog, cache_fill_values, st ncopy = ncopy + task%calls(icall)%ncopy_hz end do + end if + call cache_unpack(task, cache, store _POSTARG_HORIZONTAL_IN_) end subroutine process_horizontal_slice @@ -752,6 +780,8 @@ subroutine process_vertical_slice(task, domain, catalog, cache_fill_values, stor call cache_pack(domain, catalog, cache_fill_values, task, cache _POSTARG_VERTICAL_IN_) + if (_N_ /= 0) then + ncopy_int = 0 ncopy_hz = 0 do icall = 1, size(task%calls) @@ -789,6 +819,8 @@ subroutine process_vertical_slice(task, domain, catalog, cache_fill_values, stor ncopy_hz = ncopy_hz + task%calls(icall)%ncopy_hz end do + end if + call cache_unpack(task, cache, store _POSTARG_VERTICAL_IN_) end subroutine process_vertical_slice diff --git a/src/test/host.F90 b/src/test/host.F90 index 18065756..e32e6a77 100644 --- a/src/test/host.F90 +++ b/src/test/host.F90 @@ -628,6 +628,16 @@ subroutine configure_mask(randomize) call random_number(tmp_hz) mask_hz = _FABM_UNMASKED_VALUE_ where (tmp_hz > masked_fraction) mask_hz = _FABM_MASKED_VALUE_ + + ! If the bottom index varies horizontally, also mask any points where the bottom index is invalid + ! NB there will be masked points where the bottom index is valid, but that does not cause problems +# if _FABM_BOTTOM_INDEX_==-1 +# ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ + where (bottom_index > domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) mask_hz = _FABM_MASKED_VALUE_ +# else + where (bottom_index < domain_start(_FABM_DEPTH_DIMENSION_INDEX_)) mask_hz = _FABM_MASKED_VALUE_ +# endif +# endif # else ! Apply random mask across interior domain (half of grid cells masked) call random_number(tmp) @@ -637,7 +647,7 @@ subroutine configure_mask(randomize) # if _FABM_BOTTOM_INDEX_==-1 ! Bottom index varies in the horizontal. Ensure the bottom cell itself is unmasked, and anything deeper is masked. _BEGIN_GLOBAL_HORIZONTAL_LOOP_ - ! Valid bottom index - unmask associated cell, then mask all deeper ones + ! If we have a valid bottom index - unmask associated cell, then mask all deeper ones # ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ if (bottom_index _INDEX_HORIZONTAL_LOCATION_ <= domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) mask _INDEX_GLOBAL_VERTICAL_(bottom_index _INDEX_HORIZONTAL_LOCATION_) = _FABM_UNMASKED_VALUE_ mask _INDEX_GLOBAL_VERTICAL_(:bottom_index _INDEX_HORIZONTAL_LOCATION_ - 1) = _FABM_MASKED_VALUE_ @@ -657,9 +667,16 @@ subroutine configure_mask(randomize) ! For valid points in the horizontal, make sure that index 1 (bottom or surface) is unmasked _BEGIN_GLOBAL_HORIZONTAL_LOOP_ if (_IS_UNMASKED_(mask_hz _INDEX_HORIZONTAL_LOCATION_)) then +# ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ + mask _INDEX_GLOBAL_VERTICAL_(domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) = _FABM_UNMASKED_VALUE_ +# if _FABM_BOTTOM_INDEX_!=-1 + mask _INDEX_GLOBAL_VERTICAL_(domain_start(_FABM_DEPTH_DIMENSION_INDEX_)) = _FABM_UNMASKED_VALUE_ +# endif +# else mask _INDEX_GLOBAL_VERTICAL_(domain_start(_FABM_DEPTH_DIMENSION_INDEX_)) = _FABM_UNMASKED_VALUE_ -# if _FABM_BOTTOM_INDEX_!=-1 +# if _FABM_BOTTOM_INDEX_!=-1 mask _INDEX_GLOBAL_VERTICAL_(domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) = _FABM_UNMASKED_VALUE_ +# endif # endif end if _END_GLOBAL_HORIZONTAL_LOOP_ @@ -687,7 +704,7 @@ subroutine configure_mask(randomize) if (_IS_UNMASKED_(mask_hz _INDEX_HORIZONTAL_LOCATION_)) then call assert(bottom_index _INDEX_HORIZONTAL_LOCATION_ >= domain_start(_FABM_DEPTH_DIMENSION_INDEX_) & .and. 
bottom_index _INDEX_HORIZONTAL_LOCATION_ <= domain_stop(_FABM_DEPTH_DIMENSION_INDEX_), 'randomize_mask', & - 'BUG: unmaked horizontal location has invalid bottom index') + 'BUG: unmasked horizontal location has invalid bottom index') end if _END_GLOBAL_HORIZONTAL_LOOP_ #endif @@ -703,19 +720,28 @@ subroutine count_active_points() interior_count = 0 _BEGIN_GLOBAL_LOOP_ + active = .true. + + ! Mark as inactive if the point is masked #ifdef _HAS_MASK_ # ifdef _FABM_HORIZONTAL_MASK_ active = _IS_UNMASKED_(mask_hz _INDEX_HORIZONTAL_LOCATION_) # else active = _IS_UNMASKED_(mask _INDEX_LOCATION_) # endif -#elif _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) +#endif + + ! Mark as inactive if the point lies below the bottom + ! This is not caught by the mask check above if (a) there is no mask, + ! or (b) the mask is horizontal only +#if _FABM_BOTTOM_INDEX_==-1 # ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ - active = _ITERATOR_ >= bottom_index _INDEX_HORIZONTAL_LOCATION_ + active = active .and. _ITERATOR_ >= bottom_index _INDEX_HORIZONTAL_LOCATION_ # else - active = _ITERATOR_ <= bottom_index _INDEX_HORIZONTAL_LOCATION_ + active = active .and. _ITERATOR_ <= bottom_index _INDEX_HORIZONTAL_LOCATION_ # endif #endif + if (active) interior_count = interior_count + 1 _END_GLOBAL_LOOP_ @@ -954,24 +980,31 @@ subroutine test_update(apply_mask, restrict_range) interior_loop_count = 0 _BEGIN_OUTER_INTERIOR_LOOP_ dy = 0 -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) ! We are looping over depth, but as we have a non-constant bottom index (yet no mask), we need to skip everything below bottom # ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ _START_ = bottom_index _INDEX_HORIZONTAL_LOCATION_ # else _STOP_ = bottom_index _INDEX_HORIZONTAL_LOCATION_ # endif + if (_START_ <= _STOP_) then #endif + call model%get_interior_sources(_PREARG_INTERIOR_IN_ dy _INTERIOR_SLICE_RANGE_PLUS_1_) do ivar = 1, size(model%interior_state_variables) call check_interior_slice_plus_1(dy _INTERIOR_SLICE_RANGE_PLUS_1_, ivar, 0.0_rke, -real(ivar + interior_state_offset, rke) _POSTARG_INTERIOR_IN_) end do + +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) + end if +#endif _END_OUTER_INTERIOR_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) + ! 
If we varied start or stop indices of the inner loop, reset them now +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) _START_ = domain_start(_FABM_VECTORIZED_DIMENSION_INDEX_) _STOP_ = domain_stop(_FABM_VECTORIZED_DIMENSION_INDEX_) -# endif +#endif call assert(interior_loop_count == interior_count, 'get_interior_sources', & 'call count does not match number of (unmasked) interior points') @@ -1004,9 +1037,6 @@ subroutine test_update(apply_mask, restrict_range) call start_test('get_surface_sources') surface_loop_count = 0 _BEGIN_OUTER_HORIZONTAL_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - if (bottom_index _INDEX_HORIZONTAL_LOCATION_ >= domain_start(_FABM_DEPTH_DIMENSION_INDEX_) .and. bottom_index _INDEX_HORIZONTAL_LOCATION_ <= domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) then -#endif flux = 0 sms_sf = 0 call model%get_surface_sources(_PREARG_HORIZONTAL_IN_ flux _HORIZONTAL_SLICE_RANGE_PLUS_1_, sms_sf _HORIZONTAL_SLICE_RANGE_PLUS_1_) @@ -1016,9 +1046,6 @@ subroutine test_update(apply_mask, restrict_range) do ivar = 1, size(model%surface_state_variables) call check_horizontal_slice_plus_1(sms_sf _HORIZONTAL_SLICE_RANGE_PLUS_1_, ivar, 0.0_rke, -real(ivar + surface_state_offset, rke) _POSTARG_HORIZONTAL_IN_) end do -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - end if -#endif _END_OUTER_HORIZONTAL_LOOP_ call assert(surface_loop_count == horizontal_count, 'get_surface_sources', & 'call count does not match number of (unmasked) horizontal points') @@ -1052,9 +1079,6 @@ subroutine test_update(apply_mask, restrict_range) call start_test('get_bottom_sources') bottom_loop_count = 0 _BEGIN_OUTER_HORIZONTAL_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - if (bottom_index _INDEX_HORIZONTAL_LOCATION_ >= domain_start(_FABM_DEPTH_DIMENSION_INDEX_) .and. bottom_index _INDEX_HORIZONTAL_LOCATION_ <= domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) then -#endif flux = 0 sms_bt = 0 call model%get_bottom_sources(_PREARG_HORIZONTAL_IN_ flux _HORIZONTAL_SLICE_RANGE_PLUS_1_, sms_bt _HORIZONTAL_SLICE_RANGE_PLUS_1_) @@ -1064,9 +1088,6 @@ subroutine test_update(apply_mask, restrict_range) do ivar = 1, size(model%bottom_state_variables) call check_horizontal_slice_plus_1(sms_bt _HORIZONTAL_SLICE_RANGE_PLUS_1_, ivar, 0.0_rke, -real(ivar + bottom_state_offset, rke) _POSTARG_HORIZONTAL_IN_) end do -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - end if -#endif _END_OUTER_HORIZONTAL_LOOP_ call assert(bottom_loop_count == horizontal_count, 'get_bottom_sources', & 'call count does not match number of (unmasked) horizontal points') @@ -1123,14 +1144,16 @@ subroutine test_update(apply_mask, restrict_range) call start_test('get_vertical_movement') vertical_movement_loop_count = 0 _BEGIN_OUTER_INTERIOR_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) ! 
We are looping over depth, but as we have a non-constant bottom index (yet no mask), we need to skip everything below bottom # ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ _START_ = bottom_index _INDEX_HORIZONTAL_LOCATION_ # else _STOP_ = bottom_index _INDEX_HORIZONTAL_LOCATION_ # endif + if (_START_ <= _STOP_) then #endif + call model%get_vertical_movement(_PREARG_INTERIOR_IN_ w _INTERIOR_SLICE_RANGE_PLUS_1_) do ivar = 1, size(model%interior_state_variables) if (mod(ivar, 2) == 0) then @@ -1141,12 +1164,18 @@ subroutine test_update(apply_mask, restrict_range) _POSTARG_INTERIOR_IN_) end if end do + +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) + end if +#endif + _END_OUTER_INTERIOR_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) + ! If we varied start or stop indices of the inner loop, reset them now +#if _FABM_BOTTOM_INDEX_==-1 && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) _START_ = domain_start(_FABM_VECTORIZED_DIMENSION_INDEX_) _STOP_ = domain_stop(_FABM_VECTORIZED_DIMENSION_INDEX_) -# endif +#endif call assert(vertical_movement_loop_count == interior_count, 'get_vertical_movement', & 'call count does not match number of (unmasked) interior points') @@ -1173,6 +1202,7 @@ subroutine test_update(apply_mask, restrict_range) end do _END_OUTER_INTERIOR_LOOP_ + ! If we varied start or stop indices of the inner loop, reset them now #if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) && _FABM_VECTORIZED_DIMENSION_INDEX_==_FABM_DEPTH_DIMENSION_INDEX_ && defined(_FABM_DEPTH_DIMENSION_INDEX_) _START_ = domain_start(_FABM_VECTORIZED_DIMENSION_INDEX_) _STOP_ = domain_stop(_FABM_VECTORIZED_DIMENSION_INDEX_) @@ -1181,18 +1211,12 @@ subroutine test_update(apply_mask, restrict_range) call start_test('get_horizontal_conserved_quantities') _BEGIN_OUTER_HORIZONTAL_LOOP_ -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - if (bottom_index _INDEX_HORIZONTAL_LOCATION_ >= domain_start(_FABM_DEPTH_DIMENSION_INDEX_) .and. bottom_index _INDEX_HORIZONTAL_LOCATION_ <= domain_stop(_FABM_DEPTH_DIMENSION_INDEX_)) then -#endif call model%get_horizontal_conserved_quantities(_PREARG_HORIZONTAL_IN_ total_hz _HORIZONTAL_SLICE_RANGE_PLUS_1_) do ivar = 1, size(model%conserved_quantities) call check_horizontal_slice_plus_1(total_hz _HORIZONTAL_SLICE_RANGE_PLUS_1_, ivar, model%conserved_quantities(ivar)%missing_value, & (surface_state_offset + 0.5_rke * (test_model%nsurface_state + 1)) * test_model%nsurface_state + & (bottom_state_offset + 0.5_rke * (test_model%nbottom_state + 1)) * test_model%nbottom_state _POSTARG_HORIZONTAL_IN_) end do -#if _FABM_BOTTOM_INDEX_==-1 && !defined(_HAS_MASK_) - endif -#endif _END_OUTER_HORIZONTAL_LOOP_ call report_test_result() @@ -1538,7 +1562,8 @@ subroutine check_interior(dat, required_masked_value, required_value) # else unmasked = _IS_UNMASKED_(mask _INDEX_LOCATION_) # endif -#elif _FABM_BOTTOM_INDEX_==-1 +#endif +#if _FABM_BOTTOM_INDEX_==-1 # ifdef _FABM_VERTICAL_BOTTOM_TO_SURFACE_ if (_ITERATOR_ < bottom_index _INDEX_HORIZONTAL_LOCATION_) cycle # else