From bf981c802bf0a8986991555a3a2cdf724f7956f2 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Mon, 7 Oct 2024 11:55:17 -0700 Subject: [PATCH 1/3] Update ReduceCceWorldube to take multiple input formats The worldtube data can now be one of MetricModal, MetricNodal, BondiModal, BondiNodal. Also, the executable can now combine multiple H5 files into one. --- docs/Tutorials/CCE.md | 278 ++++---- .../ReduceCceWorldtube/ReduceCceWorldtube.cpp | 610 ++++++++++++++---- .../ReduceCceWorldtube.yaml | 6 +- tests/Unit/Executables/CMakeLists.txt | 37 ++ .../Executables/Test_ReduceCceWorldtube.cpp | 488 ++++++++++++++ 5 files changed, 1184 insertions(+), 235 deletions(-) create mode 100644 tests/Unit/Executables/Test_ReduceCceWorldtube.cpp diff --git a/docs/Tutorials/CCE.md b/docs/Tutorials/CCE.md index 0b92832804e5..bb88003678b6 100644 --- a/docs/Tutorials/CCE.md +++ b/docs/Tutorials/CCE.md @@ -22,10 +22,10 @@ release). Inside this tarball is - the CCE executable `CharacteristicExtract` - an example YAML input file - an example set of Bondi-Sachs worldtube data in the `Tests/` directory (see - [Input worldtube data formats](#input_worldtube_data_formats) section) + [Input worldtube data formats](#input_worldtube_data_format) section) - example output from CCE in the `Tests/` directory - a `ReduceCceWorldtube` executable and YAML file for converting between - [worldtube data formats](#input_worldtube_data_formats) in the + [worldtube data formats](#input_worldtube_data_format) in the `ReduceCceWorldtube/` diretory - a python script `CheckCceOutput.py` (meant to be run from the root of the tarball and after you run the example YAML input file also in the root of the @@ -69,36 +69,106 @@ make CharacteristicExtract \note You may want to add the `-j4` flag to speed up compilation. However, be warned that this executable will need several GB of memory to build. -## Input worldtube data formats {#input_worldtube_data_formats} +## Input worldtube data format {#input_worldtube_data_format} -The worldtube data must be constructed as spheres of constant coordinate -radius, and (for the time being) written to a filename of the format -`...CceRXXXX.h5`, where the `XXXX` is to be replaced by the integer for which -the extraction radius is equal to `XXXX`M. For instance, a 100M extraction -should have filename `...CceR0100.h5`. This scheme of labeling files with the -extraction radius is constructed for compatibility with SpEC worldtube data. +In order to run the CCE executable, the worldtube data must be represented as +Bondi-Sachs variables decomposed as a subset of spin-weighted spherical harmonic +modes on a sphere of constant coordinate radius. We have chosen this format +because it is far more space-efficient to store on disk than other formats. This +section will detail the +[required data format](#required_h5_worldtube_data_format), provide options for +[converting worldtube data](#converting_worldtube_data) from other NR codes into +our format, and give insights into +[what the worldtube data should look like](#worldtube_data_looks). -Currently CCE is able to read in worldtube data in two different formats. 
+### Required H5 worldtube data format {#required_h5_worldtube_data_format} -### Cartesian metric and derivatives {#cartesian_metric_and_derivatives} +Within the H5 file that holds the worldtube data, there must be the following +datasets with these exact names (including the `.dat` suffix): -This metric data format must be provided as the following datasets: -- `gxx.dat`, `gxy.dat`, `gxz.dat`, `gyy.dat`, `gyz.dat`, `gzz.dat` -- `Drgxx.dat`, `Drgxy.dat`, `Drgxz.dat`, `Drgyy.dat`, `Drgyz.dat`, `Drgzz.dat` -- `Dtgxx.dat`, `Dtgxy.dat`, `Dtgxz.dat`, `Dtgyy.dat`, `Dtgyz.dat`, `Dtgzz.dat` -- `Shiftx.dat`, `Shifty.dat`, `Shiftz.dat` -- `DrShiftx.dat`, `DrShifty.dat`, `DrShiftz.dat` -- `DtShiftx.dat`, `DtShifty.dat`, `DtShiftz.dat` -- `Lapse.dat` -- `DrLapse.dat` -- `DtLapse.dat` +- `Beta.dat` +- `DrJ.dat` +- `DuR.dat` +- `H.dat` +- `J.dat` +- `Q.dat` +- `R.dat` +- `U.dat` +- `W.dat` -#### Spherical harmonic modes {#cartesian_spherical_modes} +Each dataset in the file must also have an attribute named `Legend` which +is an ASCII-encoded null-terminated variable-length string. That is, the HDF5 +type is: + +``` +DATATYPE H5T_STRING { + STRSIZE H5T_VARIABLE; + STRPAD H5T_STR_NULLTERM; + CSET H5T_CSET_ASCII; + CTYPE H5T_C_S1; +} +``` + +This can be checked for a dataset by running + +``` +h5dump -a Beta.dat/Legend WorldtubeFile.h5 +``` + +For the ordering of the data, we use spherical harmonic conventions documented +by the ylm::Spherepack class. Each row must start with the time stamp, and the +remaining values are the complex modes in m-varies-fastest format. For +spin-weight zero Bondi variables (`Beta`, `R`, `DuR`, `W`), we omit the +redundant negative-m modes and imaginary parts of the m=0 modes to save space on +disk. Here is an example of a legend for the spin-weight zero variables: + +``` +"time", "Re(0,0)", "Re(1,0)", "Re(1,1)", "Im(1,1)", "Re(2,0)", +"Re(2,1)", "Im(2,1)", "Re(2,2)", "Im(2,2)", "Re(3,0)", "Re(3,1)", +"Im(3,1)", "Re(3,2)", "Im(3,2)", "Re(3,3)", "Im(3,3)", ... +``` + +For non-zero spin-weight Bondi variables (`J`, `DrJ`, `H`, `Q`, `U`) we must +store all complex m-modes. Here is an example of a legend for variables where +all complex m-modes must be specified: + +``` +"time", "Re(0,0)", "Im(0,0)", "Re(1,-1)", "Im(1,-1)", "Re(1,0)", "Im(1,0)", +"Re(1,1)", "Im(1,1)", "Re(2,-2)", "Im(2,-2)", "Re(2,-1)", "Im(2,-1)", "Re(2,0)", +"Im(2,0)", "Re(2,1)", "Im(2,1)", "Re(2,2)", "Im(2,2)", ... +``` + +We don't have strict requirement on the name of the H5 file that holds the +worldtube data. However, it is recommended to name the H5 file `...CceRXXXX.h5`, +where the `XXXX` is to be replaced by the zero-padded integer for which the +extraction radius is equal to `XXXX`M. For instance, a 100M extraction should +have filename `...CceR0100.h5`. If you do not adhere to this naming convention, +you will need to specify the extraction radius in your YAML input file. + +\note This scheme of labeling files with the extraction radius is constructed +for compatibility with worldtube data from the SXS Collaboration's SpEC code. + +### Converting to the required H5 format {#converting_worldtube_data} + +Unless you are using worldtube data that was generated from SpECTRE (or SpEC), +it's possible that your worldtube data is not in the correct format. We allow +conversion into our data format from a few other data formats using the +[`ReduceCceWorldtube` executable provided](#acquiring_the_cce_module). 
These are + +- Nodal cartesian metric data (which we refer to as "metric nodal") +- Modal cartesian metric data (which we refer to as "metric modal") +- Nodal Bondi-Sachs data (which we refer to as "bondi nodal") + +Requirements for these data formats are listed below. + +#### Spherical harmonic modes {#spherical_modes} + +When we refer to a "modal" data format, we mean that the worldtube data are +stored as spherical harmonic coefficients (a.k.a. modes). We use spherical +harmonic conventions documented by the ylm::Spherepack class. For each dataset, +each row must start with the time stamp, and the remaining values are the +complex modes in m-varies-fastest format. That is, -In this format, the worldtube data are stored as spherical harmonic -coefficients. We use spherical harmonic conventions documented by the -ylm::Spherepack class. Each row must start with the time stamp, and the -remaining values are the complex modes in m-varies-fastest format. That is, ``` "time", "Lapse_Re(0,0)", "Lapse_Im(0,0)", "Lapse_Re(1,1)", "Lapse_Im(1,1)", "Lapse_Re(1,0)", "Lapse_Im(1,0)", @@ -108,102 +178,80 @@ remaining values are the complex modes in m-varies-fastest format. That is, "Lapse_Re(2,-2)", "Lapse_Im(2,-2)" ``` -#### Spherical harmonic nodes {#cartesian_spherical_nodes} +Each dataset in the H5 file must also have an attribute +named `Legend` which is an ASCII-encoded null-terminated variable-length string. -\warning This format is not yet fully supported but is under development. If you -need it please file an issue so we can escalate the priority. +##### Spherical harmonic nodes {#spherical_nodes} -In this format the value of the functions at specially chosen collocation points -(grid points) is read in. This allows SpECTRE to perform integrals, derivatives, -and interpolation exactly on the input data. These grid points are -Gauss-Legendre in $cos(\theta)$ and equally spaced in $\phi$. Below is a routine -for computing the spherical harmonic $\theta$ and $\phi$ values. These can be -used to compute the Cartesian locations for a given radius using the standard -transformation. The routine supports \f$\ell\in[4, 32]\f$. +When we refer to a "nodal" data format, we mean that the worldtube data are +stored as values at specially chosen collocation points (a.k.a. grid points or +nodes). This allows SpECTRE to perform integrals, derivatives, and interpolation +exactly on the input data. These grid points are Gauss-Legendre in $cos(\theta)$ +and equally spaced in $\phi$. + +Below is a routine for computing the spherical +harmonic $\theta$ and $\phi$ values. These can be used to compute the Cartesian +locations for a given radius using the standard transformation. The routine +supports \f$\ell\in[4, 32]\f$.
C Code for computing SpECTRE CCE gridpoint locations \snippet Test_Spherepack.cpp spectre_cce_grid_point_locations
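If your pipeline is in Python rather than C, the following is a minimal sketch
of the same construction, assuming `l_max + 1` Gauss-Legendre points in
$\cos(\theta)$, `2 * l_max + 1` equally spaced points in $\phi$ starting at
$\phi = 0$, and treating the C snippet above as authoritative for the exact
$\theta$ ordering:

```
# Minimal sketch only: Gauss-Legendre nodes in cos(theta) and equally
# spaced phi, mapped to Cartesian points on a sphere of the given radius.
# Compare the theta ordering against the C snippet above before relying
# on this.
import numpy as np


def cce_collocation_points(l_max, radius):
    # l_max + 1 Gauss-Legendre nodes x = cos(theta)
    nodes, _ = np.polynomial.legendre.leggauss(l_max + 1)
    theta = np.arccos(nodes)
    # 2 * l_max + 1 equally spaced phi values starting at phi = 0
    phi = 2.0 * np.pi * np.arange(2 * l_max + 1) / (2 * l_max + 1)
    # theta-varies-fastest ordering, matching the dataset columns below,
    # converted to Cartesian coordinates by the standard transformation
    return np.array([(radius * np.sin(t) * np.cos(p),
                      radius * np.sin(t) * np.sin(p),
                      radius * np.cos(t))
                     for p in phi
                     for t in theta])
```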
-Each `dat` file holds `1 + (l_max + 1) * (2 * l_max + 1)` columns, with the +Each dataset holds `1 + (l_max + 1) * (2 * l_max + 1)` columns, with the first one being the `time`. The columns must be in \f$\theta\f$-varies-fastest ordering. That is, + ``` "time", -"Theta_0_Phi_0", "Theta_1_Phi_0", "Theta_2_Phi_0", "Theta_3_Phi_0", -"Theta_4_Phi_0", -"Theta_0_Phi_1", "Theta_1_Phi_1", "Theta_2_Phi_1", "Theta_3_Phi_1", -"Theta_4_Phi_1", +"Phi_0_Theta_0", "Phi_0_Theta_1", "Phi_0_Theta_2", "Phi_0_Theta_3", "Phi_0_Theta_4", +"Phi_1_Theta_0", "Phi_1_Theta_1", "Phi_1_Theta_2", "Phi_1_Theta_3", "Phi_1_Theta_4", ``` +Each dataset in the H5 file must also have an attribute +named `Legend` which is an ASCII-encoded null-terminated variable-length string. -#### Formatting of data types +\note Nodal data is likely the easiest to write out since no conversion to +spherical harmonic coefficients is necessary. -Each dataset in the file must also have an attribute named "Legend" which -is an ASCII-encoded null-terminated variable-length string. That is, the HDF5 -type is: -``` -DATATYPE H5T_STRING { - STRSIZE H5T_VARIABLE; - STRPAD H5T_STR_NULLTERM; - CSET H5T_CSET_ASCII; - CTYPE H5T_C_S1; -} -``` -This can be checked for a dataset by running -``` -h5dump -a DrLapse.dat/Legend CceR0150.h5 -``` +#### Cartesian metric and derivatives {#cartesian_metric_and_derivatives} -### Bondi-Sachs {#bondi_sachs} +For worldtube data stored in an H5 file in either the "metric nodal" or "metric +modal" formats, there must be the following datasets with these exact names +(including the `.dat` suffix): -The second format is Bondi-Sachs metric component data. -This format is far more space-efficient (by around a factor of 4) than the -[cartesian_metric](#cartesian_metric_and_derivatives) format. - -The format is similar to the -[cartesian_metric](#cartesian_metric_and_derivatives) format, except in -spin-weighted spherical harmonic modes, and the real (spin-weight-0) quantities -omit the redundant negative-m modes and imaginary parts of m=0 modes. -The quantities that must be provided by the Bondi-Sachs metric data format are: -- `Beta.dat` -- `DrJ.dat` -- `DuR.dat` -- `H.dat` -- `J.dat` -- `Q.dat` -- `R.dat` -- `U.dat` -- `W.dat` +- `gxx.dat`, `gxy.dat`, `gxz.dat`, `gyy.dat`, `gyz.dat`, `gzz.dat` +- `Drgxx.dat`, `Drgxy.dat`, `Drgxz.dat`, `Drgyy.dat`, `Drgyz.dat`, `Drgzz.dat` +- `Dtgxx.dat`, `Dtgxy.dat`, `Dtgxz.dat`, `Dtgyy.dat`, `Dtgyz.dat`, `Dtgzz.dat` +- `Shiftx.dat`, `Shifty.dat`, `Shiftz.dat` +- `DrShiftx.dat`, `DrShifty.dat`, `DrShiftz.dat` +- `DtShiftx.dat`, `DtShifty.dat`, `DtShiftz.dat` +- `Lapse.dat` +- `DrLapse.dat` +- `DtLapse.dat` -An example of the columns of these dat files is +The layout of each of these datasets must be in either +[spherical harmonic modes](#spherical_modes) or +[spherical harmonic nodes](#spherical_nodes). -``` -"time", "Re(0,0)", "Re(1,0)", "Re(1,1)", "Im(1,1)", "Re(2,0)", -"Re(2,1)", "Im(2,1)", "Re(2,2)", "Im(2,2)", "Re(3,0)", "Re(3,1)", -"Im(3,1)", "Re(3,2)", "Im(3,2)", "Re(3,3)", "Im(3,3)", ... -``` +#### Bondi-Sachs {#bondi_sachs} -See \cite Moxon2020gha for a description of these quantities. +In the "bondi nodal" format, you must have the same Bondi variables as the +[required format](#required_h5_worldtube_data_format), but each variable layout +must be the [spherical harmonic nodal layout](#spherical_nodes). -\note The columns of the legend of the -[cartesian_metric](#cartesian_metric_and_derivatives) format are different from -the [Bondi-Sachs](#bondi_sachs) format. 
In the
-[cartesian_metric](#cartesian_metric_and_derivatives), the name of the quantity
-is in the legend, while for [Bondi-Sachs](#bondi_sachs) it isn't.
+If you already have data in the
+[required "bondi modal" format](#required_h5_worldtube_data_format), then
+nothing needs to be done.

-### Converting data formats
+#### Running the ReduceCceWorldtube executable

-Since the [Bondi-Sachs](#bondi_sachs) format is far more space-efficient,
-SpECTRE provides a separate executable for converting from the
-[cartesian_metric](#cartesian_metric_and_derivatives) format to the
-[Bondi-Sachs](#bondi_sachs) worldtube format called `ReduceCceWorldtube`.
-The `ReduceCceWorldtube` executable should be run on a
-[cartesian_metric](#cartesian_metric_and_derivatives) worldtube file, and will
-produce a corresponding 'reduced' Bondi-Sachs worldtube file.
-This executable works similarly to our other executables by accepting a YAML
-input file:
+The `ReduceCceWorldtube` executable should be run on any of the
+[allowed input formats](#converting_worldtube_data), and will produce a
+corresponding Bondi-Sachs worldtube file that can be read in by CCE. This
+executable works similarly to our other executables by accepting a YAML input
+file:

```
ReduceCceWorldtube --input-file ReduceCceWorldtube.yaml
@@ -213,15 +261,29 @@ with a YAML file

\snippet ReduceCceWorldtube.yaml reduce_cce_worldtube_yaml_doxygen_example

-The option `LMaxFactor` determines the factor by which the resolution of
-the boundary computation that is run will exceed the resolution of the
-input and output files.
-Empirically, we have found that `LMaxFactor` of 3 is sufficient to achieve
-roundoff precision in all boundary data we have attempted, and an `LMaxFactor`
-of 2 is usually sufficient to vastly exceed the precision of the simulation that
-provided the boundary dataset.
-
-### What Worldtube data "should" look like
+In addition to converting worldtube data formats, `ReduceCceWorldtube` also
+accepts multiple input worldtube H5 files that have sequential times (e.g. from
+different checkpoints) and will combine the times from all H5 files alongside
+converting the worldtube data format. If there are duplicate or overlapping
+times, the last/latest of the times are chosen. If you pass multiple input
+worldtube H5 files, it is assumed that they are ordered by increasing time.
+
+Here are some notes about the different options in the YAML input file:
+
+- If the extraction radius is in the `InputH5File` names, then the
+  `ExtractionRadius` option can be `Auto`. Otherwise, it must be specified.
+- The option `LMaxFactor` determines the factor by which the resolution of the
+  boundary computation that is run will exceed the resolution of the input and
+  output files. Empirically, we have found that `LMaxFactor` of 3 is sufficient
+  to achieve roundoff precision in all boundary data we have attempted, and an
+  `LMaxFactor` of 2 is usually sufficient to vastly exceed the precision of the
+  simulation that provided the boundary dataset.
+- `FixSpecNormalization` should always be `False` unless you are using a
+  particularly old version of SpEC.
+- `BufferDepth` is an advanced option that lets you load more data into RAM at
+  once so there are fewer filesystem accesses.
+
+### What Worldtube data "should" look like {#worldtube_data_looks}

While no two simulations will look exactly the same, there are some general
trends in the worldtube data to look for.
Here is a plot of some modes of the @@ -347,7 +409,7 @@ with h5py.File(input_file,'r') as input_h5,\ ``` The rechunked data will still be in the same -[format](#input_worldtube_data_formats) as before, but will just have a +[format](#input_worldtube_data_format) as before, but will just have a different underlying structure in the H5 file that makes it faster to read in. ## Running the CCE executable {#running_the_cce_executable} diff --git a/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp b/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp index bac41f151d83..80c4017f67f9 100644 --- a/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp +++ b/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp @@ -2,31 +2,44 @@ // See LICENSE.txt for details. #include +#include #include #include #include +#include +#include #include "DataStructures/ComplexDataVector.hpp" #include "DataStructures/ComplexModalVector.hpp" #include "DataStructures/DataBox/DataBox.hpp" +#include "DataStructures/DataBox/PrefixHelpers.hpp" #include "DataStructures/DataBox/Tag.hpp" #include "DataStructures/DataVector.hpp" #include "DataStructures/SpinWeighted.hpp" #include "DataStructures/Variables.hpp" #include "DataStructures/VariablesTag.hpp" #include "Evolution/Systems/Cce/BoundaryData.hpp" +#include "Evolution/Systems/Cce/ExtractionRadius.hpp" #include "Evolution/Systems/Cce/SpecBoundaryData.hpp" #include "Evolution/Systems/Cce/Tags.hpp" #include "Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp" #include "Evolution/Systems/Cce/WorldtubeModeRecorder.hpp" +#include "IO/H5/CombineH5.hpp" +#include "IO/H5/Dat.hpp" +#include "IO/Logging/Verbosity.hpp" #include "NumericalAlgorithms/SpinWeightedSphericalHarmonics/SwshCoefficients.hpp" #include "NumericalAlgorithms/SpinWeightedSphericalHarmonics/SwshCollocation.hpp" +#include "NumericalAlgorithms/SpinWeightedSphericalHarmonics/SwshTags.hpp" #include "Options/Auto.hpp" +#include "Options/Options.hpp" #include "Options/ParseOptions.hpp" #include "Options/String.hpp" #include "Parallel/CreateFromOptions.hpp" #include "Parallel/Printf/Printf.hpp" +#include "Utilities/ErrorHandling/Error.hpp" +#include "Utilities/FileSystem.hpp" #include "Utilities/Gsl.hpp" +#include "Utilities/Overloader.hpp" #include "Utilities/TMPL.hpp" #include "Utilities/TaggedTuple.hpp" @@ -35,102 +48,137 @@ extern "C" void CkRegisterMainModule(void) {} namespace { -using metric_input_tags = Cce::cce_metric_input_tags; -// from a time-varies-fastest set of buffers provided by +// Convenient tag lists +using modal_metric_input_tags = Cce::cce_metric_input_tags; +using nodal_metric_input_tags = Cce::cce_metric_input_tags; +using modal_bondi_input_tags = Cce::Tags::worldtube_boundary_tags_for_writing< + Spectral::Swsh::Tags::SwshTransform>; +using nodal_bondi_input_tags = + Cce::Tags::worldtube_boundary_tags_for_writing; + +// from a data-varies-fastest set of buffers provided by // `MetricWorldtubeH5BufferUpdater` extract the set of coefficients for a // particular time given by `buffer_time_offset` into the `time_span` size of // buffer. 
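// The incoming buffers hold Goldberg (l, m) modes in m-varies-fastest order
// (the on-disk layout described in the CCE tutorial); the output is the
// libsharp coefficient representation used by the spin-weighted transforms.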
void slice_buffers_to_libsharp_modes( - const gsl::not_null*> coefficients_set, - const Variables& coefficients_buffers, - const size_t time_span, const size_t buffer_time_offset, const size_t l_max, - const size_t computation_l_max) { - SpinWeighted spin_weighted_buffer; - - const auto convert_modes = [&](const ComplexModalVector& coefficients_buffer, - const auto& libsharp_mode) { - if (libsharp_mode.l > l_max) { - Spectral::Swsh::goldberg_modes_to_libsharp_modes_single_pair( - libsharp_mode, make_not_null(&spin_weighted_buffer), 0, 0.0, 0.0); - - } else { - Spectral::Swsh::goldberg_modes_to_libsharp_modes_single_pair( - libsharp_mode, make_not_null(&spin_weighted_buffer), 0, - coefficients_buffer[time_span * - Spectral::Swsh::goldberg_mode_index( - l_max, libsharp_mode.l, - static_cast(libsharp_mode.m)) + - buffer_time_offset], - coefficients_buffer[time_span * - Spectral::Swsh::goldberg_mode_index( - l_max, libsharp_mode.l, - -static_cast(libsharp_mode.m)) + - buffer_time_offset]); + const gsl::not_null*> coefficients_set, + const Variables& coefficients_buffers, + const size_t buffer_time_offset, const size_t computation_l_max) { + SpinWeighted goldberg_mode_buffer; + SpinWeighted libsharp_mode_buffer; + + const size_t goldberg_mode_size = square(computation_l_max + 1); + const size_t libsharp_mode_size = + Spectral::Swsh::size_of_libsharp_coefficient_vector(computation_l_max); + + for (size_t i = 0; i < 3; ++i) { + for (size_t j = i; j < 3; ++j) { + tmpl::for_each>>( + [&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + const auto& all_goldberg_modes = + get(coefficients_buffers).get(i, j); + auto& all_libsharp_modes = get(*coefficients_set).get(i, j); + + // NOLINTBEGIN + goldberg_mode_buffer.set_data_ref( + const_cast(all_goldberg_modes).data() + + buffer_time_offset * goldberg_mode_size, + goldberg_mode_size); + libsharp_mode_buffer.set_data_ref(all_libsharp_modes.data(), + libsharp_mode_size); + // NOLINTEND + + Spectral::Swsh::goldberg_to_libsharp_modes( + make_not_null(&libsharp_mode_buffer), goldberg_mode_buffer, + computation_l_max); + }); } - }; - - for (const auto& libsharp_mode : - Spectral::Swsh::cached_coefficients_metadata(computation_l_max)) { - for (size_t i = 0; i < 3; ++i) { - for (size_t j = i; j < 3; ++j) { - tmpl::for_each>>( - [&](auto tag_v) { - using tag = typename decltype(tag_v)::type; - spin_weighted_buffer.set_data_ref( - get(*coefficients_set).get(i, j).data(), - Spectral::Swsh::size_of_libsharp_coefficient_vector( - computation_l_max)); - - convert_modes(get(coefficients_buffers).get(i, j), - libsharp_mode); - }); - } - tmpl::for_each>>([&](auto tag_v) { - using tag = typename decltype(tag_v)::type; - spin_weighted_buffer.set_data_ref( - get(*coefficients_set).get(i).data(), - Spectral::Swsh::size_of_libsharp_coefficient_vector( - computation_l_max)); - - convert_modes(get(coefficients_buffers).get(i), libsharp_mode); - }); - } - tmpl::for_each>>([&](auto tag_v) { + tmpl::for_each>>([&](auto tag_v) { using tag = typename decltype(tag_v)::type; - spin_weighted_buffer.set_data_ref( - get(get(*coefficients_set)).data(), - Spectral::Swsh::size_of_libsharp_coefficient_vector( - computation_l_max)); - - convert_modes(get(get(coefficients_buffers)), libsharp_mode); + const auto& all_goldberg_modes = get(coefficients_buffers).get(i); + auto& all_libsharp_modes = get(*coefficients_set).get(i); + + // NOLINTBEGIN + goldberg_mode_buffer.set_data_ref( + const_cast(all_goldberg_modes).data() + + buffer_time_offset * goldberg_mode_size, + 
goldberg_mode_size); + libsharp_mode_buffer.set_data_ref(all_libsharp_modes.data(), + libsharp_mode_size); + // NOLINTEND + + Spectral::Swsh::goldberg_to_libsharp_modes( + make_not_null(&libsharp_mode_buffer), goldberg_mode_buffer, + computation_l_max); }); } + tmpl::for_each>>([&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + const auto& all_goldberg_modes = get(get(coefficients_buffers)); + auto& all_libsharp_modes = get(get(*coefficients_set)); + + // NOLINTBEGIN + goldberg_mode_buffer.set_data_ref( + const_cast(all_goldberg_modes).data() + + buffer_time_offset * goldberg_mode_size, + goldberg_mode_size); + libsharp_mode_buffer.set_data_ref(all_libsharp_modes.data(), + libsharp_mode_size); + // NOLINTEND + + Spectral::Swsh::goldberg_to_libsharp_modes( + make_not_null(&libsharp_mode_buffer), goldberg_mode_buffer, + computation_l_max); + }); +} + +template +void write_bondi_data_to_disk( + const gsl::not_null recorder, + const BoundaryData& nodal_boundary_data, const double time, + const size_t data_l_max) { + tmpl::for_each([&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + const ComplexDataVector& nodal_data = + get(get(nodal_boundary_data)).data(); + + recorder->append_modal_data( + Cce::dataset_label_for_tag(), time, nodal_data, + data_l_max); + }); } // read in the data from a (previously standard) SpEC worldtube file // `input_file`, perform the boundary computation, and dump the (considerably -// smaller) dataset associated with the spin-weighted scalars to `output_file`. +// smaller) dataset associated with the spin-weighted scalars to +// `output_file`. void perform_cce_worldtube_reduction( const std::string& input_file, const std::string& output_file, - const size_t buffer_depth, const size_t l_max_factor, - const bool fix_spec_normalization = false) { + const size_t input_buffer_depth, const size_t l_max_factor, + const std::optional& extraction_radius, + const bool fix_spec_normalization) { Cce::MetricWorldtubeH5BufferUpdater buffer_updater{ - input_file}; + input_file, extraction_radius, fix_spec_normalization}; const size_t l_max = buffer_updater.get_l_max(); - // Perform the boundary computation to scalars at twice the input l_max to be - // absolutely certain that there are no problems associated with aliasing. + // Perform the boundary computation to scalars at some factor > 1 of the input + // l_max to be absolutely certain that there are no problems associated with + // aliasing. const size_t computation_l_max = l_max_factor * l_max; - // we're not interpolating, this is just a reasonable number of rows to ingest - // at a time. - const size_t size_of_buffer = square(l_max + 1) * (buffer_depth); const DataVector& time_buffer = buffer_updater.get_time_buffer(); - - Variables coefficients_buffers{size_of_buffer}; - Variables coefficients_set{ + // We're not interpolating in time, this is just a reasonable number of rows + // to ingest at a time. 
If the buffer depth from the input file is larger than + // the number of times we have, just use the number of times + const size_t buffer_depth = std::min(time_buffer.size(), input_buffer_depth); + const size_t size_of_buffer = square(computation_l_max + 1) * buffer_depth; + + Variables coefficients_buffers{size_of_buffer}; + Variables coefficients_set{ Spectral::Swsh::size_of_libsharp_coefficient_vector(computation_l_max)}; Variables(create_boundary_data); + tmpl::as_pack(create_boundary_data); + + write_bondi_data_to_disk(make_not_null(&recorder), boundary_data_variables, + time, computation_l_max); + } +} + +template +tuples::tagged_tuple_from_typelist +create_bondi_nodal_views(const Variables& bondi_boundary_data, + const size_t time_offset, const size_t l_max) { + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(l_max); + tuples::tagged_tuple_from_typelist result; + + tmpl::for_each([&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + make_const_view( + make_not_null(&std::as_const(get(tuples::get(result)).data())), + get(get(bondi_boundary_data)).data(), + time_offset * number_of_angular_points, number_of_angular_points); + }); + + return result; +} + +template +tuples::tagged_tuple_from_typelist +create_metric_nodal_views(const Variables& bondi_boundary_data, + const size_t time_offset, const size_t l_max) { + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(l_max); + tuples::tagged_tuple_from_typelist result; + + for (size_t i = 0; i < 3; ++i) { + for (size_t j = i; j < 3; ++j) { + tmpl::for_each>>([&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + make_const_view( + make_not_null(&std::as_const(tuples::get(result).get(i, j))), + get(bondi_boundary_data).get(i, j), + time_offset * number_of_angular_points, number_of_angular_points); + }); + } + tmpl::for_each>>([&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + make_const_view( + make_not_null(&std::as_const(tuples::get(result).get(i))), + get(bondi_boundary_data).get(i), + time_offset * number_of_angular_points, number_of_angular_points); + }); + } + + tmpl::for_each< + Cce::Tags::detail::apply_derivs_t>>( + [&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + make_const_view( + make_not_null(&std::as_const(get(tuples::get(result)))), + get(get(bondi_boundary_data)), + time_offset * number_of_angular_points, number_of_angular_points); + }); + + return result; +} + +void bondi_nodal_to_bondi_modal( + const std::string& input_file, const std::string& output_file, + const size_t input_buffer_depth, + const std::optional& extraction_radius) { + Cce::BondiWorldtubeH5BufferUpdater buffer_updater{ + input_file, extraction_radius}; + const size_t l_max = buffer_updater.get_l_max(); + + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(l_max); + const DataVector& time_buffer = buffer_updater.get_time_buffer(); + // We're not interpolating in time, this is just a reasonable number of rows + // to ingest at a time. 
If the buffer depth from the input file is larger than + // the number of times we have, just use the number of times + const size_t buffer_depth = std::min(time_buffer.size(), input_buffer_depth); + const size_t size_of_buffer = buffer_depth * number_of_angular_points; + + Variables nodal_buffer{size_of_buffer}; + + size_t time_span_start = 0; + size_t time_span_end = 0; + Cce::WorldtubeModeRecorder recorder{l_max, output_file}; - // loop over the tags that we want to dump. - tmpl::for_each>( - [&recorder, &boundary_data_variables, &computation_l_max, - &time](auto tag_v) { - using tag = typename decltype(tag_v)::type; + for (size_t i = 0; i < time_buffer.size(); i++) { + const double time = time_buffer[i]; + buffer_updater.update_buffers_for_time( + make_not_null(&nodal_buffer), make_not_null(&time_span_start), + make_not_null(&time_span_end), time, l_max, 0, buffer_depth, false); - const ComplexDataVector& nodal_data = - get(get(boundary_data_variables)).data(); + const auto nodal_data_at_time = + create_bondi_nodal_views(nodal_buffer, i - time_span_start, l_max); - recorder.append_modal_data( - Cce::dataset_label_for_tag(), time, nodal_data, - computation_l_max); - }); + write_bondi_data_to_disk(make_not_null(&recorder), nodal_data_at_time, time, + l_max); + } +} + +void metric_nodal_to_bondi_modal( + const std::string& input_file, const std::string& output_file, + const size_t input_buffer_depth, + const std::optional& extraction_radius) { + Cce::MetricWorldtubeH5BufferUpdater buffer_updater{ + input_file, extraction_radius, false}; + const size_t l_max = buffer_updater.get_l_max(); + + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(l_max); + const DataVector& time_buffer = buffer_updater.get_time_buffer(); + // We're not interpolating in time, this is just a reasonable number of rows + // to ingest at a time. If the buffer depth from the input file is larger than + // the number of times we have, just use the number of times + const size_t buffer_depth = std::min(time_buffer.size(), input_buffer_depth); + const size_t size_of_buffer = buffer_depth * number_of_angular_points; + + Variables nodal_buffer{size_of_buffer}; + + Variables> + boundary_data_variables{ + Spectral::Swsh::number_of_swsh_collocation_points(l_max)}; + + size_t time_span_start = 0; + size_t time_span_end = 0; + Cce::WorldtubeModeRecorder recorder{l_max, output_file}; + + for (size_t i = 0; i < time_buffer.size(); i++) { + const double time = time_buffer[i]; + + buffer_updater.update_buffers_for_time( + make_not_null(&nodal_buffer), make_not_null(&time_span_start), + make_not_null(&time_span_end), time, l_max, 0, buffer_depth, false); + + const auto metric_nodal_data_at_time = + create_metric_nodal_views(nodal_buffer, i - time_span_start, l_max); + + tmpl::as_pack([&](const auto&... 
tags) { + Cce::create_bondi_boundary_data( + make_not_null(&boundary_data_variables), + get>>( + metric_nodal_data_at_time)..., + buffer_updater.get_extraction_radius(), l_max); + }); + + write_bondi_data_to_disk(make_not_null(&recorder), boundary_data_variables, + time, l_max); + } +} + +enum class InputDataFormat { MetricNodal, MetricModal, BondiNodal, BondiModal }; + +std::ostream& operator<<(std::ostream& os, + const InputDataFormat input_data_format) { + switch (input_data_format) { + case InputDataFormat::MetricNodal: + return os << "MetricNodal"; + case InputDataFormat::MetricModal: + return os << "MetricModal"; + case InputDataFormat::BondiNodal: + return os << "BondiNodal"; + case InputDataFormat::BondiModal: + return os << "BondiModal"; + default: + ERROR("Unknown InputDataFormat type"); } - Parallel::printf("\n"); } namespace OptionTags { -struct InputH5File { - using type = std::string; +struct InputH5Files { + static std::string name() { return "InputH5File"; } + using type = std::variant>; static constexpr Options::String help = - "Name of the H5 worldtube file. A '.h5' extension will be added if " - "needed."; + "Name of H5 worldtube file(s). A '.h5' extension will be added if " + "needed. Can specify a single file or if multiple files are specified, " + "this will combine the times in each file. If there are " + "duplicate/overlapping times, the last/latest of the times are chosen."; +}; + +struct InputDataFormat { + using type = ::InputDataFormat; + static constexpr Options::String help = + "The type of data stored in the 'InputH5Files'. Can be 'MetricNodal', " + "'MetricModal', 'BondiNodal', or 'BondiModal'."; }; struct OutputH5File { @@ -204,12 +419,24 @@ struct OutputH5File { "Name of output H5 file. A '.h5' extension will be added if needed."; }; +struct ExtractionRadius { + using type = Options::Auto; + static constexpr Options::String help = + "The radius of the spherical worldtube. " + "If the 'InputH5File' supplied ends with '_RXXXX.h5' (where XXXX is the " + "zero-padded extraction radius rounded to the nearest integer), then " + "this option should be 'Auto'. If the extraction radius is not supplied " + "in the 'InputH5File' name, then this option must be supplied. If the " + "extraction radius is supplied in the 'InputH5File' name, and this " + "option is specified, then this option will take precedence."; +}; + struct FixSpecNormalization { using type = bool; static constexpr Options::String help = "Apply corrections associated with documented SpEC worldtube file " - "errors. If you are using worldtube data from SpECTRE or from another NR " - "code but in the SpECTRE format, then this option must be 'False'"; + "errors. If you are using worldtube data from SpECTRE or from another " + "NR code but in the SpECTRE format, then this option must be 'False'"; }; struct BufferDepth { @@ -224,27 +451,45 @@ struct LMaxFactor { using type = Options::Auto; static constexpr Options::String help = "The boundary computations will be performed at a resolution that is " - "'LMaxFactor' times the input file LMax to avoid aliasing. Set to 'Auto' " - "to use a default value (2)."; + "'LMaxFactor' times the input file LMax to avoid aliasing. 
Set to " + "'Auto' to use a default value (2)."; }; } // namespace OptionTags using option_tags = - tmpl::list; using OptionTuple = tuples::tagged_tuple_from_typelist; namespace ReduceCceTags { -struct InputH5File : db::SimpleTag { - using type = std::string; - using option_tags = tmpl::list; +struct InputH5Files : db::SimpleTag { + using type = std::vector; + using option_tags = tmpl::list; static constexpr bool pass_metavariables = false; - static type create_from_options(std::string option) { - if (not option.ends_with(".h5")) { - option += ".h5"; + static type create_from_options( + const std::variant>& option) { + std::vector result = std::visit( + Overloader{[](const std::vector& input) { return input; }, + [](const std::string& input) { return std::vector{input}; }}, + option); + for (std::string& filename : result) { + if (not filename.ends_with(".h5")) { + filename += ".h5"; + } } - return option; + + return result; + } +}; + +struct InputDataFormat : db::SimpleTag { + using type = ::InputDataFormat; + using option_tags = tmpl::list; + static constexpr bool pass_metavariables = false; + static type create_from_options(type input_data_format) { + return input_data_format; } }; @@ -260,6 +505,15 @@ struct OutputH5File : db::SimpleTag { } }; +struct ExtractionRadius : db::SimpleTag { + using type = std::optional; + using option_tags = tmpl::list; + static constexpr bool pass_metavariables = false; + static type create_from_options(const std::optional& option) { + return option; + } +}; + struct FixSpecNormalization : db::SimpleTag { using type = bool; using option_tags = tmpl::list; @@ -286,12 +540,35 @@ struct LMaxFactor : db::SimpleTag { }; } // namespace ReduceCceTags -using tags = tmpl::list; +using tags = + tmpl::list; using TagsTuple = tuples::tagged_tuple_from_typelist; } // namespace +// Has to be outside the anon namespace +template <> +struct Options::create_from_yaml { + template + static InputDataFormat create(const Options::Option& options) { + const auto ordering = options.parse_as(); + if (ordering == "MetricNodal") { + return InputDataFormat::MetricNodal; + } else if (ordering == "MetricModal") { + return InputDataFormat::MetricModal; + } else if (ordering == "BondiNodal") { + return InputDataFormat::BondiNodal; + } else if (ordering == "BondiModal") { + return InputDataFormat::BondiModal; + } + PARSE_ERROR(options.context(), + "InputDataFormat must be 'MetricNodal', 'MetricModal', " + "'BondiNodal', or 'BondiModal'"); + } +}; + /* * This executable is used for converting the unnecessarily large SpEC worldtube * data format into a far smaller representation (roughly a factor of 4) just @@ -335,7 +612,7 @@ int main(int argc, char** argv) { const std::string input_yaml = vars["input-file"].as(); // Actually parse the yaml. This does a check if it exists. - parser.parse_file(input_yaml); + parser.parse_file(input_yaml, false); // First create option tags, and then actual tags. 
const OptionTuple options = parser.template apply( @@ -343,13 +620,98 @@ int main(int argc, char** argv) { const TagsTuple inputs = Parallel::create_from_options(options, tags{}); - // Do the reduction - perform_cce_worldtube_reduction( - tuples::get(inputs), - tuples::get(inputs), - tuples::get(inputs), - tuples::get(inputs), - tuples::get(inputs)); + const InputDataFormat input_data_format = + tuples::get(inputs); + const std::vector& input_files = + tuples::get(inputs); + const std::string& output_h5_file = + tuples::get(inputs); + + std::optional temporary_combined_h5_file{}; + + if (input_files.size() != 1) { + // If the input format is BondiModal, then we don't actually have to do + // any transformations, only combining H5 files. So the temporary file + // name is just the output file + if (input_data_format == InputDataFormat::BondiModal) { + temporary_combined_h5_file = output_h5_file; + } else { + // Otherwise we have to do a transformation so a temporary H5 file is + // necessary. Name the file based on the current time so it doesn't + // conflict with another h5 file + const auto now = std::chrono::system_clock::now(); + const auto now_ns = + std::chrono::time_point_cast(now); + const auto value = now_ns.time_since_epoch(); + temporary_combined_h5_file = + "tmp_combined_" + std::to_string(value.count()) + ".h5"; + } + + // Now combine the h5 files into a single file + h5::combine_h5_dat(input_files, temporary_combined_h5_file.value(), + Verbosity::Quiet); + } else if (input_data_format == InputDataFormat::BondiModal) { + // Error here if the input data format is BondiModal since there's nothing + // to do + ERROR_NO_TRACE( + "Only a single input H5 file was supplied and the input data " + "format is BondiModal. This means that no combination needs to be " + "done and running ReduceCceWorldtube is unnecessary."); + } + + if (tuples::get(inputs) and + input_data_format != InputDataFormat::MetricModal) { + ERROR_NO_TRACE( + "The option FixSpecNormalization can only be 'true' when the input " + "data format is MetricModal. Otherwise, it must be 'false'"); + } + + const auto input_worldtube_filename = [&]() -> const std::string& { + return temporary_combined_h5_file.has_value() + ? 
temporary_combined_h5_file.value() + : input_files[0]; + }; + + const auto clean_temporary_file = [&temporary_combined_h5_file]() { + if (temporary_combined_h5_file.has_value()) { + file_system::rm(temporary_combined_h5_file.value(), false); + } + }; + + switch (input_data_format) { + case InputDataFormat::BondiModal: + // Nothing to do here because this is the desired output format and the + // H5 files were combined above + return 0; + case InputDataFormat::BondiNodal: + bondi_nodal_to_bondi_modal( + input_worldtube_filename(), output_h5_file, + tuples::get(inputs), + tuples::get(inputs)); + + clean_temporary_file(); + return 0; + case InputDataFormat::MetricModal: + perform_cce_worldtube_reduction( + input_worldtube_filename(), output_h5_file, + tuples::get(inputs), + tuples::get(inputs), + tuples::get(inputs), + tuples::get(inputs)); + + clean_temporary_file(); + return 0; + case InputDataFormat::MetricNodal: + metric_nodal_to_bondi_modal( + input_worldtube_filename(), output_h5_file, + tuples::get(inputs), + tuples::get(inputs)); + + clean_temporary_file(); + return 0; + default: + ERROR("Unknown input data format " << input_data_format); + } } catch (const std::exception& exception) { Parallel::printf("%s\n", exception.what()); return 1; diff --git a/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml b/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml index 535cf996fd5d..63571caa9cb4 100644 --- a/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml +++ b/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml @@ -2,11 +2,11 @@ # See LICENSE.txt for details. # [reduce_cce_worldtube_yaml_doxygen_example] - -InputH5File: InputFilenameR0292.h5 +InputH5File: InputFilename.h5 OutputH5File: ReducedWorldtubeR0292.h5 +InputDataFormat: MetricModal +ExtractionRadius: 292 FixSpecNormalization: False BufferDepth: Auto LMaxFactor: 3 - # [reduce_cce_worldtube_yaml_doxygen_example] diff --git a/tests/Unit/Executables/CMakeLists.txt b/tests/Unit/Executables/CMakeLists.txt index 65c61c84ef7d..47724b87db99 100644 --- a/tests/Unit/Executables/CMakeLists.txt +++ b/tests/Unit/Executables/CMakeLists.txt @@ -36,3 +36,40 @@ add_standalone_test( EXECUTABLE "TimeStepperSummary" REGEX_TO_MATCH "AdamsBashforth\\[2\\][^\\n]*N *Y" ) + +set( + TEST_REDUCECCEWORLDTUBE_LIBRARIES + Boost::boost + Cce + CceHelpers + DataStructures + GeneralRelativitySolutions + Spectral + SpinWeightedSphericalHarmonics + ) + +set(EXECUTABLE "Test_ReduceCceWorldtube") + +add_standalone_test_executable(${EXECUTABLE}) + +target_compile_definitions( + ${EXECUTABLE} + PUBLIC + BINDIR="${CMAKE_BINARY_DIR}" + ) + +target_link_libraries( + "${EXECUTABLE}" + PRIVATE + "${TEST_REDUCECCEWORLDTUBE_LIBRARIES}") + +add_test( + NAME "Unit.Executables.ReduceCceWorldtube" + COMMAND + ${SHELL_EXECUTABLE} + -c + "rm -rf ReduceCceWorldtube && + mkdir -p ReduceCceWorldtube && \ + cd ReduceCceWorldtube && \ + ${CMAKE_BINARY_DIR}/bin/${EXECUTABLE}" + ) diff --git a/tests/Unit/Executables/Test_ReduceCceWorldtube.cpp b/tests/Unit/Executables/Test_ReduceCceWorldtube.cpp new file mode 100644 index 000000000000..ef3db4243f35 --- /dev/null +++ b/tests/Unit/Executables/Test_ReduceCceWorldtube.cpp @@ -0,0 +1,488 @@ +// Distributed under the MIT License. +// See LICENSE.txt for details. 
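//
// This test writes fake worldtube data in each supported input format
// (MetricModal, MetricNodal, BondiModal, BondiNodal), runs the
// ReduceCceWorldtube executable on it (including combining two input H5 files
// with sequential times), and checks that the resulting Bondi modal output
// agrees with modes computed independently from the same analytic solution.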
+ +#include "Framework/TestingFramework.hpp" + +#include +#include +#include +#include + +#include "DataStructures/ComplexModalVector.hpp" +#include "DataStructures/DataVector.hpp" +#include "Evolution/Systems/Cce/BoundaryData.hpp" +#include "Evolution/Systems/Cce/Tags.hpp" +#include "Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp" +#include "Evolution/Systems/Cce/WorldtubeModeRecorder.hpp" +#include "Helpers/Evolution/Systems/Cce/BoundaryTestHelpers.hpp" +#include "IO/H5/AccessType.hpp" +#include "IO/H5/File.hpp" +#include "NumericalAlgorithms/SpinWeightedSphericalHarmonics/SwshCoefficients.hpp" +#include "NumericalAlgorithms/SpinWeightedSphericalHarmonics/SwshTags.hpp" +#include "PointwiseFunctions/AnalyticSolutions/GeneralRelativity/KerrSchild.hpp" +#include "Utilities/EqualWithinRoundoff.hpp" +#include "Utilities/ErrorHandling/CaptureForError.hpp" +#include "Utilities/FileSystem.hpp" +#include "Utilities/Math.hpp" + +#if defined(__GNUC__) and not defined(__clang__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmissing-declarations" +#endif // defined(__GNUC__) and not defined(__clang__) +// Need this for linking, but it doesn't do anything +extern "C" void CkRegisterMainModule() {} +#if defined(__GNUC__) and not defined(__clang__) +#pragma GCC diagnostic pop +#endif // defined(__GNUC__) and not defined(__clang__) + +namespace { +constexpr size_t number_of_times = 30; + +double compute_time(const double target_time, const size_t time_index) { + // This formula matches the one in BoundaryTestHelpers.hpp which we use to + // write some of the worldtube data to disk + return 0.1 * static_cast(time_index) + target_time - 1.5; +} + +template +void write_bondi_test_file(const std::string& worldtube_filename, + const size_t l_max, const double target_time, + const double extraction_radius, + const Solution& solution, const double amplitude, + const double frequency) { + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(l_max); + Variables> + boundary_data_variables{number_of_angular_points}; + + const size_t libsharp_size = + Spectral::Swsh::size_of_libsharp_coefficient_vector(l_max); + tnsr::ii spatial_metric_coefficients{libsharp_size}; + tnsr::ii dt_spatial_metric_coefficients{libsharp_size}; + tnsr::ii dr_spatial_metric_coefficients{libsharp_size}; + tnsr::I shift_coefficients{libsharp_size}; + tnsr::I dt_shift_coefficients{libsharp_size}; + tnsr::I dr_shift_coefficients{libsharp_size}; + Scalar lapse_coefficients{libsharp_size}; + Scalar dt_lapse_coefficients{libsharp_size}; + Scalar dr_lapse_coefficients{libsharp_size}; + + using RecorderType = + tmpl::conditional_t; + RecorderType recorder{l_max, worldtube_filename}; + for (size_t t = 0; t < number_of_times; ++t) { + const double time = compute_time(target_time, t); + Cce::TestHelpers::create_fake_time_varying_data( + make_not_null(&spatial_metric_coefficients), + make_not_null(&dt_spatial_metric_coefficients), + make_not_null(&dr_spatial_metric_coefficients), + make_not_null(&shift_coefficients), + make_not_null(&dt_shift_coefficients), + make_not_null(&dr_shift_coefficients), + make_not_null(&lapse_coefficients), + make_not_null(&dt_lapse_coefficients), + make_not_null(&dr_lapse_coefficients), solution, extraction_radius, + amplitude, frequency, time, l_max, false); + + Cce::create_bondi_boundary_data( + make_not_null(&boundary_data_variables), spatial_metric_coefficients, + dt_spatial_metric_coefficients, dr_spatial_metric_coefficients, + shift_coefficients, 
dt_shift_coefficients, dr_shift_coefficients, + lapse_coefficients, dt_lapse_coefficients, dr_lapse_coefficients, + extraction_radius, l_max); + + // loop over the tags that we want to dump. + tmpl::for_each>( + [&](auto tag_v) { + using tag = typename decltype(tag_v)::type; + + const ComplexDataVector& nodal_data = + get(get(boundary_data_variables)).data(); + + if constexpr (Modal) { + recorder.template append_modal_data( + Cce::dataset_label_for_tag(), time, + nodal_data, l_max); + } else { + // This will write nodal data + recorder.append_worldtube_mode_data( + Cce::dataset_label_for_tag(), time, + nodal_data, l_max); + } + }); + } +} + +using modal_tags = Cce::Tags::worldtube_boundary_tags_for_writing< + Spectral::Swsh::Tags::SwshTransform>; +using ExpectedDataType = std::vector>; + +template +ExpectedDataType create_expected_data(const size_t l_max, + const double target_time, + const double extraction_radius, + const Solution& solution, + const double amplitude, + const double frequency) { + const size_t computation_l_max = 3 * l_max; + const size_t number_of_angular_points = + Spectral::Swsh::number_of_swsh_collocation_points(computation_l_max); + Variables> + boundary_data_variables{number_of_angular_points}; + + const size_t libsharp_size = + Spectral::Swsh::size_of_libsharp_coefficient_vector(computation_l_max); + tnsr::ii spatial_metric_coefficients{libsharp_size}; + tnsr::ii dt_spatial_metric_coefficients{libsharp_size}; + tnsr::ii dr_spatial_metric_coefficients{libsharp_size}; + tnsr::I shift_coefficients{libsharp_size}; + tnsr::I dt_shift_coefficients{libsharp_size}; + tnsr::I dr_shift_coefficients{libsharp_size}; + Scalar lapse_coefficients{libsharp_size}; + Scalar dt_lapse_coefficients{libsharp_size}; + Scalar dr_lapse_coefficients{libsharp_size}; + + const size_t computation_modal_size = square(computation_l_max + 1); + ComplexModalVector computation_goldberg_mode_buffer{computation_modal_size}; + + const size_t modal_size = square(l_max + 1); + std::vector> result{number_of_times}; + for (size_t i = 0; i < number_of_times; i++) { + result[i] = Variables{modal_size}; + } + + for (size_t t = 0; t < number_of_times; ++t) { + const double time = compute_time(target_time, t); + // Create fake metric nodal data + Cce::TestHelpers::create_fake_time_varying_data( + make_not_null(&spatial_metric_coefficients), + make_not_null(&dt_spatial_metric_coefficients), + make_not_null(&dr_spatial_metric_coefficients), + make_not_null(&shift_coefficients), + make_not_null(&dt_shift_coefficients), + make_not_null(&dr_shift_coefficients), + make_not_null(&lapse_coefficients), + make_not_null(&dt_lapse_coefficients), + make_not_null(&dr_lapse_coefficients), solution, extraction_radius, + amplitude, frequency, time, computation_l_max, false); + + // Convert to Bondi nodal + Cce::create_bondi_boundary_data( + make_not_null(&boundary_data_variables), spatial_metric_coefficients, + dt_spatial_metric_coefficients, dr_spatial_metric_coefficients, + shift_coefficients, dt_shift_coefficients, dr_shift_coefficients, + lapse_coefficients, dt_lapse_coefficients, dr_lapse_coefficients, + extraction_radius, computation_l_max); + + // Convert to Bondi modal + tmpl::for_each([&](auto tag_v) { + using wrapped_tag = tmpl::type_from; + using tag = typename wrapped_tag::tag; + constexpr int Spin = tag::type::type::spin; + + SpinWeighted nodal_data_view; + nodal_data_view.set_data_ref( + make_not_null(&const_cast( // NOLINT + get(get>(boundary_data_variables)) + .data()))); + SpinWeighted goldberg_modes; + 
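      // Alias the scratch Goldberg-mode buffer; the libsharp-to-Goldberg
      // conversion below writes its result directly into this view.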
goldberg_modes.set_data_ref(computation_goldberg_mode_buffer.data(), + computation_modal_size); + + // First transform to coefficients using swsh_transform, and then convert + // libsharp coefficients into modes + Spectral::Swsh::libsharp_to_goldberg_modes( + make_not_null(&goldberg_modes), + Spectral::Swsh::swsh_transform(computation_l_max, 1, nodal_data_view), + computation_l_max); + + // Restrict the values back to the correct modal size from the computation + // modal size + ComplexModalVector goldberg_mode_view; + goldberg_mode_view.set_data_ref(goldberg_modes.data().data(), modal_size); + + get(get(result[t])).data() = goldberg_mode_view; + }); + } + + return result; +} + +// Accuracy to which we check the data +constexpr double epsilon = 1.e-12; + +void check_expected_data(const std::string& output_filename, const size_t l_max, + const ExpectedDataType& expected_data, + const ExpectedDataType& second_expected_data, + const double target_time, + const double second_target_time, + const bool check_second_expected_data) { + const size_t expected_number_of_times = + check_second_expected_data ? 2 * number_of_times : number_of_times; + const h5::H5File output_file{output_filename}; + + CAPTURE_FOR_ERROR(output_filename); + CAPTURE_FOR_ERROR(epsilon); + + tmpl::for_each([&](auto tag_v) { + using wrapped_tag = tmpl::type_from; + using tag = typename wrapped_tag::tag; + constexpr int Spin = tag::type::type::spin; + constexpr bool is_real = Spin == 0; + + const std::string dataset_name = Cce::dataset_label_for_tag(); + const auto& bondi_dat_subfile = output_file.get(dataset_name); + CAPTURE_FOR_ERROR(dataset_name); + const auto bondi_data = + bondi_dat_subfile.get_data>>(); + + SPECTRE_PARALLEL_REQUIRE(bondi_data.size() == expected_number_of_times); + + const auto check_data_for_target_time = [&](const double local_target_time, + const auto& local_expected_data, + const size_t offset) { + for (size_t t = 0; t < number_of_times; t++) { + const double exptected_time = compute_time(local_target_time, t); + const auto& expected_bondi_var = + get(get(local_expected_data[t])).data(); + const std::vector& bondi_var = bondi_data[offset + t]; + + CAPTURE_FOR_ERROR(bondi_var[0]); + CAPTURE_FOR_ERROR(t); + CAPTURE_FOR_ERROR(exptected_time); + SPECTRE_PARALLEL_REQUIRE(bondi_var[0] == exptected_time); + + (void)expected_bondi_var; + + CAPTURE_FOR_ERROR(expected_bondi_var); + CAPTURE_FOR_ERROR(bondi_var); + + for (int l = 0; l <= static_cast(l_max); l++) { + for (int m = (is_real ? 0 : -l); m <= l; m++) { + const size_t goldberg_index = Spectral::Swsh::goldberg_mode_index( + l_max, static_cast(l), m); + + CAPTURE_FOR_ERROR(l); + CAPTURE_FOR_ERROR(m); + CAPTURE_FOR_ERROR(goldberg_index); + CAPTURE_FOR_ERROR(expected_bondi_var[goldberg_index]); + std::complex written_mode{}; + size_t matrix_index = 0; + if (is_real) { + if (m == 0) { + matrix_index = static_cast(square(l)); // NOLINT + written_mode = + std::complex{bondi_var[1 + matrix_index], 0.0}; + } else { + matrix_index = + static_cast(square(l) + 2 * abs(m)); // NOLINT + written_mode = + ((m > 0 or abs(m) % 2 == 0) ? 
1.0 : -1.0) * + std::complex{bondi_var[1 + matrix_index - 1], + sgn(m) * bondi_var[1 + matrix_index]}; + } + } else { + matrix_index = goldberg_index; + written_mode = std::complex{ + bondi_var[1 + 2 * matrix_index], + bondi_var[1 + 2 * matrix_index + 1], + }; + } + CAPTURE_FOR_ERROR(matrix_index); + CAPTURE_FOR_ERROR(written_mode); + SPECTRE_PARALLEL_REQUIRE(equal_within_roundoff( + expected_bondi_var[goldberg_index], written_mode, epsilon)); + } + } + } + }; + + check_data_for_target_time(target_time, expected_data, 0); + + if (check_second_expected_data) { + check_data_for_target_time(second_target_time, second_expected_data, + number_of_times); + } + + output_file.close_current_object(); + }); +} + +void write_input_file(const std::string& input_data_format, + const std::vector& input_worldtube_filenames, + const std::string& output_filename, + const std::optional& worldtube_radius) { + std::string input_file = + "# Distributed under the MIT License.\n" + "# See LICENSE.txt for details.\n" + "\n" + "InputH5File: "; + + if (input_worldtube_filenames.size() > 1) { + input_file += "["; + } + + for (size_t i = 0; i < input_worldtube_filenames.size(); i++) { + input_file += input_worldtube_filenames[i]; + if (i != input_worldtube_filenames.size() - 1) { + input_file += ", "; + } + } + + if (input_worldtube_filenames.size() > 1) { + input_file += "]"; + } + + input_file += "\nOutputH5File: " + output_filename + "\n"; + input_file += "InputDataFormat: " + input_data_format + "\n"; + input_file += + "ExtractionRadius: " + + (worldtube_radius.has_value() ? std::to_string(worldtube_radius.value()) + : "Auto") + + "\n"; + input_file += + "FixSpecNormalization: False\n" + "BufferDepth: Auto\n" + "LMaxFactor: 3\n"; + + std::ofstream yaml_file(input_data_format + ".yaml"); + yaml_file << input_file; + yaml_file.close(); +} +} // namespace + +int main() { + const size_t l_max = 16; + const double target_time = 20.0; + const double second_target_time = target_time + 20.0; + const double worldtube_radius = 123.0; + // These are just to create fake data + const double frequency = 0.01; + const double amplitude = 0.01; + + const double mass = 3.5; + const std::array spin{-0.3, -0.2, 0.1}; + const std::array center{0.0, 0.0, 0.0}; + const gr::Solutions::KerrSchild solution{mass, spin, center}; + + // Input worldtube H5 filenames + // Some have the worldtube radius and some don't to test the ExtractionRadius + // option + const std::string metric_modal_input_worldtube_filename{ + "Test_InputMetricModal_R0123.h5"}; + const std::string metric_nodal_1_input_worldtube_filename{ + "Test_InputMetricNodal_1.h5"}; + const std::string metric_nodal_2_input_worldtube_filename{ + "Test_InputMetricNodal_2.h5"}; + const std::string bondi_modal_1_input_worldtube_filename{ + "Test_InputBondiModal_1_R0123.h5"}; + const std::string bondi_modal_2_input_worldtube_filename{ + "Test_InputBondiModal_2_R0123.h5"}; + const std::string bondi_nodal_input_worldtube_filename{ + "Test_InputBondiNodal_R0123.h5"}; + + // Output worldtube H5 filenames + const std::string metric_modal_output_worldtube_filename{ + "Test_OutputMetricModal_R0123.h5"}; + const std::string metric_nodal_output_worldtube_filename{ + "Test_OutputMetricNodal.h5"}; + const std::string bondi_modal_output_worldtube_filename{ + "Test_OutputBondiModal_R0123.h5"}; + const std::string bondi_nodal_output_worldtube_filename{ + "Test_OutputBondiNodal_R0123.h5"}; + + // Write metric data + Cce::TestHelpers::write_test_file( + solution, metric_modal_input_worldtube_filename, 
target_time, + worldtube_radius, frequency, amplitude, l_max, false); + Cce::TestHelpers::write_test_file( + solution, metric_nodal_1_input_worldtube_filename, target_time, + worldtube_radius, frequency, amplitude, l_max, false); + Cce::TestHelpers::write_test_file( + solution, metric_nodal_2_input_worldtube_filename, second_target_time, + worldtube_radius, frequency, amplitude, l_max, false); + + // Write bondi data + write_bondi_test_file(bondi_modal_1_input_worldtube_filename, l_max, + target_time, worldtube_radius, solution, + amplitude, frequency); + write_bondi_test_file(bondi_modal_2_input_worldtube_filename, l_max, + second_target_time, worldtube_radius, solution, + amplitude, frequency); + write_bondi_test_file(bondi_nodal_input_worldtube_filename, l_max, + target_time, worldtube_radius, solution, + amplitude, frequency); + + // Write input file + write_input_file("MetricModal", {metric_modal_input_worldtube_filename}, + metric_modal_output_worldtube_filename, std::nullopt); + write_input_file("MetricNodal", + {metric_nodal_1_input_worldtube_filename, + metric_nodal_2_input_worldtube_filename}, + metric_nodal_output_worldtube_filename, {worldtube_radius}); + write_input_file("BondiModal", + {bondi_modal_1_input_worldtube_filename, + bondi_modal_2_input_worldtube_filename}, + bondi_modal_output_worldtube_filename, std::nullopt); + write_input_file("BondiNodal", {bondi_nodal_input_worldtube_filename}, + bondi_nodal_output_worldtube_filename, {worldtube_radius}); + +// Get path to executable with a macro set in CMakeLists.txt +#ifdef BINDIR + std::string executable = BINDIR; +#else + std::string executable = "nothing"; + ERROR( + "BINDIR preprocessor macro not set from CMake. Something is wrong with " + "the build system."); +#endif + if (not executable.ends_with("/")) { + executable += "/"; + } + executable += "bin/ReduceCceWorldtube"; + + const auto call_reduce_cce_worldtube = + [&](const std::string& input_data_format) { + const std::string to_execute = executable + " --input-file " + + input_data_format + ".yaml > " + + input_data_format + ".out 2>&1"; + + const int exit_code = std::system(to_execute.c_str()); // NOLINT + + SPECTRE_PARALLEL_REQUIRE(exit_code == 0); + (void)exit_code; + }; + + // Call ReduceCceWorldtube in a shell + call_reduce_cce_worldtube("MetricModal"); + call_reduce_cce_worldtube("MetricNodal"); + call_reduce_cce_worldtube("BondiModal"); + call_reduce_cce_worldtube("BondiNodal"); + + // Create the expected bondi modal data + const auto expected_data = create_expected_data( + l_max, target_time, worldtube_radius, solution, amplitude, frequency); + const auto second_expected_data = + create_expected_data(l_max, second_target_time, worldtube_radius, + solution, amplitude, frequency); + + // Check that the expected bondi modal data is what was written in the output + // files for the different InputDataFormats + check_expected_data(metric_modal_output_worldtube_filename, l_max, + expected_data, second_expected_data, target_time, + second_target_time, false); + check_expected_data(metric_nodal_output_worldtube_filename, l_max, + expected_data, second_expected_data, target_time, + second_target_time, true); + check_expected_data(bondi_modal_output_worldtube_filename, l_max, + expected_data, second_expected_data, target_time, + second_target_time, true); + check_expected_data(bondi_nodal_output_worldtube_filename, l_max, + expected_data, second_expected_data, target_time, + second_target_time, false); + + return 0; +} From f9628378df86ff004de3c58c2b21de171f82a0f8 
Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Fri, 13 Dec 2024 12:12:48 -0800 Subject: [PATCH 2/3] Add static exec for writing worldtube coords to file --- .github/workflows/DeployStaticExecutables.yaml | 4 ++++ containers/Dockerfile.buildenv | 1 + docs/Tutorials/CCE.md | 11 +++++++++++ 3 files changed, 16 insertions(+) diff --git a/.github/workflows/DeployStaticExecutables.yaml b/.github/workflows/DeployStaticExecutables.yaml index 3e5ba5326a6f..2bcfba6a7a3e 100644 --- a/.github/workflows/DeployStaticExecutables.yaml +++ b/.github/workflows/DeployStaticExecutables.yaml @@ -72,6 +72,10 @@ jobs: docker cp static-execs:/work/spectre/build/bin/ReduceCceWorldtube ./CceExecutables/ReduceCceWorldtube/ + + docker cp + static-execs:/work/spectre/build/bin/WriteCceWorldtubeCoordsToFile + ./CceExecutables/ReduceCceWorldtube/ - name: Test CCE executable outside of container run: | mv BondiSachsCceR0200.h5 ./CceExecutables/Tests/ diff --git a/containers/Dockerfile.buildenv b/containers/Dockerfile.buildenv index 93bdcd9e7e67..2fbed649f086 100644 --- a/containers/Dockerfile.buildenv +++ b/containers/Dockerfile.buildenv @@ -570,4 +570,5 @@ RUN if [ ${UBUNTU_VERSION} != 18.04 ] && [ "$TARGETARCH" != "amd64" ]; then \ -D USE_CCACHE=OFF \ .. \ && make ${PARALLEL_MAKE_ARG} CharacteristicExtract ReduceCceWorldtube \ + WriteCceWorldtubeCoordsToFile \ && ctest -LE unit -R CharacteristicExtract diff --git a/docs/Tutorials/CCE.md b/docs/Tutorials/CCE.md index bb88003678b6..7ef5616ae6b1 100644 --- a/docs/Tutorials/CCE.md +++ b/docs/Tutorials/CCE.md @@ -27,6 +27,9 @@ release). Inside this tarball is - a `ReduceCceWorldtube` executable and YAML file for converting between [worldtube data formats](#input_worldtube_data_format) in the `ReduceCceWorldtube/` diretory +- a `WriteCceWorldtubeCoordsToFile` executable that writes + [grid points on a sphere](#spherical_nodes) to a text file in the + `ReduceCceWorldtube/` directory - a python script `CheckCceOutput.py` (meant to be run from the root of the tarball and after you run the example YAML input file also in the root of the tarball) that will check if the example output is correct @@ -199,6 +202,14 @@ supports \f$\ell\in[4, 32]\f$. \snippet Test_Spherepack.cpp spectre_cce_grid_point_locations +Alternatively, if your code can read in grid points from a text file, you can +run the `WriteCceWorldtubeCoordsToFile` executable like so to get a text file +with three columns for the x,y,z coordinates of each point. + +``` +./WriteCceWorldtubeCoordsToFile -r 200 -L 16 -o GridPointsR200.txt +``` + Each dataset holds `1 + (l_max + 1) * (2 * l_max + 1)` columns, with the first one being the `time`. The columns must be in \f$\theta\f$-varies-fastest ordering. 
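To make the \f$\theta\f$-varies-fastest layout concrete, here is a minimal standalone sketch. It is not part of the SpECTRE sources; the helper name `grid_point_column` is made up, and it assumes `l_max + 1` polar points and `2 * l_max + 1` azimuthal points, consistent with the column count quoted above.

```
#include <cstddef>

// Illustration only: column index (within one row, where column 0 is the
// time) of the grid point with polar index theta_index in [0, l_max] and
// azimuthal index phi_index in [0, 2 * l_max], with theta varying fastest.
constexpr std::size_t grid_point_column(const std::size_t l_max,
                                        const std::size_t theta_index,
                                        const std::size_t phi_index) {
  return 1 + phi_index * (l_max + 1) + theta_index;
}

// For l_max = 16 the last grid point lands in the final column:
// 1 + (l_max + 1) * (2 * l_max + 1) - 1 = 561.
static_assert(grid_point_column(16, 16, 32) == 561);
```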
That is, From 2fad23bda0842594b30c7daca97f38961756e945 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Mon, 9 Dec 2024 16:55:10 -0800 Subject: [PATCH 3/3] Rename ReduceCceWorldtube to PreprocessCceWorldtube --- .../workflows/DeployStaticExecutables.yaml | 13 +-- cmake/AddInputFileTests.cmake | 2 +- containers/Dockerfile.buildenv | 2 +- docs/Tutorials/CCE.md | 18 ++--- src/Evolution/Systems/Cce/OptionTags.hpp | 2 +- .../Systems/Cce/WorldtubeBufferUpdater.hpp | 2 +- .../Systems/Cce/WorldtubeDataManager.cpp | 2 +- .../Systems/Cce/WorldtubeDataManager.hpp | 2 +- src/Executables/CMakeLists.txt | 2 +- .../CMakeLists.txt | 4 +- .../PreprocessCceWorldtube.cpp} | 2 +- .../PreprocessCceWorldtube.yaml} | 4 +- .../Systems/Cce/Test_WorldtubeData.cpp | 79 +++++++++---------- tests/Unit/Executables/CMakeLists.txt | 14 ++-- ...be.cpp => Test_PreprocessCceWorldtube.cpp} | 14 ++-- 15 files changed, 80 insertions(+), 82 deletions(-) rename src/Executables/{ReduceCceWorldtube => PreprocessCceWorldtube}/CMakeLists.txt (85%) rename src/Executables/{ReduceCceWorldtube/ReduceCceWorldtube.cpp => PreprocessCceWorldtube/PreprocessCceWorldtube.cpp} (99%) rename tests/InputFiles/{ReduceCceWorldtube/ReduceCceWorldtube.yaml => PreprocessCceWorldtube/PreprocessCceWorldtube.yaml} (71%) rename tests/Unit/Executables/{Test_ReduceCceWorldtube.cpp => Test_PreprocessCceWorldtube.cpp} (98%) diff --git a/.github/workflows/DeployStaticExecutables.yaml b/.github/workflows/DeployStaticExecutables.yaml index 2bcfba6a7a3e..12a88342d55e 100644 --- a/.github/workflows/DeployStaticExecutables.yaml +++ b/.github/workflows/DeployStaticExecutables.yaml @@ -57,25 +57,26 @@ jobs: - name: Copy CCE executables from the container run: > mkdir CceExecutables; - mkdir ./CceExecutables/ReduceCceWorldtube; + mkdir ./CceExecutables/PreprocessCceWorldtube; mkdir ./CceExecutables/Tests; cp ./tests/InputFiles/Cce/CharacteristicExtract.yaml ./CceExecutables/CharacteristicExtract.yaml - cp ./tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml - ./CceExecutables/ReduceCceWorldtube/ReduceCceWorldtube.yaml + cp + ./tests/InputFiles/PreprocessCceWorldtube/PreprocessCceWorldtube.yaml + ./CceExecutables/PreprocessCceWorldtube/PreprocessCceWorldtube.yaml docker cp static-execs:/work/spectre/build/bin/CharacteristicExtract ./CceExecutables/ - docker cp static-execs:/work/spectre/build/bin/ReduceCceWorldtube - ./CceExecutables/ReduceCceWorldtube/ + docker cp static-execs:/work/spectre/build/bin/PreprocessCceWorldtube + ./CceExecutables/PreprocessCceWorldtube/ docker cp static-execs:/work/spectre/build/bin/WriteCceWorldtubeCoordsToFile - ./CceExecutables/ReduceCceWorldtube/ + ./CceExecutables/PreprocessCceWorldtube/ - name: Test CCE executable outside of container run: | mv BondiSachsCceR0200.h5 ./CceExecutables/Tests/ diff --git a/cmake/AddInputFileTests.cmake b/cmake/AddInputFileTests.cmake index 91b1a464a54d..961ee5519fff 100644 --- a/cmake/AddInputFileTests.cmake +++ b/cmake/AddInputFileTests.cmake @@ -249,6 +249,6 @@ configure_file( # These paths should be relative to the input file directory passed to # `add_input_file_tests` set(INPUT_FILE_WHITELIST - "ReduceCceWorldtube/ReduceCceWorldtube.yaml") + "PreprocessCceWorldtube/PreprocessCceWorldtube.yaml") add_input_file_tests("${CMAKE_SOURCE_DIR}/tests/InputFiles/" ${INPUT_FILE_WHITELIST}) diff --git a/containers/Dockerfile.buildenv b/containers/Dockerfile.buildenv index 2fbed649f086..ef035548a88e 100644 --- a/containers/Dockerfile.buildenv +++ b/containers/Dockerfile.buildenv @@ -569,6 +569,6 @@ RUN if 
[ ${UBUNTU_VERSION} != 18.04 ] && [ "$TARGETARCH" != "amd64" ]; then \ -D BUILD_DOCS=OFF \ -D USE_CCACHE=OFF \ .. \ - && make ${PARALLEL_MAKE_ARG} CharacteristicExtract ReduceCceWorldtube \ + && make ${PARALLEL_MAKE_ARG} CharacteristicExtract PreprocessCceWorldtube \ WriteCceWorldtubeCoordsToFile \ && ctest -LE unit -R CharacteristicExtract diff --git a/docs/Tutorials/CCE.md b/docs/Tutorials/CCE.md index 7ef5616ae6b1..644ef13ffa82 100644 --- a/docs/Tutorials/CCE.md +++ b/docs/Tutorials/CCE.md @@ -24,12 +24,12 @@ release). Inside this tarball is - an example set of Bondi-Sachs worldtube data in the `Tests/` directory (see [Input worldtube data formats](#input_worldtube_data_format) section) - example output from CCE in the `Tests/` directory -- a `ReduceCceWorldtube` executable and YAML file for converting between +- a `PreprocessCceWorldtube` executable and YAML file for converting between [worldtube data formats](#input_worldtube_data_format) in the - `ReduceCceWorldtube/` diretory + `PreprocessCceWorldtube/` directory - a `WriteCceWorldtubeCoordsToFile` executable that writes [grid points on a sphere](#spherical_nodes) to a text file in the - `ReduceCceWorldtube/` directory + `PreprocessCceWorldtube/` directory - a python script `CheckCceOutput.py` (meant to be run from the root of the tarball and after you run the example YAML input file also in the root of the tarball) that will check if the example output is correct @@ -156,7 +156,7 @@ for compatibility with worldtube data from the SXS Collaboration's SpEC code. Unless you are using worldtube data that was generated from SpECTRE (or SpEC), it's possible that your worldtube data is not in the correct format. We allow conversion into our data format from a few other data formats using the -[`ReduceCceWorldtube` executable provided](#acquiring_the_cce_module). These are +[`PreprocessCceWorldtube` executable provided](#acquiring_the_cce_module). These are - Nodal cartesian metric data (which we refer to as "metric nodal") - Modal cartesian metric data (which we refer to as "metric modal") @@ -256,23 +256,23 @@ If you already have data in the [required "bondi modal" format](#required_h5_worldtube_data_format), then nothing needs to be done. -#### Running the ReduceCceWorldtube executable +#### Running the PreprocessCceWorldtube executable -The `ReduceCceWorldtube` executable should be run on any of the +The `PreprocessCceWorldtube` executable should be run on any of the [allowed input formats](#converting_worldtube_data), and will produce a corresponding Bondi-Sachs worldtube file that can be read in by CCE. This executable works similarly to our other executables by accepting a YAML input file: ``` -ReduceCceWorldtube --input-file ReduceCceWorldtube.yaml +PreprocessCceWorldtube --input-file PreprocessCceWorldtube.yaml ``` with a YAML file -\snippet ReduceCceWorldtube.yaml reduce_cce_worldtube_yaml_doxygen_example +\snippet PreprocessCceWorldtube.yaml preprocess_cce_worldtube_yaml_doxygen_example -In addition to converting worldtube data formats, `ReduceCceWorldtube` also +In addition to converting worldtube data formats, `PreprocessCceWorldtube` also accepts multiple input worldtube H5 files that have sequential times (e.g. from different checkpoints) and will combine the times from all H5 files alongside converting the worldtube data format.
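As a concrete illustration of combining segments, below is a sketch of what such an input file might look like. The filenames and extraction radius are placeholders; the options mirror the example snippet referenced above.

```
# Hypothetical example (placeholder filenames): combine two sequential
# worldtube segments, e.g. from different checkpoints, while converting
# metric nodal data to the Bondi modal format.
InputH5File: [BbhSegment0CceR0200.h5, BbhSegment1CceR0200.h5]
OutputH5File: CombinedBondiCceR0200.h5
InputDataFormat: MetricNodal
ExtractionRadius: 200
FixSpecNormalization: False
BufferDepth: Auto
LMaxFactor: 3
```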
If there are duplicate or overlapping diff --git a/src/Evolution/Systems/Cce/OptionTags.hpp b/src/Evolution/Systems/Cce/OptionTags.hpp index 960d841c0746..7a1723a5fe95 100644 --- a/src/Evolution/Systems/Cce/OptionTags.hpp +++ b/src/Evolution/Systems/Cce/OptionTags.hpp @@ -348,7 +348,7 @@ struct H5WorldtubeBoundaryDataManager : db::SimpleTag { "\nDEPRECATION WARNING: Reading worldtube H5 files that are in the " "Metric data format (i.e. cartesian components of the metric and " "derivs expressed in modal coefficients) is deprecated. Convert your " - "data to the Bondi modal format using the 'ReduceCceWorldtube' " + "data to the Bondi modal format using the 'PreprocessCceWorldtube' " "executable. See https://spectre-code.org/tutorial_cce.html for " "details. Support for reading the Metric data format will be " "dropped in January 2025.\n"); diff --git a/src/Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp b/src/Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp index 1b2c698068d7..41c0cd143e74 100644 --- a/src/Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp +++ b/src/Evolution/Systems/Cce/WorldtubeBufferUpdater.hpp @@ -143,7 +143,7 @@ class KleinGordonWorldtubeH5BufferUpdater; /*! * \brief Abstract base class for utilities that are able to perform the buffer * updating procedure needed by the `WorldtubeDataManager` or by the - * `ReduceCceWorldtube` executable. + * `PreprocessCceWorldtube` executable. * * \details The methods that are required to be overridden in the derived * classes are: diff --git a/src/Evolution/Systems/Cce/WorldtubeDataManager.cpp b/src/Evolution/Systems/Cce/WorldtubeDataManager.cpp index 7d71582b6ef9..da2a897dc352 100644 --- a/src/Evolution/Systems/Cce/WorldtubeDataManager.cpp +++ b/src/Evolution/Systems/Cce/WorldtubeDataManager.cpp @@ -411,7 +411,7 @@ bool BondiWorldtubeDataManager::populate_hypersurface_boundary_data( du_r / bondi_r; // there's only a couple of tags desired by the core computation that aren't - // stored in the 'reduced' format, so we perform the remaining computation + // stored in the bondi format, so we perform the remaining computation // in-line here. const auto& du_bondi_j = get(get>>( *boundary_data_variables)); diff --git a/src/Evolution/Systems/Cce/WorldtubeDataManager.hpp b/src/Evolution/Systems/Cce/WorldtubeDataManager.hpp index da9288b3239d..745b4f7bb245 100644 --- a/src/Evolution/Systems/Cce/WorldtubeDataManager.hpp +++ b/src/Evolution/Systems/Cce/WorldtubeDataManager.hpp @@ -191,7 +191,7 @@ class MetricWorldtubeDataManager }; /*! - * \brief Manages the 'reduced' cached buffer dataset associated with a CCE + * \brief Manages the bondi cached buffer dataset associated with a CCE * worldtube and interpolates to requested time points to provide worldtube * boundary data to the main evolution routines. 
* diff --git a/src/Executables/CMakeLists.txt b/src/Executables/CMakeLists.txt index d7cfdbdfd98c..3d66de9584df 100644 --- a/src/Executables/CMakeLists.txt +++ b/src/Executables/CMakeLists.txt @@ -8,6 +8,6 @@ add_subdirectory(ExportEquationOfStateForRotNS) add_subdirectory(Examples) add_subdirectory(ExportCoordinates) add_subdirectory(ParallelInfo) -add_subdirectory(ReduceCceWorldtube) +add_subdirectory(PreprocessCceWorldtube) add_subdirectory(TimeStepperSummary) add_subdirectory(WriteCceWorldtubeCoordsToFile) diff --git a/src/Executables/ReduceCceWorldtube/CMakeLists.txt b/src/Executables/PreprocessCceWorldtube/CMakeLists.txt similarity index 85% rename from src/Executables/ReduceCceWorldtube/CMakeLists.txt rename to src/Executables/PreprocessCceWorldtube/CMakeLists.txt index 5fd6a312f648..8d71f4dfb81f 100644 --- a/src/Executables/ReduceCceWorldtube/CMakeLists.txt +++ b/src/Executables/PreprocessCceWorldtube/CMakeLists.txt @@ -1,12 +1,12 @@ # Distributed under the MIT License. # See LICENSE.txt for details. -set(EXECUTABLE ReduceCceWorldtube) +set(EXECUTABLE PreprocessCceWorldtube) add_spectre_executable( ${EXECUTABLE} EXCLUDE_FROM_ALL - ReduceCceWorldtube.cpp + PreprocessCceWorldtube.cpp ) target_link_libraries( diff --git a/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp b/src/Executables/PreprocessCceWorldtube/PreprocessCceWorldtube.cpp similarity index 99% rename from src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp rename to src/Executables/PreprocessCceWorldtube/PreprocessCceWorldtube.cpp index 80c4017f67f9..cc50206830ba 100644 --- a/src/Executables/ReduceCceWorldtube/ReduceCceWorldtube.cpp +++ b/src/Executables/PreprocessCceWorldtube/PreprocessCceWorldtube.cpp @@ -656,7 +656,7 @@ int main(int argc, char** argv) { ERROR_NO_TRACE( "Only a single input H5 file was supplied and the input data " "format is BondiModal. This means that no combination needs to be " - "done and running ReduceCceWorldtube is unnecessary."); + "done and running PreprocessCceWorldtube is unnecessary."); } if (tuples::get(inputs) and diff --git a/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml b/tests/InputFiles/PreprocessCceWorldtube/PreprocessCceWorldtube.yaml similarity index 71% rename from tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml rename to tests/InputFiles/PreprocessCceWorldtube/PreprocessCceWorldtube.yaml index 63571caa9cb4..6ada009c7da5 100644 --- a/tests/InputFiles/ReduceCceWorldtube/ReduceCceWorldtube.yaml +++ b/tests/InputFiles/PreprocessCceWorldtube/PreprocessCceWorldtube.yaml @@ -1,7 +1,7 @@ # Distributed under the MIT License. # See LICENSE.txt for details. 
-# [reduce_cce_worldtube_yaml_doxygen_example] +# [preprocess_cce_worldtube_yaml_doxygen_example] InputH5File: InputFilename.h5 OutputH5File: ReducedWorldtubeR0292.h5 InputDataFormat: MetricModal @@ -9,4 +9,4 @@ ExtractionRadius: 292 FixSpecNormalization: False BufferDepth: Auto LMaxFactor: 3 -# [reduce_cce_worldtube_yaml_doxygen_example] +# [preprocess_cce_worldtube_yaml_doxygen_example] diff --git a/tests/Unit/Evolution/Systems/Cce/Test_WorldtubeData.cpp b/tests/Unit/Evolution/Systems/Cce/Test_WorldtubeData.cpp index f4a4f665e11e..ec1e8c84ae54 100644 --- a/tests/Unit/Evolution/Systems/Cce/Test_WorldtubeData.cpp +++ b/tests/Unit/Evolution/Systems/Cce/Test_WorldtubeData.cpp @@ -234,7 +234,7 @@ class DummyBufferUpdater // NOLINT }; template -class ReducedDummyBufferUpdater +class BondiBufferUpdater : public WorldtubeBufferUpdater, Tags::worldtube_boundary_tags_for_writing< @@ -247,13 +247,13 @@ class ReducedDummyBufferUpdater Spectral::Swsh::Tags::SwshTransform>, Tags::worldtube_boundary_tags_for_writing>; - ReducedDummyBufferUpdater() = default; - ReducedDummyBufferUpdater(DataVector time_buffer, - const gr::Solutions::KerrSchild& solution, - const std::optional extraction_radius, - const double coordinate_amplitude, - const double coordinate_frequency, - const size_t l_max, const bool /*unused*/ = false) + BondiBufferUpdater() = default; + BondiBufferUpdater(DataVector time_buffer, + const gr::Solutions::KerrSchild& solution, + const std::optional extraction_radius, + const double coordinate_amplitude, + const double coordinate_frequency, const size_t l_max, + const bool /*unused*/ = false) : time_buffer_{std::move(time_buffer)}, solution_{solution}, extraction_radius_{extraction_radius}, @@ -263,9 +263,9 @@ class ReducedDummyBufferUpdater // NOLINTNEXTLINE WRAPPED_PUPable_decl_base_template(WorldtubeBufferUpdater, - ReducedDummyBufferUpdater); + BondiBufferUpdater); - explicit ReducedDummyBufferUpdater(CkMigrateMessage* /*unused*/) {} + explicit BondiBufferUpdater(CkMigrateMessage* /*unused*/) {} double update_buffers_for_time( const gsl::not_null*> buffers, @@ -358,7 +358,7 @@ class ReducedDummyBufferUpdater } std::unique_ptr> get_clone() const override { - return std::make_unique(*this); + return std::make_unique(*this); } bool time_is_outside_range(const double time) const override { @@ -413,16 +413,16 @@ class ReducedDummyBufferUpdater template PUP::able::PUP_ID Cce::DummyBufferUpdater::my_PUP_ID = 0; // NOLINT template -PUP::able::PUP_ID Cce::ReducedDummyBufferUpdater::my_PUP_ID = 0; // NOLINT +PUP::able::PUP_ID Cce::BondiBufferUpdater::my_PUP_ID = 0; // NOLINT template class Cce::DummyBufferUpdater; template class Cce::DummyBufferUpdater; -template class Cce::ReducedDummyBufferUpdater; +template class Cce::BondiBufferUpdater; namespace { template -void test_data_manager_with_dummy_buffer_updater( +void test_data_manager_with_bondi_buffer_updater( const gsl::not_null gen, const bool apply_normalization_bug = false, const bool is_spec_input = true, const std::optional extraction_radius = std::nullopt) { @@ -545,7 +545,7 @@ void test_data_manager_with_dummy_buffer_updater( } template -void test_spec_worldtube_buffer_updater_impl( +void test_metric_worldtube_buffer_updater_impl( const gsl::not_null gen, const bool extraction_radius_in_filename, const bool time_varies_fastest) { constexpr bool is_modal = std::is_same_v; @@ -659,10 +659,10 @@ void test_spec_worldtube_buffer_updater_impl( approx(target_time - 1.5 + 0.1 * i)); } - const DummyBufferUpdater dummy_buffer_updater = 
serialize_and_deserialize( + const DummyBufferUpdater bondi_buffer_updater = serialize_and_deserialize( DummyBufferUpdater{time_buffer, solution, extraction_radius, amplitude, frequency, computation_l_max}); - dummy_buffer_updater.update_buffers_for_time( + bondi_buffer_updater.update_buffers_for_time( make_not_null(&expected_coefficients_buffers), make_not_null(&time_span_start), make_not_null(&time_span_end), target_time, computation_l_max, interpolator_length, buffer_size, @@ -685,7 +685,7 @@ void test_spec_worldtube_buffer_updater_impl( } template -void test_reduced_spec_worldtube_buffer_updater_impl( +void test_bondi_worldtube_buffer_updater_impl( const gsl::not_null gen, const bool extraction_radius_in_filename, const bool time_varies_fastest) { constexpr bool is_modal = std::is_same_v; @@ -863,11 +863,10 @@ void test_reduced_spec_worldtube_buffer_updater_impl( approx(target_time - 0.1 + 0.01 * i)); } - const ReducedDummyBufferUpdater dummy_buffer_updater = - serialize_and_deserialize(ReducedDummyBufferUpdater{ - time_buffer, solution, extraction_radius, amplitude, frequency, - computation_l_max}); - dummy_buffer_updater.update_buffers_for_time( + const BondiBufferUpdater bondi_buffer_updater = serialize_and_deserialize( + BondiBufferUpdater{time_buffer, solution, extraction_radius, amplitude, + frequency, computation_l_max}); + bondi_buffer_updater.update_buffers_for_time( make_not_null(&expected_coefficients_buffers), make_not_null(&time_span_start), make_not_null(&time_span_end), target_time, computation_l_max, interpolator_length, buffer_size, @@ -900,26 +899,25 @@ void test_reduced_spec_worldtube_buffer_updater_impl( } template -void test_spec_worldtube_buffer_updater(const gsl::not_null gen) { +void test_metric_worldtube_buffer_updater(const gsl::not_null gen) { INFO("SpEC worldtube (aka metric)"); for (const auto& [extraction_radius_in_filename, time_varies_fastest] : cartesian_product(std::array{true, false}, std::array{true, false})) { - test_spec_worldtube_buffer_updater_impl( + test_metric_worldtube_buffer_updater_impl( gen, extraction_radius_in_filename, time_varies_fastest); - test_spec_worldtube_buffer_updater_impl( + test_metric_worldtube_buffer_updater_impl( gen, extraction_radius_in_filename, time_varies_fastest); } } template -void test_reduced_spec_worldtube_buffer_updater( - const gsl::not_null gen) { +void test_bondi_worldtube_buffer_updater(const gsl::not_null gen) { INFO("Reduced SpEC worldtube (aka Bondi)"); for (const auto& [extraction_radius_in_filename, time_varies_fastest] : cartesian_product(std::array{true, false}, std::array{true, false})) { - test_reduced_spec_worldtube_buffer_updater_impl( + test_bondi_worldtube_buffer_updater_impl( gen, extraction_radius_in_filename, time_varies_fastest); - test_reduced_spec_worldtube_buffer_updater_impl( + test_bondi_worldtube_buffer_updater_impl( gen, extraction_radius_in_filename, time_varies_fastest); } } @@ -943,33 +941,32 @@ SPECTRE_TEST_CASE("Unit.Evolution.Systems.Cce.ReadBoundaryDataH5", MAKE_GENERATOR(gen); { INFO("Testing buffer updaters"); - test_spec_worldtube_buffer_updater(make_not_null(&gen)); - test_reduced_spec_worldtube_buffer_updater(make_not_null(&gen)); + test_metric_worldtube_buffer_updater(make_not_null(&gen)); + test_bondi_worldtube_buffer_updater(make_not_null(&gen)); } { INFO("Testing data managers"); using DummyBufferUpdater = DummyBufferUpdater; - using ReducedDummyBufferUpdater = - ReducedDummyBufferUpdater; - test_data_manager_with_dummy_buffer_updater; + 
test_data_manager_with_bondi_buffer_updater( make_not_null(&gen)); // with normalization bug applied: - test_data_manager_with_dummy_buffer_updater( make_not_null(&gen), true, true); - test_data_manager_with_dummy_buffer_updater( make_not_null(&gen), false, true); - test_data_manager_with_dummy_buffer_updater( make_not_null(&gen), false, false); // check the case for an explicitly provided extraction radius. - test_data_manager_with_dummy_buffer_updater( make_not_null(&gen), false, false, 200.0); - test_data_manager_with_dummy_buffer_updater( + test_data_manager_with_bondi_buffer_updater( make_not_null(&gen)); } } diff --git a/tests/Unit/Executables/CMakeLists.txt b/tests/Unit/Executables/CMakeLists.txt index 47724b87db99..b7f400289a7f 100644 --- a/tests/Unit/Executables/CMakeLists.txt +++ b/tests/Unit/Executables/CMakeLists.txt @@ -38,7 +38,7 @@ add_standalone_test( ) set( - TEST_REDUCECCEWORLDTUBE_LIBRARIES + TEST_PREPROCESSCCEWORLDTUBE_LIBRARIES Boost::boost Cce CceHelpers @@ -48,7 +48,7 @@ set( SpinWeightedSphericalHarmonics ) -set(EXECUTABLE "Test_ReduceCceWorldtube") +set(EXECUTABLE "Test_PreprocessCceWorldtube") add_standalone_test_executable(${EXECUTABLE}) @@ -61,15 +61,15 @@ target_compile_definitions( target_link_libraries( "${EXECUTABLE}" PRIVATE - "${TEST_REDUCECCEWORLDTUBE_LIBRARIES}") + "${TEST_PREPROCESSCCEWORLDTUBE_LIBRARIES}") add_test( - NAME "Unit.Executables.ReduceCceWorldtube" + NAME "Unit.Executables.PreprocessCceWorldtube" COMMAND ${SHELL_EXECUTABLE} -c - "rm -rf ReduceCceWorldtube && - mkdir -p ReduceCceWorldtube && \ - cd ReduceCceWorldtube && \ + "rm -rf PreprocessCceWorldtube && + mkdir -p PreprocessCceWorldtube && \ + cd PreprocessCceWorldtube && \ ${CMAKE_BINARY_DIR}/bin/${EXECUTABLE}" ) diff --git a/tests/Unit/Executables/Test_ReduceCceWorldtube.cpp b/tests/Unit/Executables/Test_PreprocessCceWorldtube.cpp similarity index 98% rename from tests/Unit/Executables/Test_ReduceCceWorldtube.cpp rename to tests/Unit/Executables/Test_PreprocessCceWorldtube.cpp index ef3db4243f35..dc9db28dfde8 100644 --- a/tests/Unit/Executables/Test_ReduceCceWorldtube.cpp +++ b/tests/Unit/Executables/Test_PreprocessCceWorldtube.cpp @@ -442,9 +442,9 @@ int main() { if (not executable.ends_with("/")) { executable += "/"; } - executable += "bin/ReduceCceWorldtube"; + executable += "bin/PreprocessCceWorldtube"; - const auto call_reduce_cce_worldtube = + const auto call_preprocess_cce_worldtube = [&](const std::string& input_data_format) { const std::string to_execute = executable + " --input-file " + input_data_format + ".yaml > " + @@ -456,11 +456,11 @@ int main() { (void)exit_code; }; - // Call ReduceCceWorldtube in a shell - call_reduce_cce_worldtube("MetricModal"); - call_reduce_cce_worldtube("MetricNodal"); - call_reduce_cce_worldtube("BondiModal"); - call_reduce_cce_worldtube("BondiNodal"); + // Call PreprocessCceWorldtube in a shell + call_preprocess_cce_worldtube("MetricModal"); + call_preprocess_cce_worldtube("MetricNodal"); + call_preprocess_cce_worldtube("BondiModal"); + call_preprocess_cce_worldtube("BondiNodal"); // Create the expected bondi modal data const auto expected_data = create_expected_data(