diff --git a/CMakeLists.txt b/CMakeLists.txt index 6665682dd05..f9f7eab6d5a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -445,7 +445,8 @@ list(APPEND libopenmc_SOURCES # Add bundled external dependencies list(APPEND libopenmc_SOURCES src/external/quartic_solver.cpp - src/external/Faddeeva.cc) + src/external/Faddeeva.cc + src/external/LambertW.cpp) # For Visual Studio compilers if(MSVC) diff --git a/include/openmc/external/LambertW.hpp b/include/openmc/external/LambertW.hpp new file mode 100644 index 00000000000..d7f86880e7b --- /dev/null +++ b/include/openmc/external/LambertW.hpp @@ -0,0 +1,40 @@ +/* +MIT License + +Copyright (c) 2025 GuySten + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Available at https://github.com/GuySten/LambertW +*/ + +#ifndef LAMBERTW_H +#define LAMBERTW_H + +namespace LambertW { + +// Compute principal branch of the lambert-w function +extern double lambert_w0(double x); + +// Compute secondary negative branch of the lambert-w function +extern double lambert_wm1(double x); + +} // namespace LambertW + +#endif diff --git a/include/openmc/math_functions.h b/include/openmc/math_functions.h index c30ef75585a..2b93d21e3d4 100644 --- a/include/openmc/math_functions.h +++ b/include/openmc/math_functions.h @@ -200,5 +200,29 @@ std::complex faddeeva(std::complex z); //! \return Derivative of Faddeeva function evaluated at z std::complex w_derivative(std::complex z, int order); +//! Evaluate relative exponential function +//! +//! \param x Real argument +//! \return (exp(x)-1)/x without loss of precision near 0 +double exprel(double x); + +//! Evaluate relative logarithm function +//! +//! \param x Real argument +//! \return log(1+x)/x without loss of precision near 0 +double log1prel(double x); + +//! Evaluate principal branch of lambert_w function +//! +//! \param x Real argument +//! \return principal branch of lambert_w function +double lambert_w0(double x); + +//! Evaluate secondary branch of lambert_w function +//! +//! \param x Real argument +//! 
\return secondary branch of lambert_w function +double lambert_wm1(double x); + } // namespace openmc #endif // OPENMC_MATH_FUNCTIONS_H diff --git a/openmc/stats/univariate.py b/openmc/stats/univariate.py index e0475bf78fb..c36f324a8b2 100644 --- a/openmc/stats/univariate.py +++ b/openmc/stats/univariate.py @@ -10,6 +10,7 @@ import lxml.etree as ET import numpy as np from scipy.integrate import trapezoid +from scipy.special import exprel, hyp1f1, lambertw import openmc.checkvalue as cv from .._xml import get_text @@ -24,6 +25,16 @@ } +def exprel2(x): + """Evaluate 2*(exp(x)-1-x)/x^2 without loss of precision near 0""" + return hyp1f1(1, 3, x) + + +def log1prel(x): + """Evaluate log(1+x)/x without loss of precision near 0""" + return np.where(np.abs(x) < 1e-16, 1.0, np.log1p(x) / x) + + class Univariate(EqualityMixin, ABC): """Probability distribution of a single random variable. @@ -971,44 +982,76 @@ def cdf(self): c[1:] = p[:x.size-1] * np.diff(x) elif self.interpolation == 'linear-linear': c[1:] = 0.5 * (p[:-1] + p[1:]) * np.diff(x) + elif self.interpolation == "linear-log": + m = np.diff(p) / np.diff(np.log(x)) + c[1:] = p[:-1] * np.diff(x) + m * ( + x[1:] * (np.diff(np.log(x)) - 1.0) + x[:-1] + ) + elif self.interpolation == "log-linear": + m = np.diff(np.log(p)) / np.diff(x) + c[1:] = p[:-1] * np.diff(x) * exprel(m * np.diff(x)) + elif self.interpolation == "log-log": + m = np.diff(np.log(x * p)) / np.diff(np.log(x)) + c[1:] = (x * p)[:-1] * np.diff(np.log(x)) * exprel(m * np.diff(np.log(x))) else: - raise NotImplementedError('Can only generate CDFs for tabular ' - 'distributions using histogram or ' - 'linear-linear interpolation') - + raise NotImplementedError( + f"Cannot generate CDFs for tabular " + f"distributions using {self.interpolation} interpolation" + ) return np.cumsum(c) def mean(self): """Compute the mean of the tabular distribution""" - if self.interpolation == 'linear-linear': - mean = 0.0 - for i in range(1, len(self.x)): - y_min = self.p[i-1] - y_max = self.p[i] - x_min = self.x[i-1] - x_max = self.x[i] - - m = (y_max - y_min) / (x_max - x_min) - - exp_val = (1./3.) 
* m * (x_max**3 - x_min**3) - exp_val += 0.5 * m * x_min * (x_min**2 - x_max**2) - exp_val += 0.5 * y_min * (x_max**2 - x_min**2) - mean += exp_val - - elif self.interpolation == 'histogram': - x_l = self.x[:-1] - x_r = self.x[1:] - p_l = self.p[:self.x.size-1] - mean = (0.5 * (x_l + x_r) * (x_r - x_l) * p_l).sum() - else: - raise NotImplementedError('Can only compute mean for tabular ' - 'distributions using histogram ' - 'or linear-linear interpolation.') - - # Normalize for when integral of distribution is not 1 - mean /= self.integral() + # use normalized probabilities when computing mean + p = self.p / self.cdf().max() + x = self.x + x_min = x[:-1] + x_max = x[1:] + p_min = p[: x.size - 1] + p_max = p[1:] + + if self.interpolation == "linear-linear": + m = np.diff(p) / np.diff(x) + mean = ( + (1.0 / 3.0) * m * np.diff(x**3) + + 0.5 * (p_min - m * x_min) * np.diff(x**2) + ).sum() + elif self.interpolation == "linear-log": + m = np.diff(p) / np.diff(np.log(x)) + mean = ( + (1.0 / 4.0) + * m + * x_min**2 + * ((x_max / x_min) ** 2 * (2 * np.diff(np.log(x)) - 1) + 1) + + +0.5 * p_min * np.diff(x**2) + ).sum() + elif self.interpolation == "log-linear": + m = np.diff(np.log(p)) / np.diff(x) + mean = ( + p_min + * ( + np.diff(x) ** 2 + * ((0.5 * exprel2(m * np.diff(x)) * (m * np.diff(x) - 1) + 1)) + + np.diff(x) * x_min * exprel(m * np.diff(x)) + ) + ).sum() + elif self.interpolation == "log-log": + m = np.diff(np.log(p)) / np.diff(np.log(x)) + mean = ( + p_min + * x_min**2 + * np.diff(np.log(x)) + * exprel((m + 2) * np.diff(np.log(x))) + ).sum() + elif self.interpolation == "histogram": + mean = (0.5 * (x_min + x_max) * np.diff(x) * p_min).sum() + else: + raise NotImplementedError( + f"Cannot compute mean for tabular " + f"distributions using {self.interpolation} interpolation" + ) return mean def normalize(self): @@ -1067,11 +1110,56 @@ def sample(self, n_samples: int = 1, seed: int | None = None): quad[quad < 0.0] = 0.0 m[non_zero] = x_i[non_zero] + (np.sqrt(quad) - p_i[non_zero]) / m[non_zero] samples_out = m + elif self.interpolation == "linear-log": + # get variable and probability values for the + # next entry + x_i1 = self.x[cdf_idx + 1] + p_i1 = p[cdf_idx + 1] + # compute slope between entries + m = (p_i1 - p_i) / np.log(x_i1 / x_i) + # set values for zero slope + zero = m == 0.0 + m[zero] = x_i[zero] + (xi[zero] - c_i[zero]) / p_i[zero] + + positive = m > 0 + negative = m < 0 + a = p_i / m - 1 + m[positive] = ( + x_i + * ((xi - c_i) / (m * x_i) + a) + / np.real(lambertw((((xi - c_i) / (m * x_i) + a)) * np.exp(a))) + )[positive] + m[negative] = ( + x_i + * ((xi - c_i) / (m * x_i) + a) + / np.real(lambertw((((xi - c_i) / (m * x_i) + a)) * np.exp(a), -1.0)) + )[negative] + samples_out = m + elif self.interpolation == "log-linear": + # get variable and probability values for the + # next entry + x_i1 = self.x[cdf_idx + 1] + p_i1 = p[cdf_idx + 1] + # compute slope between entries + m = np.log(p_i1 / p_i) / (x_i1 - x_i) + f = (xi - c_i) / p_i + + samples_out = x_i + f * log1prel(m * f) + elif self.interpolation == "log-log": + # get variable and probability values for the + # next entry + x_i1 = self.x[cdf_idx + 1] + p_i1 = p[cdf_idx + 1] + # compute slope between entries + m = np.log((x_i1 * p_i1) / (x_i * p_i)) / np.log(x_i1 / x_i) + f = (xi - c_i) / (x_i * p_i) + samples_out = x_i * np.exp(f * log1prel(m * f)) else: - raise NotImplementedError('Can only sample tabular distributions ' - 'using histogram or ' - 'linear-linear interpolation') + raise NotImplementedError( + f"Cannot sample 
tabular distributions " + f"for {self.inteprolation} interpolation " + ) assert all(samples_out < self.x[-1]) return samples_out @@ -1135,6 +1223,23 @@ def integral(self): return np.sum(np.diff(self.x) * self.p[:self.x.size-1]) elif self.interpolation == 'linear-linear': return trapezoid(self.p, self.x) + elif self.interpolation == "linear-log": + m = np.diff(self.p) / np.diff(np.log(self.x)) + return np.sum( + self.p[:-1] * np.diff(self.x) + + m * (self.x[1:] * (np.diff(np.log(self.x)) - 1.0) + self.x[:-1]) + ) + elif self.interpolation == "log-linear": + m = np.diff(np.log(self.p)) / np.diff(self.x) + return np.sum(self.p[:-1] * np.diff(self.x) * exprel(m * np.diff(self.x))) + elif self.interpolation == "log-log": + m = np.diff(np.log(self.p)) / np.diff(np.log(self.x)) + return np.sum( + self.p[:-1] + * self.x[:-1] + * np.diff(np.log(self.x)) + * exprel((m + 1) * np.diff(np.log(self.x))) + ) else: raise NotImplementedError( f'integral() not supported for {self.inteprolation} interpolation') diff --git a/src/distribution.cpp b/src/distribution.cpp index ca00cbde4eb..3c7a9978c71 100644 --- a/src/distribution.cpp +++ b/src/distribution.cpp @@ -248,6 +248,12 @@ Tabular::Tabular(pugi::xml_node node) interp_ = Interpolation::histogram; } else if (temp == "linear-linear") { interp_ = Interpolation::lin_lin; + } else if (temp == "log-linear") { + interp_ = Interpolation::log_lin; + } else if (temp == "linear-log") { + interp_ = Interpolation::lin_log; + } else if (temp == "log-log") { + interp_ = Interpolation::log_log; } else { openmc::fatal_error( "Unsupported interpolation type for distribution: " + temp); @@ -282,13 +288,6 @@ void Tabular::init( std::copy(x, x + n, std::back_inserter(x_)); std::copy(p, p + n, std::back_inserter(p_)); - // Check interpolation parameter - if (interp_ != Interpolation::histogram && - interp_ != Interpolation::lin_lin) { - openmc::fatal_error("Only histogram and linear-linear interpolation " - "for tabular distribution is supported."); - } - // Calculate cumulative distribution function if (c) { std::copy(c, c + n, std::back_inserter(c_)); @@ -300,6 +299,22 @@ void Tabular::init( c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]); } else if (interp_ == Interpolation::lin_lin) { c_[i] = c_[i - 1] + 0.5 * (p_[i - 1] + p_[i]) * (x_[i] - x_[i - 1]); + } else if (interp_ == Interpolation::lin_log) { + double m = (p_[i] - p_[i - 1]) / std::log(x_[i] / x_[i - 1]); + c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]) + + m * (x_[i] * (std::log(x_[i] / x_[i - 1]) - 1.0) + x_[i - 1]); + } else if (interp_ == Interpolation::log_lin) { + double m = std::log(p_[i] / p_[i - 1]) / (x_[i] - x_[i - 1]); + c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]) * + exprel(m * (x_[i] - x_[i - 1])); + } else if (interp_ == Interpolation::log_log) { + double m = std::log((x_[i] * p_[i]) / (x_[i - 1] * p_[i - 1])) / + std::log(x_[i] / x_[i - 1]); + c_[i] = c_[i - 1] + x_[i - 1] * p_[i - 1] * + std::log(x_[i] / x_[i - 1]) * + exprel(m * std::log(x_[i] / x_[i - 1])); + } else { + UNREACHABLE(); } } } @@ -340,7 +355,7 @@ double Tabular::sample(uint64_t* seed) const } else { return x_i; } - } else { + } else if (interp_ == Interpolation::lin_lin) { // Linear-linear interpolation double x_i1 = x_[i + 1]; double p_i1 = p_[i + 1]; @@ -353,6 +368,40 @@ double Tabular::sample(uint64_t* seed) const (std::sqrt(std::max(0.0, p_i * p_i + 2 * m * (c - c_i))) - p_i) / m; } + } else if (interp_ == Interpolation::lin_log) { + // Linear-Log interpolation + double x_i1 = x_[i + 1]; + double p_i1 = p_[i + 
1]; + + double m = (p_i1 - p_i) / std::log(x_i1 / x_i); + double a = p_i / m - 1; + if (m > 0.0) { + return x_i * ((c - c_i) / (m * x_i) + a) / + lambert_w0(((c - c_i) / (m * x_i) + a) * std::exp(a)); + } else if (m < 0.0) { + return x_i * ((c - c_i) / (m * x_i) + a) / + lambert_wm1(((c - c_i) / (m * x_i) + a) * std::exp(a)); + } else { + return x_i + (c - c_i) / p_i; + } + } else if (interp_ == Interpolation::log_lin) { + // Log-linear interpolation + double x_i1 = x_[i + 1]; + double p_i1 = p_[i + 1]; + + double m = std::log(p_i1 / p_i) / (x_i1 - x_i); + double f = (c - c_i) / p_i; + return x_i + f * log1prel(m * f); + } else if (interp_ == Interpolation::log_log) { + // Log-Log interpolation + double x_i1 = x_[i + 1]; + double p_i1 = p_[i + 1]; + + double m = std::log((x_i1 * p_i1) / (x_i * p_i)) / std::log(x_i1 / x_i); + double f = (c - c_i) / (p_i * x_i); + return x_i * std::exp(f * log1prel(m * f)); + } else { + UNREACHABLE(); } } diff --git a/src/external/LambertW.cpp b/src/external/LambertW.cpp new file mode 100644 index 00000000000..86cfa3c4ee9 --- /dev/null +++ b/src/external/LambertW.cpp @@ -0,0 +1,872 @@ +/* +MIT License + +Copyright (c) 2025 GuySten + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
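
Side note on the linear-log branch just added to `Tabular::sample`: the closed form `x = x_i * u / W(u * exp(a))` comes from inverting the segment CDF `p_i*(x - x_i) + m*(x*log(x/x_i) - x + x_i)`. A minimal Python sketch (not the PR's code; the endpoint values are arbitrary assumptions) that checks the inversion against the analytic CDF increment:

```python
# Check the Lambert-W inversion used for a single linear-log segment,
# p(x) = p_i + m*log(x/x_i). Endpoint values are assumed for illustration.
import numpy as np
from scipy.integrate import quad
from scipy.special import lambertw

x_i, x_i1 = 1.0, 4.0          # segment endpoints (assumed)
p_i, p_i1 = 0.5, 2.0          # densities at the endpoints (assumed)
m = (p_i1 - p_i) / np.log(x_i1 / x_i)

def pdf(t):
    return p_i + m * np.log(t / x_i)

def cdf_increment(x):
    # integral of pdf from x_i to x, same closed form used when building c_
    return p_i * (x - x_i) + m * (x * (np.log(x / x_i) - 1.0) + x_i)

# pick a target CDF value inside the segment and invert it with Lambert W
target = 0.37 * cdf_increment(x_i1)
a = p_i / m - 1.0
u = target / (m * x_i) + a
x = x_i * u / np.real(lambertw(u * np.exp(a)))   # W0 branch, since m > 0 here

# the recovered x reproduces the target CDF value
assert np.isclose(cdf_increment(x), target)
assert np.isclose(quad(pdf, x_i, x)[0], target)
```

For a decreasing segment (m < 0) the argument of W can fall below -1/e's image on the principal branch, which is why the code above switches to the secondary branch W_{-1} in that case.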
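
Similarly, the log-linear and log-log branches build the segment CDF increment with exprel and invert it with log1prel. A quick sketch under the same assumptions (arbitrary endpoints; a scalar log1prel helper mirroring the one added in math_functions.cpp), verified against numerical integration:

```python
# Log-linear and log-log segment CDFs via exprel, inverted via log1prel.
import numpy as np
from scipy.integrate import quad
from scipy.special import exprel

def log1prel(x):
    # scalar variant of the guarded log(1+x)/x helper
    return 1.0 if abs(x) < 1e-16 else np.log1p(x) / x

x_i, x_i1, p_i, p_i1 = 2.0, 5.0, 1.0, 0.25   # assumed segment data

# log-linear: p(x) = p_i * exp(m*(x - x_i))
m = np.log(p_i1 / p_i) / (x_i1 - x_i)
seg = p_i * (x_i1 - x_i) * exprel(m * (x_i1 - x_i))          # CDF increment
assert np.isclose(seg, quad(lambda t: p_i * np.exp(m * (t - x_i)), x_i, x_i1)[0])
f = 0.6 * seg / p_i                                          # invert at 60% of the segment
x = x_i + f * log1prel(m * f)
assert np.isclose(quad(lambda t: p_i * np.exp(m * (t - x_i)), x_i, x)[0], 0.6 * seg)

# log-log: p(x) = p_i * (x/x_i)**k, where the code's m equals k + 1
m = np.log((x_i1 * p_i1) / (x_i * p_i)) / np.log(x_i1 / x_i)
k = m - 1.0
seg = p_i * x_i * np.log(x_i1 / x_i) * exprel(m * np.log(x_i1 / x_i))
assert np.isclose(seg, quad(lambda t: p_i * (t / x_i) ** k, x_i, x_i1)[0])
f = 0.6 * seg / (p_i * x_i)
x = x_i * np.exp(f * log1prel(m * f))
assert np.isclose(quad(lambda t: p_i * (t / x_i) ** k, x_i, x)[0], 0.6 * seg)
```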
+ +Available at https://github.com/GuySten/LambertW +*/ + +#include "openmc/external/LambertW.hpp" + +#include + +namespace LambertW { + +static constexpr double INV_E = std::exp(-1.0); + +static constexpr double SQ_INV_E = std::exp(-0.5); + +static double zk0[19] = { + +2.1820144653320312500, + +4.3246045021497925573E+1, + +5.9808565427761132714E+2, + +8.0491241056345904686E+3, + +1.1112458624177664276E+5, + +1.5870426133287885398E+6, + +2.3414708033996018338E+7, + +3.5576474271222021108E+8, + +5.5501716292484833443E+9, + +8.8674704839289895890E+10, + +1.4477791865269224022E+12, + +2.4111458632511484051E+13, + +4.0897036442600808776E+14, + +7.0555901476789968723E+15, + +1.2366607557976727250E+17, + +2.1999373487930999771E+18, + +3.9685392198344016155E+19, + +1.4127075145274652069E+104, + +2.8134195736211426913E+618, +}; + +static double zkm1[11] = { + -SQ_INV_E, + -1.8872688282289434049E-1, + -6.0497597226958343647E-2, + -1.7105334740676008194E-2, + -4.5954962127943706433E-3, + -1.2001610672197724173E-3, + -3.0728805932191499844E-4, + -7.7447159838062184354E-5, + -4.5808119698158173174E-17, + -6.1073672236594792982E-79, + -2.3703540064502081009E-453, +}; + +static double p01[9] = { + -9.9999999999999988900E-1, + -2.7399668668203659304, + +2.6164207726990399347E-2, + +6.3709168078949009170, + +7.1013286517854026680, + +2.9800826783006852573, + +4.8819596813789865043E-1, + +2.3753035787333611915E-2, + +7.7365760093772431000E-5, +}; + +static double q01[8] = { + +1, + +5.0716108484174280050, + +9.9868388183545283370, + +9.6607551922078869080, + +4.7943728991336119052, + +1.1629703477704522300, + +1.1849462500733755233E-1, + +3.4326525132402226488E-3, +}; + +static double p02[8] = { + -9.9997801800578916749E-1, + -7.0415751590483602272E-1, + +2.1232260832802529071, + +2.3896760702935718341, + +7.7765311805029175244E-1, + +8.9686698993644741433E-2, + +3.3062485753746403559E-3, + +2.5106760479132851033E-5, +}; + +static double q02[8] = { + +1, + +3.0356026828085410884, + +3.1434530151286777057, + +1.3723156566592447275, + +2.5844697415744211142E-1, + +1.9551162251819044265E-2, + +4.8775933244530123101E-4, + +2.3165116841073152717E-6, +}; + +static double p03[8] = { + -9.8967420337273506393E-1, + +5.9587680606394382748E-1, + +1.4225083018151943148, + +4.4882889168323809798E-1, + +4.4504943332390033511E-2, + +1.5218794835419578554E-3, + +1.6072263556502220023E-5, + +3.3723373020306510843E-8, +}; + +static double q03[8] = { + +1, + +1.6959402394626198052, + +8.0968573415500900896E-1, + +1.4002034999817021955E-1, + +9.3571878493790164480E-3, + +2.3251487593389773464E-4, + +1.8060170751502988645E-6, + +2.5750667337015924224E-9, +}; + +static double p04[8] = { + -7.7316491997206225517E-1, + +1.1391333504296703783, + +4.3116117255217074492E-1, + +3.5773078319037507449E-2, + +9.6441640580559092740E-4, + +8.9723854598675864757E-6, + +2.5623503144117723217E-8, + +1.4348813778416631453E-11, +}; + +static double q04[8] = { + +1 + 7.4657287456514418083E-1, + +1.2629777033419350576E-1, + +6.9741512959563184881E-3, + +1.4089339244355354892E-4, + +1.0257432883152943078E-6, + +2.2902687190119230940E-9, + +9.2794231013264501664E-13, +}; + +static double p05[8] = { + +1.2007101671553688430E-1, + +8.3352640829912822896E-1, + +7.0142775916948337582E-2, + +1.4846357985475124849E-3, + +1.0478757366110155290E-5, + +2.5715892987071038527E-8, + +1.9384214479606474749E-11, + +2.8447049039139409652E-15, +}; + +static double q05[8] = { + +1, + +2.5396738845619126630E-1, + +1.2839238907330317393E-2, + 
+2.0275375632510997371E-4, + +1.1482956073449141384E-6, + +2.3188370605674263647E-9, + +1.4271994165742563419E-12, + +1.5884836942394796961E-16, +}; + +static double p06[8] = { + +1.7221104439937710112, + +3.9919594286484275605E-1, + +7.9885540140685028937E-3, + +4.2889742253257920541E-5, + +7.8146828180529864981E-8, + +4.9819638764354682359E-11, + +9.7650889714265294606E-15, + +3.7052997281721724439E-19, +}; + +static double q06[8] = { + +1, + +7.4007438118020543008E-2, + +1.0333501506697740545E-3, + +4.4360858035727508506E-6, + +6.7822912316371041570E-9, + +3.6834356707639492021E-12, + +6.0836159560266041168E-16, + +1.8149869335981225316E-20, +}; + +static double p07[8] = { + +3.7529314023434544256, + +1.5491342690357806525E-1, + +7.5663140675900784505E-4, + +1.0271609235969979059E-6, + +4.7853247675930066150E-10, + +7.8328040770275474410E-14, + +3.9433033758391036653E-18, + +3.8232862205660283978E-23, +}; + +static double q07[8] = { + +1, + +2.0112985338854443555E-2, + +7.4712286154830141768E-5, + +8.4800598003693837469E-8, + +3.4182424130376911762E-11, + +4.8866259139690957899E-15, + +2.1223373626834634178E-19, + +1.6642985671260582515E-24, +}; + +static double p08[8] = { + +6.0196542055606555577, + +5.3496672841797864762E-2, + +6.4340849275316501519E-5, + +2.1969090100095967485E-8, + +2.5927988937033061070E-12, + +1.0779198161801527308E-16, + +1.3780424091017898301E-21, + +3.3768973150742552802E-27, +}; + +static double q08[8] = { + +1, + +5.2809683704233371675E-3, + +5.1020501219389558082E-6, + +1.5018312292270832103E-9, + +1.5677706636413188379E-13, + +5.7992041238911878361E-18, + +6.5133170770320780259E-23, + +1.3205080139213406071E-28, +}; + +static double p09[8] = { + +8.4280268500989701597, + +1.7155758546279713315E-2, + +5.0836620669829321508E-6, + +4.3354903691832581802E-10, + +1.2841017145645583385E-14, + +1.3419106769745885927E-19, + +4.3101698455492225750E-25, + +2.6422433422088187549E-31, +}; + +static double q09[8] = { + +1, + +1.3572006754595300315E-3, + +3.3535243481426203694E-7, + +2.5206969246421264128E-11, + +6.7136226273060530496E-16, + +6.3324226680854686574E-21, + +1.8128167400013774194E-26, + +9.3662030058136796889E-33, +}; + +static double p010[8] = { + +1.0931063230472498189E+1, + +5.2224234540245532982E-3, + +3.7996105711810129682E-7, + +8.0305793533410355824E-12, + +5.9139785627090605866E-17, + +1.5382020359533028724E-22, + +1.2288944126268109432E-28, + +1.8665089270660122398E-35, +}; + +static double q010[8] = { + +1, + +3.4328702551197577797E-4, + +2.1395351518538844476E-8, + +4.0524170186631594159E-13, + +2.7181424315335710420E-18, + +6.4538986638355490894E-24, + +4.6494613785888987942E-30, + +6.0442024367299387616E-37, +}; + +static double p011[8] = { + +1.3502943080893871412E+1, + +1.5284636506346264572E-3, + +2.7156967358262346166E-8, + +1.4110394051242161772E-13, + +2.5605734311219728461E-19, + +1.6421293724425337463E-25, + +3.2324944691435843553E-32, + +1.2054662641251783155E-39, +}; + +static double q011[8] = { + +1, + +8.5701512879089462255E-5, + +1.3311244435752691563E-9, + +6.2788924440385347269E-15, + +1.0483788152252204824E-20, + +6.1943499966249160886E-27, + +1.1101567860340917294E-33, + +3.5897381128308962590E-41, +}; + +static double p012[8] = { + +1.6128076167439014775E+1, + +4.3360385176467069131E-4, + +1.8696403871820916466E-9, + +2.3691795766901486045E-15, + +1.0503191826963154893E-21, + +1.6461927573606764263E-28, + +7.9138276083474522931E-36, + +7.1845890343701668760E-44, +}; + +static double q012[8] = { + +1, + 
+2.1154255263102938752E-5, + +8.1006115442323280538E-11, + +9.4155986022169905738E-17, + +3.8725127902295302254E-23, + +5.6344651115570565066E-30, + +2.4860951084210029191E-37, + +1.9788304737427787405E-45, +}; + +static double p013[8] = { + +1.8796301105534486604E+1, + +1.1989443339646469157E-4, + +1.2463377528676863250E-10, + +3.8219456858010368172E-17, + +4.1055693930252083265E-24, + +1.5595231456048464246E-31, + +1.8157173553077986962E-39, + +3.9807997764326166245E-48, +}; + +static double q013[8] = { + +1, + +5.1691031988359922329E-6, + +4.8325571823313711932E-12, + +1.3707888746916928107E-18, + +1.3754560850024480337E-25, + +4.8811882975661805184E-33, + +5.2518641828170201894E-41, + +1.0192119593134756440E-49, +}; + +static double p014[8] = { + +2.1500582830667332906E+1, + +3.2441943237735273768E-5, + +8.0764963416837559148E-12, + +5.9488445506122883523E-19, + +1.5364106187215861531E-26, + +1.4033231297002386995E-34, + +3.9259872712305770430E-43, + +2.0629086382257737517E-52, +}; + +static double q014[8] = { + +1, + +1.2515317642433850197E-6, + +2.8310314214817074806E-13, + +1.9423666416123637998E-20, + +4.7128616004157359714E-28, + +4.0433347391839945960E-36, + +1.0515141443831187271E-44, + +4.9316490935436927307E-54, +}; + +static double p015[8] = { + +2.4235812532416977267E+1, + +8.6161505995776802509E-6, + +5.1033431561868273692E-13, + +8.9642393665849638164E-21, + +5.5254364181097420777E-29, + +1.2045072724050605792E-37, + +8.0372997176526840184E-47, + +1.0049140812146492611E-56, +}; + +static double q015[8] = { + +1, + +3.0046761844749477987E-7, + +1.6309104270855463223E-14, + +2.6842271030298931329E-22, + +1.5619672632458881195E-30, + +3.2131689030397984274E-39, + +2.0032396245307684134E-48, + +2.2520274554676331938E-58, +}; + +static double p016[8] = { + +2.6998134347987436511E+1, + +2.2512257767572285866E-6, + +3.1521230759866963941E-14, + +1.3114035719790631541E-22, + +1.9156784033962366146E-31, + +9.8967003053444799163E-41, + +1.5640423898448433548E-50, + +4.6216193040664872606E-61, +}; + +static double q016[8] = { + +1, + +7.1572676370907573898E-8, + +9.2500506091115760826E-16, + +3.6239819582787573031E-24, + +5.0187712493800424118E-33, + +2.4565861988218069039E-42, + +3.6435658433991660284E-52, + +9.7432490640155346004E-63, +}; + +static double p017[8] = { + +2.9784546702831970770E+1, + +5.7971764392171329944E-7, + +1.9069872792601950808E-15, + +1.8668700870858763312E-24, + +6.4200510953370940075E-34, + +7.8076624650818968559E-44, + +2.9029638696956315654E-54, + +2.0141870458566179853E-65, +}; + +static double q017[8] = { + +1, + +1.6924463180469706372E-8, + +5.1703934311254540111E-17, + +4.7871532721560069095E-26, + +1.5664405832545149368E-35, + +1.8113137982381331398E-45, + +6.3454150289495419529E-56, + +4.0072964025244397967E-67, +}; + +static double p018[8] = { + +7.4413499460126776143E-1, + +4.1403243618005911160E-1, + +2.6012564166773416170E-1, + +2.1450457095960295520E-2, + +5.1872377264705907577E-4, + +4.3574693568319975996E-6, + +1.2363066058921706716E-8, + +9.0194147766309957537E-12, +}; + +static double q018[8] = { + +1, + +3.3487811067467010907E-1, + +2.3756834394570626395E-2, + +5.4225633008907735160E-4, + +4.4378980052579623037E-6, + +1.2436585497668099330E-8, + +9.0225825867631852215E-12, + -4.2057836270109716654E-19, +}; + +static double p019[8] = { + -6.1514412812729761526E-1, + +6.7979310133630936580E-1, + +8.9685353704585808963E-2, + +1.5644941483989379249E-3, + +7.7349901878176351162E-6, + +1.2891647546699435229E-8, + +7.0890325988973812656E-12, + 
+9.8419790334279711453E-16, +}; + +static double q019[8] = { + +1, + +9.7300263710401439315E-2, + +1.6103672748442058651E-3, + +7.8247741003077000012E-6, + +1.2949261308971345209E-8, + +7.0986911219342827130E-12, + +9.8426285042227044979E-16, + -1.5960147252606055352E-24, +}; + +static double pm1[8] = { + -1.0000000000000001110, + +4.2963016178777127009, + -4.0991407924007457612, + -6.8442842200833309724, + +1.7084773793345271001E+1, + -1.3015133123886661124E+1, + +3.9303608629539851049, + -3.4636746512247457319E-1, + +}; + +static double qm1[8] = { + +1, + -6.6279455994747624059, + +1.7740962374121397994E+1, + -2.4446872319343475890E+1, + +1.8249006287190617068E+1, + -7.0580758756624790550, + +1.1978786762794003545, + -5.3875778140352599789E-2, + +}; + +static double pm2[8] = { + -8.2253155264446844854, + -8.1320706732001487178E+2, + -1.5270113237678509000E+4, + -7.9971585089674149237E+4, + -1.0366754215808376511E+5, + +4.2284755505061257427E+4, + +7.4953525397605484884E+4, + +1.0554369146366736811E+4, + +}; + +static double qm2[8] = { + +1, + +1.4636315161669567659E+2, + +3.9124761372539240712E+3, + +3.1912693749754847460E+4, + +9.2441293717108619527E+4, + +9.4918733120470346165E+4, + +2.9531165406571745340E+4, + +1.6416808960330370987E+3, + +}; + +static double pm3[8] = { + -9.6184127443354024295, + -3.5578569043018004121E+3, + -2.5401559311284381043E+5, + -5.3923893630670639391E+6, + -3.6638257417536896798E+7, + -6.1484319486226966213E+7, + +3.0421690377446134451E+7, + +3.9728139054879320452E+7, + +}; + +static double qm3[8] = { + +1, + +5.0740525628523300801E+2, + +4.6852747159777876192E+4, + +1.3168304640091436297E+6, + +1.3111690693712415242E+7, + +4.6142116445258015195E+7, + +4.8982268956208830876E+7, + +9.1959100987983855122E+6, + +}; + +static double pm4[8] = { + -1.1038489462297466388E+1, + -1.5575812882656619195E+4, + -4.2492947304897773433E+6, + -3.5170245938803423768E+8, + -9.8659163036611364640E+9, + -8.6195372303305003908E+10, + -1.3286335574027616000E+11, + +1.5989546434420660462E+11, + +}; + +static double qm4[8] = { + +1, + +1.8370770693017166818E+3, + +6.1284097585595092761E+5, + +6.2149181398465483037E+7, + +2.2304011314443083969E+9, + +2.8254232485273698021E+10, + +1.0770866639543156165E+11, + +7.1964698876049131992E+10, + +}; + +static double pm5[8] = { + -1.2474405916395746052E+1, + -6.8180335575543773385E+4, + -7.1846599845620093278E+7, + -2.3142688221759181151E+10, + -2.5801378337945295130E+12, + -9.5182748161386314616E+13, + -8.6073250986210321766E+14, + +1.4041941853339961439E+14, + +}; + +static double qm5[8] = { + +1, + +6.8525813734431100971E+3, + +8.5153001025466544379E+6, + +3.2146028239685694655E+9, + +4.2929807417453196113E+11, + +2.0234381161638084359E+13, + +2.8699933268233923842E+14, + +7.1210136651525477096E+14, + +}; + +static double pm6[8] = { + -1.3921651376890072595E+1, + -2.9878956482388065526E+5, + -1.2313019937322092334E+9, + -1.5556149081899508970E+12, + -6.8685341106772708734E+14, + -1.0290616275933266835E+17, + -4.1404683701619648471E+18, + -1.4423309998006368397E+19, + +}; + +static double qm6[8] = { + +1, + +2.6154955236499142433E+4, + +1.2393087277442041494E+8, + +1.7832922702470761113E+11, + +9.0772608163810850446E+13, + +1.6314734740054252741E+16, + +8.8371323861233504533E+17, + +8.4166620643385013384E+18, + +}; + +static double pm7[8] = { + -1.5377894224591557534E+1, + -1.3122312005096979952E+6, + -2.1408157022111737888E+10, + -1.0718287431557811808E+14, + -1.8849353524027734456E+17, + -1.1394858607309311995E+20, + 
-1.9261555088729141590E+22, + -3.9978452086676901296E+23, + +}; + +static double qm7[8] = { + +1, + +1.0171286771760620046E+5, + +1.8728545945050381188E+9, + +1.0469617416664402757E+13, + +2.0704349060120443049E+16, + +1.4464907902386074496E+19, + +3.0510432205608900949E+21, + +1.1397589139790739717E+23, + +}; + +static double pm8[8] = { + -1.6841701411264981596E+1, + -5.7790823257577138416E+6, + -3.7757230791256404116E+11, + -7.5712133742589860941E+15, + -5.3479338916011465685E+19, + -1.3082711732297865476E+23, + -9.1462777004521427440E+25, + -8.9602768119263629340E+27, + +}; + +static double qm8[8] = { + +1, + +401820.46666230725328E+5, + +2.9211518136900492046E+10, + +6.4456135373410289079E+14, + +5.0311809576499530281E+18, + +1.3879041239716289478E+22, + +1.1575146167513516225E+25, + +1.7199220185947756654E+27, + +}; + +static double pm9[8] = { + -2.0836260384016439265, + +1.6122436242271495710, + +5.4464264959637207619, + -3.0886331128317160105, + +4.6107829155370137880E-1, + -2.3553839118456381330E-2, + +4.0538904170253404780E-4, + -1.7948156922516825458E-6, + +}; + +static double qm9[8] = { + +1, + +2.3699648912703015610, + -2.1249449707404812847, + +3.8480980098588483913E-1, + -2.1720009380176605969E-2, + +3.9405862890608636876E-4, + -1.7909312066865957905E-6, + +3.1153673308133671452E-12, + +}; + +static double pm10[8] = { + +1.6045383766570541409E-1, + +2.2214182524461514029, + -9.4119662492050892971E-1, + +9.1921523818747869300E-2, + -2.9069760533171663224E-3, + +3.2707247990255961149E-5, + -1.2486672336889893018E-7, + +1.2247438279861785291E-10, + +}; + +static double qm10[8] = { + +1, + -7.0254996087870332289E-1, + +8.0974347786703195026E-2, + -2.7469850029563153939E-3, + +3.1943362385183657062E-5, + -1.2390620687321666439E-7, + +1.2241636115168201999E-10, + -1.0275718020546765400E-17, + +}; + +static double pm11[8] = { + -1.2742179703075440564, + +1.3696658805421383765, + -1.2519345387558783223E-1, + +2.5155722460763844737E-3, + -1.5748033750499977208E-5, + +3.4316085386913786410E-8, + -2.5025242885340438533E-11, + +4.6423885014099583351E-15, + +}; + +static double qm11[8] = { + +1, + -1.1420006474152465694E-1, + +2.4285233832122595942E-3, + -1.5520907512751723152E-5, + +3.4120534760396002260E-8, + -2.4981056186450274587E-11, + +4.6419768093059706079E-15, + -1.3608713936942602985E-23, + +}; + +double evaluate_polynomial(int deg, double coeffs[], double x) +{ + double val = 0.0; + for (int i = deg; i >= 0; i--) { + val = std::fma(val, x, coeffs[i]); + } + return val; +} + +double lambert_w0(double x) +{ + if (x < -INV_E) { + return std::numeric_limits::quiet_NaN(); + } else if (x < zk0[0]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(8, p01, xc) / evaluate_polynomial(7, q01, xc); + } else if (x < zk0[1]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p02, xc) / evaluate_polynomial(7, q02, xc); + } else if (x < zk0[2]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p03, xc) / evaluate_polynomial(7, q03, xc); + } else if (x < zk0[3]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p04, xc) / evaluate_polynomial(7, q04, xc); + } else if (x < zk0[4]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p05, xc) / evaluate_polynomial(7, q05, xc); + } else if (x < zk0[5]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p06, xc) / evaluate_polynomial(7, q06, xc); + } else if (x < zk0[6]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, 
p07, xc) / evaluate_polynomial(7, q07, xc); + } else if (x < zk0[7]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p08, xc) / evaluate_polynomial(7, q08, xc); + } else if (x < zk0[8]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p09, xc) / evaluate_polynomial(7, q09, xc); + } else if (x < zk0[9]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p010, xc) / evaluate_polynomial(7, q010, xc); + } else if (x < zk0[10]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p011, xc) / evaluate_polynomial(7, q011, xc); + } else if (x < zk0[11]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p012, xc) / evaluate_polynomial(7, q012, xc); + } else if (x < zk0[12]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p013, xc) / evaluate_polynomial(7, q013, xc); + } else if (x < zk0[13]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p014, xc) / evaluate_polynomial(7, q014, xc); + } else if (x < zk0[14]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p015, xc) / evaluate_polynomial(7, q015, xc); + } else if (x < zk0[15]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p016, xc) / evaluate_polynomial(7, q016, xc); + } else if (x < zk0[16]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, p017, xc) / evaluate_polynomial(7, q017, xc); + } else if (x < zk0[17]) { + double xc = std::log(x); + return evaluate_polynomial(7, p018, xc) / evaluate_polynomial(7, q018, xc); + } else if (x < zk0[18]) { + double xc = std::log(x); + return evaluate_polynomial(7, p019, xc) / evaluate_polynomial(7, q019, xc); + } else { + return std::numeric_limits::infinity(); + } +} + +double lambert_wm1(double x) +{ + if (x < -INV_E) { + return std::numeric_limits::quiet_NaN(); + } else if (x < zkm1[0]) { + double xc = std::sqrt(x + INV_E); + return evaluate_polynomial(7, pm1, xc) / evaluate_polynomial(7, qm1, xc); + } else if (x < zkm1[1]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm2, xc) / evaluate_polynomial(7, qm2, xc); + } else if (x < zkm1[2]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm3, xc) / evaluate_polynomial(7, qm3, xc); + } else if (x < zkm1[3]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm4, xc) / evaluate_polynomial(7, qm4, xc); + } else if (x < zkm1[4]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm5, xc) / evaluate_polynomial(7, qm5, xc); + } else if (x < zkm1[5]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm6, xc) / evaluate_polynomial(7, qm6, xc); + } else if (x < zkm1[6]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm7, xc) / evaluate_polynomial(7, qm7, xc); + } else if (x < zkm1[7]) { + double xc = -x / (std::sqrt(x + INV_E) + SQ_INV_E); + return evaluate_polynomial(7, pm8, xc) / evaluate_polynomial(7, qm8, xc); + } else if (x < zkm1[8]) { + double xc = std::log(-x); + return evaluate_polynomial(7, pm9, xc) / evaluate_polynomial(7, qm9, xc); + } else if (x < zkm1[9]) { + double xc = std::log(-x); + return evaluate_polynomial(7, pm10, xc) / evaluate_polynomial(7, qm10, xc); + } else if (x < zkm1[10]) { + double xc = std::log(-x); + return evaluate_polynomial(7, pm11, xc) / evaluate_polynomial(7, qm11, xc); + } else { + return 
-std::numeric_limits::infinity(); + } +} + +} // namespace LambertW diff --git a/src/math_functions.cpp b/src/math_functions.cpp index 5469b56c87c..ce8ad84339f 100644 --- a/src/math_functions.cpp +++ b/src/math_functions.cpp @@ -1,6 +1,7 @@ #include "openmc/math_functions.h" #include "openmc/external/Faddeeva.hh" +#include "openmc/external/LambertW.hpp" #include "openmc/constants.h" #include "openmc/random_lcg.h" @@ -919,4 +920,32 @@ std::complex w_derivative(std::complex z, int order) } } +double exprel(double x) +{ + if (std::abs(x) < 1e-16) + return 1.0; + else { + return std::expm1(x) / x; + } +} + +double log1prel(double x) +{ + if (std::abs(x) < 1e-16) + return 1.0; + else { + return std::log1p(x) / x; + } +} + +double lambert_w0(double x) +{ + return LambertW::lambert_w0(x); +} + +double lambert_wm1(double x) +{ + return LambertW::lambert_wm1(x); +} + } // namespace openmc diff --git a/tests/cpp_unit_tests/test_math.cpp b/tests/cpp_unit_tests/test_math.cpp index 1ad7c4b7097..6c9257cc8fb 100644 --- a/tests/cpp_unit_tests/test_math.cpp +++ b/tests/cpp_unit_tests/test_math.cpp @@ -355,3 +355,32 @@ TEST_CASE("Test broaden_wmp_polynomials") REQUIRE_THAT(ref_val, Catch::Matchers::Approx(test_val)); } } + +TEST_CASE("Test lambert_w0") +{ + std::vector ref_inp; + std::vector ref_val {-1.0, -1e-2, -1e-4, -1e-6, -1e-8, -1e-16, 0.0, + 1e-16, 1e-8, 1e-4, 1e-2, 1.0, 1e2}; + for (double x : ref_val) { + ref_inp.push_back(x * std::exp(x)); + } + + for (int i = 0; i < ref_inp.size(); i++) { + REQUIRE_THAT(openmc::lambert_w0(ref_inp[i]), + Catch::Matchers::WithinAbs(ref_val[i], 1e-10)); + } +} + +TEST_CASE("Test lambert_wm1") +{ + std::vector ref_inp; + std::vector ref_val {-1e2, -1e1, -1e0}; + for (double x : ref_val) { + ref_inp.push_back(x * std::exp(x)); + } + + for (int i = 0; i < ref_inp.size(); i++) { + REQUIRE_THAT(openmc::lambert_wm1(ref_inp[i]), + Catch::Matchers::WithinAbs(ref_val[i], 1e-10)); + } +} diff --git a/tests/unit_tests/test_stats.py b/tests/unit_tests/test_stats.py index 386181f34d2..2e6fc9ef7eb 100644 --- a/tests/unit_tests/test_stats.py +++ b/tests/unit_tests/test_stats.py @@ -1,508 +1,504 @@ -from math import pi - -import numpy as np -import pytest -import openmc -import openmc.stats -from scipy.integrate import trapezoid - - -def assert_sample_mean(samples, expected_mean): - # Calculate sample standard deviation - std_dev = samples.std() / np.sqrt(samples.size - 1) - - # Means should agree within 4 sigma 99.993% of the time. Note that this is - # expected to fail about 1 out of 16,000 times - assert np.abs(expected_mean - samples.mean()) < 4*std_dev - - -def test_discrete(): - x = [0.0, 1.0, 10.0] - p = [0.3, 0.2, 0.5] - d = openmc.stats.Discrete(x, p) - elem = d.to_xml_element('distribution') - - d = openmc.stats.Discrete.from_xml_element(elem) - np.testing.assert_array_equal(d.x, x) - np.testing.assert_array_equal(d.p, p) - assert len(d) == len(x) - - d = openmc.stats.Univariate.from_xml_element(elem) - assert isinstance(d, openmc.stats.Discrete) - - # Single point - d2 = openmc.stats.Discrete(1e6, 1.0) - assert d2.x == [1e6] - assert d2.p == [1.0] - assert len(d2) == 1 - - vals = np.array([1.0, 2.0, 3.0]) - probs = np.array([0.1, 0.7, 0.2]) - - exp_mean = (vals * probs).sum() - - d3 = openmc.stats.Discrete(vals, probs) - - # sample discrete distribution and check that the mean of the samples is - # within 4 std. dev. 
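
The C++ unit tests above construct their reference inputs from the identity x*exp(x), so lambert_w0 and lambert_wm1 are checked via W_k(x*e^x) = x on the appropriate branch. The same property sketched with SciPy (illustrative values, not the PR's test data):

```python
# W0 inverts x*exp(x) on x >= -1; W_{-1} inverts it on x <= -1.
import numpy as np
from scipy.special import lambertw

for x in [-0.5, -1e-4, 0.0, 1e-4, 1.0, 100.0]:
    assert np.isclose(np.real(lambertw(x * np.exp(x), 0)), x, atol=1e-10)

for x in [-100.0, -10.0, -2.0]:
    assert np.isclose(np.real(lambertw(x * np.exp(x), -1)), x, atol=1e-10)
```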
of the expected mean - n_samples = 1_000_000 - samples = d3.sample(n_samples) - assert_sample_mean(samples, exp_mean) - - -def test_delta_function(): - d = openmc.stats.delta_function(14.1e6) - assert isinstance(d, openmc.stats.Discrete) - np.testing.assert_array_equal(d.x, [14.1e6]) - np.testing.assert_array_equal(d.p, [1.0]) - - -def test_merge_discrete(): - x1 = [0.0, 1.0, 10.0] - p1 = [0.3, 0.2, 0.5] - d1 = openmc.stats.Discrete(x1, p1) - - x2 = [0.5, 1.0, 5.0] - p2 = [0.4, 0.5, 0.1] - d2 = openmc.stats.Discrete(x2, p2) - - # Merged distribution should have x values sorted and probabilities - # appropriately combined. Duplicate x values should appear once. - merged = openmc.stats.Discrete.merge([d1, d2], [0.6, 0.4]) - assert merged.x == pytest.approx([0.0, 0.5, 1.0, 5.0, 10.0]) - assert merged.p == pytest.approx( - [0.6*0.3, 0.4*0.4, 0.6*0.2 + 0.4*0.5, 0.4*0.1, 0.6*0.5]) - assert merged.integral() == pytest.approx(1.0) - - # Probabilities add up but are not normalized - d1 = openmc.stats.Discrete([3.0], [1.0]) - triple = openmc.stats.Discrete.merge([d1, d1, d1], [1.0, 2.0, 3.0]) - assert triple.x == pytest.approx([3.0]) - assert triple.p == pytest.approx([6.0]) - assert triple.integral() == pytest.approx(6.0) - - -def test_clip_discrete(): - # Create discrete distribution with two points that are not important, one - # because the x value is very small, and one because the p value is very - # small - d = openmc.stats.Discrete([1e-8, 1.0, 2.0, 1000.0], [3.0, 2.0, 5.0, 1e-12]) - - # Clipping the distribution should result in two points - d_clip = d.clip(1e-6) - assert d_clip.x.size == 2 - assert d_clip.p.size == 2 - - # Make sure inplace returns same object - d_same = d.clip(1e-6, inplace=True) - assert d_same is d - - with pytest.raises(ValueError): - d.clip(-1.) - - with pytest.raises(ValueError): - d.clip(5) - - -def test_uniform(): - a, b = 10.0, 20.0 - d = openmc.stats.Uniform(a, b) - elem = d.to_xml_element('distribution') - - d = openmc.stats.Uniform.from_xml_element(elem) - assert d.a == a - assert d.b == b - assert len(d) == 2 - - t = d.to_tabular() - np.testing.assert_array_equal(t.x, [a, b]) - np.testing.assert_array_equal(t.p, [1/(b-a), 1/(b-a)]) - assert t.interpolation == 'histogram' - - # Sample distribution and check that the mean of the samples is within 4 - # std. dev. of the expected mean - exp_mean = 0.5 * (a + b) - n_samples = 1_000_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, exp_mean) - - -def test_powerlaw(): - a, b, n = 10.0, 100.0, 2.0 - d = openmc.stats.PowerLaw(a, b, n) - elem = d.to_xml_element('distribution') - - d = openmc.stats.PowerLaw.from_xml_element(elem) - assert d.a == a - assert d.b == b - assert d.n == n - assert len(d) == 3 - - # Determine mean of distribution - exp_mean = (n+1)*(b**(n+2) - a**(n+2))/((n+2)*(b**(n+1) - a**(n+1))) - - # sample power law distribution and check that the mean of the samples is - # within 4 std. dev. of the expected mean - n_samples = 1_000_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, exp_mean) - - -def test_maxwell(): - theta = 1.2895e6 - d = openmc.stats.Maxwell(theta) - elem = d.to_xml_element('distribution') - - d = openmc.stats.Maxwell.from_xml_element(elem) - assert d.theta == theta - assert len(d) == 1 - - exp_mean = 3/2 * theta - - # sample maxwell distribution and check that the mean of the samples is - # within 4 std. dev. 
of the expected mean - n_samples = 1_000_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, exp_mean) - - # A second sample starting from a different seed - samples_2 = d.sample(n_samples) - assert_sample_mean(samples_2, exp_mean) - assert samples_2.mean() != samples.mean() - - -def test_watt(): - a, b = 0.965e6, 2.29e-6 - d = openmc.stats.Watt(a, b) - elem = d.to_xml_element('distribution') - - d = openmc.stats.Watt.from_xml_element(elem) - assert d.a == a - assert d.b == b - assert len(d) == 2 - - # mean value form adapted from - # "Prompt-fission-neutron average energy for 238U(n, f ) from - # threshold to 200 MeV" Ethvignot et. al. - # https://doi.org/10.1016/j.physletb.2003.09.048 - exp_mean = 3/2 * a + a**2 * b / 4 - - # sample Watt distribution and check that the mean of the samples is within - # 4 std. dev. of the expected mean - n_samples = 1_000_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, exp_mean) - - -def test_tabular(): - # test linear-linear sampling - x = np.array([0.0, 5.0, 7.0, 10.0]) - p = np.array([10.0, 20.0, 5.0, 6.0]) - d = openmc.stats.Tabular(x, p, 'linear-linear') - n_samples = 100_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, d.mean()) - - # test linear-linear normalization - d.normalize() - assert d.integral() == pytest.approx(1.0) - - # test histogram sampling - d = openmc.stats.Tabular(x, p, interpolation='histogram') - samples = d.sample(n_samples) - assert_sample_mean(samples, d.mean()) - - d.normalize() - assert d.integral() == pytest.approx(1.0) - - # ensure that passing a set of probabilities shorter than x works - # for histogram interpolation - d = openmc.stats.Tabular(x, p[:-1], interpolation='histogram') - d.cdf() - d.mean() - assert_sample_mean(d.sample(n_samples), d.mean()) - - # passing a shorter probability set should raise an error for linear-linear - with pytest.raises(ValueError): - d = openmc.stats.Tabular(x, p[:-1], interpolation='linear-linear') - d.cdf() - - # Use probabilities of correct length for linear-linear interpolation and - # call the CDF method - d = openmc.stats.Tabular(x, p, interpolation='linear-linear') - d.cdf() - - -def test_tabular_from_xml(): - x = np.array([0.0, 5.0, 7.0, 10.0]) - p = np.array([10.0, 20.0, 5.0, 6.0]) - d = openmc.stats.Tabular(x, p, 'linear-linear') - elem = d.to_xml_element('distribution') - - d = openmc.stats.Tabular.from_xml_element(elem) - assert all(d.x == x) - assert all(d.p == p) - assert d.interpolation == 'linear-linear' - assert len(d) == len(x) - - # Make sure XML roundtrip works with len(x) == len(p) + 1 - x = np.array([0.0, 5.0, 7.0, 10.0]) - p = np.array([10.0, 20.0, 5.0]) - d = openmc.stats.Tabular(x, p, 'histogram') - elem = d.to_xml_element('distribution') - d = openmc.stats.Tabular.from_xml_element(elem) - assert all(d.x == x) - assert all(d.p == p) - - -def test_legendre(): - # Pu239 elastic scattering at 100 keV - coeffs = [1.000e+0, 1.536e-1, 1.772e-2, 5.945e-4, 3.497e-5, 1.881e-5] - d = openmc.stats.Legendre(coeffs) - assert d.coefficients == pytest.approx(coeffs) - assert len(d) == len(coeffs) - - # Integrating distribution should yield one - mu = np.linspace(-1., 1., 1000) - assert trapezoid(d(mu), mu) == pytest.approx(1.0, rel=1e-4) - - with pytest.raises(NotImplementedError): - d.to_xml_element('distribution') - - -def test_mixture(): - d1 = openmc.stats.Uniform(0, 5) - d2 = openmc.stats.Uniform(3, 7) - p = [0.5, 0.5] - mix = openmc.stats.Mixture(p, [d1, d2]) - np.testing.assert_allclose(mix.probability, p) - assert 
mix.distribution == [d1, d2] - assert len(mix) == 4 - - # Sample and make sure sample mean is close to expected mean - n_samples = 1_000_000 - samples = mix.sample(n_samples) - assert_sample_mean(samples, (2.5 + 5.0)/2) - - elem = mix.to_xml_element('distribution') - - d = openmc.stats.Mixture.from_xml_element(elem) - np.testing.assert_allclose(d.probability, p) - assert d.distribution == [d1, d2] - assert len(d) == 4 - - -def test_mixture_clip(): - # Create mixture distribution containing a discrete distribution with two - # points that are not important, one because the x value is very small, and - # one because the p value is very small - d1 = openmc.stats.Discrete([1e-8, 1.0, 2.0, 1000.0], [3.0, 2.0, 5.0, 1e-12]) - d2 = openmc.stats.Uniform(0, 5) - mix = openmc.stats.Mixture([0.5, 0.5], [d1, d2]) - - # Clipping should reduce the contained discrete distribution to 2 points - mix_clip = mix.clip(1e-6) - assert mix_clip.distribution[0].x.size == 2 - assert mix_clip.distribution[0].p.size == 2 - - # Make sure inplace returns same object - mix_same = mix.clip(1e-6, inplace=True) - assert mix_same is mix - - # Make sure clip removes low probability distributions - d_small = openmc.stats.Uniform(0., 1.) - d_large = openmc.stats.Uniform(2., 5.) - mix = openmc.stats.Mixture([1e-10, 1.0], [d_small, d_large]) - mix_clip = mix.clip(1e-3) - assert mix_clip.distribution == [d_large] - - # Make sure warning is raised if tolerance is exceeded - d1 = openmc.stats.Discrete([1.0, 1.001], [1.0, 0.7e-6]) - d2 = openmc.stats.Tabular([0.0, 1.0], [0.7e-6], interpolation='histogram') - mix = openmc.stats.Mixture([1.0, 1.0], [d1, d2]) - with pytest.warns(UserWarning): - mix_clip = mix.clip(1e-6) - - -def test_polar_azimuthal(): - # default polar-azimuthal should be uniform in mu and phi - d = openmc.stats.PolarAzimuthal() - assert isinstance(d.mu, openmc.stats.Uniform) - assert d.mu.a == -1. - assert d.mu.b == 1. - assert isinstance(d.phi, openmc.stats.Uniform) - assert d.phi.a == 0. - assert d.phi.b == 2*pi - - mu = openmc.stats.Discrete(1., 1.) - phi = openmc.stats.Discrete(0., 1.) - d = openmc.stats.PolarAzimuthal(mu, phi) - assert d.mu == mu - assert d.phi == phi - - elem = d.to_xml_element() - assert elem.tag == 'angle' - assert elem.attrib['type'] == 'mu-phi' - assert elem.find('mu') is not None - assert elem.find('phi') is not None - - d = openmc.stats.PolarAzimuthal.from_xml_element(elem) - assert d.mu.x == [1.] - assert d.mu.p == [1.] - assert d.phi.x == [0.] - assert d.phi.p == [1.] - - d = openmc.stats.UnitSphere.from_xml_element(elem) - assert isinstance(d, openmc.stats.PolarAzimuthal) - - -def test_isotropic(): - d = openmc.stats.Isotropic() - elem = d.to_xml_element() - assert elem.tag == 'angle' - assert elem.attrib['type'] == 'isotropic' - - d = openmc.stats.Isotropic.from_xml_element(elem) - assert isinstance(d, openmc.stats.Isotropic) - - -def test_monodirectional(): - d = openmc.stats.Monodirectional((1., 0., 0.)) - elem = d.to_xml_element() - assert elem.tag == 'angle' - assert elem.attrib['type'] == 'monodirectional' - - d = openmc.stats.Monodirectional.from_xml_element(elem) - assert d.reference_uvw == pytest.approx((1., 0., 0.)) - - -def test_cartesian(): - x = openmc.stats.Uniform(-10., 10.) - y = openmc.stats.Uniform(-10., 10.) - z = openmc.stats.Uniform(0., 20.) 
- d = openmc.stats.CartesianIndependent(x, y, z) - - elem = d.to_xml_element() - assert elem.tag == 'space' - assert elem.attrib['type'] == 'cartesian' - assert elem.find('x') is not None - assert elem.find('y') is not None - - d = openmc.stats.CartesianIndependent.from_xml_element(elem) - assert d.x == x - assert d.y == y - assert d.z == z - - d = openmc.stats.Spatial.from_xml_element(elem) - assert isinstance(d, openmc.stats.CartesianIndependent) - - -def test_box(): - lower_left = (-10., -10., -10.) - upper_right = (10., 10., 10.) - d = openmc.stats.Box(lower_left, upper_right) - - elem = d.to_xml_element() - assert elem.tag == 'space' - assert elem.attrib['type'] == 'box' - assert elem.find('parameters') is not None - - d = openmc.stats.Box.from_xml_element(elem) - assert d.lower_left == pytest.approx(lower_left) - assert d.upper_right == pytest.approx(upper_right) - - -def test_point(): - p = (-4., 2., 10.) - d = openmc.stats.Point(p) - - elem = d.to_xml_element() - assert elem.tag == 'space' - assert elem.attrib['type'] == 'point' - assert elem.find('parameters') is not None - - d = openmc.stats.Point.from_xml_element(elem) - assert d.xyz == pytest.approx(p) - - -def test_normal(): - mean = 10.0 - std_dev = 2.0 - d = openmc.stats.Normal(mean,std_dev) - - elem = d.to_xml_element('distribution') - assert elem.attrib['type'] == 'normal' - - d = openmc.stats.Normal.from_xml_element(elem) - assert d.mean_value == pytest.approx(mean) - assert d.std_dev == pytest.approx(std_dev) - assert len(d) == 2 - - # sample normal distribution - n_samples = 100_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, mean) - - -def test_muir(): - mean = 10.0 - mass = 5.0 - temp = 20000. - d = openmc.stats.muir(mean, mass, temp) - assert isinstance(d, openmc.stats.Normal) - - elem = d.to_xml_element('energy') - assert elem.attrib['type'] == 'normal' - - d = openmc.stats.Univariate.from_xml_element(elem) - assert isinstance(d, openmc.stats.Normal) - - # sample muir distribution - n_samples = 100_000 - samples = d.sample(n_samples) - assert_sample_mean(samples, mean) - - -def test_combine_distributions(): - # Combine two discrete (same data as in test_merge_discrete) - x1 = [0.0, 1.0, 10.0] - p1 = [0.3, 0.2, 0.5] - d1 = openmc.stats.Discrete(x1, p1) - x2 = [0.5, 1.0, 5.0] - p2 = [0.4, 0.5, 0.1] - d2 = openmc.stats.Discrete(x2, p2) - - # Merged distribution should have x values sorted and probabilities - # appropriately combined. Duplicate x values should appear once. - merged = openmc.stats.combine_distributions([d1, d2], [0.6, 0.4]) - assert isinstance(merged, openmc.stats.Discrete) - assert merged.x == pytest.approx([0.0, 0.5, 1.0, 5.0, 10.0]) - assert merged.p == pytest.approx( - [0.6*0.3, 0.4*0.4, 0.6*0.2 + 0.4*0.5, 0.4*0.1, 0.6*0.5]) - - # Probabilities add up but are not normalized - d1 = openmc.stats.Discrete([3.0], [1.0]) - triple = openmc.stats.combine_distributions([d1, d1, d1], [1.0, 2.0, 3.0]) - assert triple.x == pytest.approx([3.0]) - assert triple.p == pytest.approx([6.0]) - - # Combine discrete and tabular - t1 = openmc.stats.Tabular(x2, p2) - mixed = openmc.stats.combine_distributions([d1, t1], [0.5, 0.5]) - assert isinstance(mixed, openmc.stats.Mixture) - assert len(mixed.distribution) == 2 - assert len(mixed.probability) == 2 - - # Combine 1 discrete and 2 tabular -- the tabular distributions should - # combine to produce a uniform distribution with mean 0.5. The combined - # distribution should have a mean of 0.25. 
- t1 = openmc.stats.Tabular([0., 1.], [2.0, 0.0]) - t2 = openmc.stats.Tabular([0., 1.], [0.0, 2.0]) - d1 = openmc.stats.Discrete([0.0], [1.0]) - combined = openmc.stats.combine_distributions([t1, t2, d1], [0.25, 0.25, 0.5]) - assert combined.integral() == pytest.approx(1.0) - - # Sample the combined distribution and make sure the sample mean is within - # uncertainty of the expected value - samples = combined.sample(10_000) - assert_sample_mean(samples, 0.25) +from math import pi + +import numpy as np +import pytest +import openmc +import openmc.stats +from openmc.stats.univariate import _INTERPOLATION_SCHEMES +from scipy.integrate import trapezoid + + +def assert_sample_mean(samples, expected_mean): + # Calculate sample standard deviation + std_dev = samples.std() / np.sqrt(samples.size - 1) + + # Means should agree within 4 sigma 99.993% of the time. Note that this is + # expected to fail about 1 out of 16,000 times + assert np.abs(expected_mean - samples.mean()) < 4*std_dev + + +def test_discrete(): + x = [0.0, 1.0, 10.0] + p = [0.3, 0.2, 0.5] + d = openmc.stats.Discrete(x, p) + elem = d.to_xml_element('distribution') + + d = openmc.stats.Discrete.from_xml_element(elem) + np.testing.assert_array_equal(d.x, x) + np.testing.assert_array_equal(d.p, p) + assert len(d) == len(x) + + d = openmc.stats.Univariate.from_xml_element(elem) + assert isinstance(d, openmc.stats.Discrete) + + # Single point + d2 = openmc.stats.Discrete(1e6, 1.0) + assert d2.x == [1e6] + assert d2.p == [1.0] + assert len(d2) == 1 + + vals = np.array([1.0, 2.0, 3.0]) + probs = np.array([0.1, 0.7, 0.2]) + + exp_mean = (vals * probs).sum() + + d3 = openmc.stats.Discrete(vals, probs) + + # sample discrete distribution and check that the mean of the samples is + # within 4 std. dev. of the expected mean + n_samples = 1_000_000 + samples = d3.sample(n_samples) + assert_sample_mean(samples, exp_mean) + + +def test_delta_function(): + d = openmc.stats.delta_function(14.1e6) + assert isinstance(d, openmc.stats.Discrete) + np.testing.assert_array_equal(d.x, [14.1e6]) + np.testing.assert_array_equal(d.p, [1.0]) + + +def test_merge_discrete(): + x1 = [0.0, 1.0, 10.0] + p1 = [0.3, 0.2, 0.5] + d1 = openmc.stats.Discrete(x1, p1) + + x2 = [0.5, 1.0, 5.0] + p2 = [0.4, 0.5, 0.1] + d2 = openmc.stats.Discrete(x2, p2) + + # Merged distribution should have x values sorted and probabilities + # appropriately combined. Duplicate x values should appear once. 
+ merged = openmc.stats.Discrete.merge([d1, d2], [0.6, 0.4]) + assert merged.x == pytest.approx([0.0, 0.5, 1.0, 5.0, 10.0]) + assert merged.p == pytest.approx( + [0.6*0.3, 0.4*0.4, 0.6*0.2 + 0.4*0.5, 0.4*0.1, 0.6*0.5]) + assert merged.integral() == pytest.approx(1.0) + + # Probabilities add up but are not normalized + d1 = openmc.stats.Discrete([3.0], [1.0]) + triple = openmc.stats.Discrete.merge([d1, d1, d1], [1.0, 2.0, 3.0]) + assert triple.x == pytest.approx([3.0]) + assert triple.p == pytest.approx([6.0]) + assert triple.integral() == pytest.approx(6.0) + + +def test_clip_discrete(): + # Create discrete distribution with two points that are not important, one + # because the x value is very small, and one because the p value is very + # small + d = openmc.stats.Discrete([1e-8, 1.0, 2.0, 1000.0], [3.0, 2.0, 5.0, 1e-12]) + + # Clipping the distribution should result in two points + d_clip = d.clip(1e-6) + assert d_clip.x.size == 2 + assert d_clip.p.size == 2 + + # Make sure inplace returns same object + d_same = d.clip(1e-6, inplace=True) + assert d_same is d + + with pytest.raises(ValueError): + d.clip(-1.) + + with pytest.raises(ValueError): + d.clip(5) + + +def test_uniform(): + a, b = 10.0, 20.0 + d = openmc.stats.Uniform(a, b) + elem = d.to_xml_element('distribution') + + d = openmc.stats.Uniform.from_xml_element(elem) + assert d.a == a + assert d.b == b + assert len(d) == 2 + + t = d.to_tabular() + np.testing.assert_array_equal(t.x, [a, b]) + np.testing.assert_array_equal(t.p, [1/(b-a), 1/(b-a)]) + assert t.interpolation == 'histogram' + + # Sample distribution and check that the mean of the samples is within 4 + # std. dev. of the expected mean + exp_mean = 0.5 * (a + b) + n_samples = 1_000_000 + samples = d.sample(n_samples) + assert_sample_mean(samples, exp_mean) + + +def test_powerlaw(): + a, b, n = 10.0, 100.0, 2.0 + d = openmc.stats.PowerLaw(a, b, n) + elem = d.to_xml_element('distribution') + + d = openmc.stats.PowerLaw.from_xml_element(elem) + assert d.a == a + assert d.b == b + assert d.n == n + assert len(d) == 3 + + # Determine mean of distribution + exp_mean = (n+1)*(b**(n+2) - a**(n+2))/((n+2)*(b**(n+1) - a**(n+1))) + + # sample power law distribution and check that the mean of the samples is + # within 4 std. dev. of the expected mean + n_samples = 1_000_000 + samples = d.sample(n_samples) + assert_sample_mean(samples, exp_mean) + + +def test_maxwell(): + theta = 1.2895e6 + d = openmc.stats.Maxwell(theta) + elem = d.to_xml_element('distribution') + + d = openmc.stats.Maxwell.from_xml_element(elem) + assert d.theta == theta + assert len(d) == 1 + + exp_mean = 3/2 * theta + + # sample maxwell distribution and check that the mean of the samples is + # within 4 std. dev. of the expected mean + n_samples = 1_000_000 + samples = d.sample(n_samples) + assert_sample_mean(samples, exp_mean) + + # A second sample starting from a different seed + samples_2 = d.sample(n_samples) + assert_sample_mean(samples_2, exp_mean) + assert samples_2.mean() != samples.mean() + + +def test_watt(): + a, b = 0.965e6, 2.29e-6 + d = openmc.stats.Watt(a, b) + elem = d.to_xml_element('distribution') + + d = openmc.stats.Watt.from_xml_element(elem) + assert d.a == a + assert d.b == b + assert len(d) == 2 + + # mean value form adapted from + # "Prompt-fission-neutron average energy for 238U(n, f ) from + # threshold to 200 MeV" Ethvignot et. al. 
+    # https://doi.org/10.1016/j.physletb.2003.09.048
+    exp_mean = 3/2 * a + a**2 * b / 4
+
+    # sample Watt distribution and check that the mean of the samples is within
+    # 4 std. dev. of the expected mean
+    n_samples = 1_000_000
+    samples = d.sample(n_samples)
+    assert_sample_mean(samples, exp_mean)
+
+
+def test_tabular():
+    # test sampling for each supported interpolation scheme
+    x = np.array([0.001, 5.0, 7.0, 10.0])
+    p = np.array([10.0, 20.0, 5.0, 6.0])
+
+    for scheme in _INTERPOLATION_SCHEMES:
+        # test sampling
+        d = openmc.stats.Tabular(x, p, scheme)
+        n_samples = 100_000
+        samples = d.sample(n_samples)
+        assert_sample_mean(samples, d.mean())
+
+        # test normalization
+        d.normalize()
+        assert d.integral() == pytest.approx(1.0)
+
+    # ensure that passing a set of probabilities shorter than x works
+    # for histogram interpolation
+    d = openmc.stats.Tabular(x, p[:-1], interpolation='histogram')
+    d.cdf()
+    d.mean()
+    assert_sample_mean(d.sample(n_samples), d.mean())
+
+    # passing a shorter probability set should raise an error for linear-linear
+    with pytest.raises(ValueError):
+        d = openmc.stats.Tabular(x, p[:-1], interpolation='linear-linear')
+        d.cdf()
+
+    # Use probabilities of correct length for linear-linear interpolation and
+    # call the CDF method
+    d = openmc.stats.Tabular(x, p, interpolation='linear-linear')
+    d.cdf()
+
+
+def test_tabular_from_xml():
+    x = np.array([0.0, 5.0, 7.0, 10.0])
+    p = np.array([10.0, 20.0, 5.0, 6.0])
+    d = openmc.stats.Tabular(x, p, 'linear-linear')
+    elem = d.to_xml_element('distribution')
+
+    d = openmc.stats.Tabular.from_xml_element(elem)
+    assert all(d.x == x)
+    assert all(d.p == p)
+    assert d.interpolation == 'linear-linear'
+    assert len(d) == len(x)
+
+    # Make sure XML roundtrip works with len(x) == len(p) + 1
+    x = np.array([0.0, 5.0, 7.0, 10.0])
+    p = np.array([10.0, 20.0, 5.0])
+    d = openmc.stats.Tabular(x, p, 'histogram')
+    elem = d.to_xml_element('distribution')
+    d = openmc.stats.Tabular.from_xml_element(elem)
+    assert all(d.x == x)
+    assert all(d.p == p)
+
+
+def test_legendre():
+    # Pu239 elastic scattering at 100 keV
+    coeffs = [1.000e+0, 1.536e-1, 1.772e-2, 5.945e-4, 3.497e-5, 1.881e-5]
+    d = openmc.stats.Legendre(coeffs)
+    assert d.coefficients == pytest.approx(coeffs)
+    assert len(d) == len(coeffs)
+
+    # Integrating distribution should yield one
+    mu = np.linspace(-1., 1., 1000)
+    assert trapezoid(d(mu), mu) == pytest.approx(1.0, rel=1e-4)
+
+    with pytest.raises(NotImplementedError):
+        d.to_xml_element('distribution')
+
+
+def test_mixture():
+    d1 = openmc.stats.Uniform(0, 5)
+    d2 = openmc.stats.Uniform(3, 7)
+    p = [0.5, 0.5]
+    mix = openmc.stats.Mixture(p, [d1, d2])
+    np.testing.assert_allclose(mix.probability, p)
+    assert mix.distribution == [d1, d2]
+    assert len(mix) == 4
+
+    # Sample and make sure sample mean is close to expected mean
+    n_samples = 1_000_000
+    samples = mix.sample(n_samples)
+    assert_sample_mean(samples, (2.5 + 5.0)/2)
+
+    elem = mix.to_xml_element('distribution')
+
+    d = openmc.stats.Mixture.from_xml_element(elem)
+    np.testing.assert_allclose(d.probability, p)
+    assert d.distribution == [d1, d2]
+    assert len(d) == 4
+
+
+def test_mixture_clip():
+    # Create mixture distribution containing a discrete distribution with two
+    # points that are not important, one because the x value is very small, and
+    # one because the p value is very small
+    d1 = openmc.stats.Discrete([1e-8, 1.0, 2.0, 1000.0], [3.0, 2.0, 5.0, 1e-12])
+    d2 = openmc.stats.Uniform(0, 5)
+    mix = openmc.stats.Mixture([0.5, 0.5], [d1, d2])
+
+    # Clipping should reduce the contained discrete distribution to 2 points
+    mix_clip = mix.clip(1e-6)
+    assert mix_clip.distribution[0].x.size == 2
+    assert mix_clip.distribution[0].p.size == 2
+
+    # Make sure inplace returns same object
+    mix_same = mix.clip(1e-6, inplace=True)
+    assert mix_same is mix
+
+    # Make sure clip removes low probability distributions
+    d_small = openmc.stats.Uniform(0., 1.)
+    d_large = openmc.stats.Uniform(2., 5.)
+    mix = openmc.stats.Mixture([1e-10, 1.0], [d_small, d_large])
+    mix_clip = mix.clip(1e-3)
+    assert mix_clip.distribution == [d_large]
+
+    # Make sure warning is raised if tolerance is exceeded
+    d1 = openmc.stats.Discrete([1.0, 1.001], [1.0, 0.7e-6])
+    d2 = openmc.stats.Tabular([0.0, 1.0], [0.7e-6], interpolation='histogram')
+    mix = openmc.stats.Mixture([1.0, 1.0], [d1, d2])
+    with pytest.warns(UserWarning):
+        mix_clip = mix.clip(1e-6)
+
+
+def test_polar_azimuthal():
+    # default polar-azimuthal should be uniform in mu and phi
+    d = openmc.stats.PolarAzimuthal()
+    assert isinstance(d.mu, openmc.stats.Uniform)
+    assert d.mu.a == -1.
+    assert d.mu.b == 1.
+    assert isinstance(d.phi, openmc.stats.Uniform)
+    assert d.phi.a == 0.
+    assert d.phi.b == 2*pi
+
+    mu = openmc.stats.Discrete(1., 1.)
+    phi = openmc.stats.Discrete(0., 1.)
+    d = openmc.stats.PolarAzimuthal(mu, phi)
+    assert d.mu == mu
+    assert d.phi == phi
+
+    elem = d.to_xml_element()
+    assert elem.tag == 'angle'
+    assert elem.attrib['type'] == 'mu-phi'
+    assert elem.find('mu') is not None
+    assert elem.find('phi') is not None
+
+    d = openmc.stats.PolarAzimuthal.from_xml_element(elem)
+    assert d.mu.x == [1.]
+    assert d.mu.p == [1.]
+    assert d.phi.x == [0.]
+    assert d.phi.p == [1.]
+
+    d = openmc.stats.UnitSphere.from_xml_element(elem)
+    assert isinstance(d, openmc.stats.PolarAzimuthal)
+
+
+def test_isotropic():
+    d = openmc.stats.Isotropic()
+    elem = d.to_xml_element()
+    assert elem.tag == 'angle'
+    assert elem.attrib['type'] == 'isotropic'
+
+    d = openmc.stats.Isotropic.from_xml_element(elem)
+    assert isinstance(d, openmc.stats.Isotropic)
+
+
+def test_monodirectional():
+    d = openmc.stats.Monodirectional((1., 0., 0.))
+    elem = d.to_xml_element()
+    assert elem.tag == 'angle'
+    assert elem.attrib['type'] == 'monodirectional'
+
+    d = openmc.stats.Monodirectional.from_xml_element(elem)
+    assert d.reference_uvw == pytest.approx((1., 0., 0.))
+
+
+def test_cartesian():
+    x = openmc.stats.Uniform(-10., 10.)
+    y = openmc.stats.Uniform(-10., 10.)
+    z = openmc.stats.Uniform(0., 20.)
+    d = openmc.stats.CartesianIndependent(x, y, z)
+
+    elem = d.to_xml_element()
+    assert elem.tag == 'space'
+    assert elem.attrib['type'] == 'cartesian'
+    assert elem.find('x') is not None
+    assert elem.find('y') is not None
+
+    d = openmc.stats.CartesianIndependent.from_xml_element(elem)
+    assert d.x == x
+    assert d.y == y
+    assert d.z == z
+
+    d = openmc.stats.Spatial.from_xml_element(elem)
+    assert isinstance(d, openmc.stats.CartesianIndependent)
+
+
+def test_box():
+    lower_left = (-10., -10., -10.)
+    upper_right = (10., 10., 10.)
+    d = openmc.stats.Box(lower_left, upper_right)
+
+    elem = d.to_xml_element()
+    assert elem.tag == 'space'
+    assert elem.attrib['type'] == 'box'
+    assert elem.find('parameters') is not None
+
+    d = openmc.stats.Box.from_xml_element(elem)
+    assert d.lower_left == pytest.approx(lower_left)
+    assert d.upper_right == pytest.approx(upper_right)
+
+
+def test_point():
+    p = (-4., 2., 10.)
+    d = openmc.stats.Point(p)
+
+    elem = d.to_xml_element()
+    assert elem.tag == 'space'
+    assert elem.attrib['type'] == 'point'
+    assert elem.find('parameters') is not None
+
+    d = openmc.stats.Point.from_xml_element(elem)
+    assert d.xyz == pytest.approx(p)
+
+
+def test_normal():
+    mean = 10.0
+    std_dev = 2.0
+    d = openmc.stats.Normal(mean, std_dev)
+
+    elem = d.to_xml_element('distribution')
+    assert elem.attrib['type'] == 'normal'
+
+    d = openmc.stats.Normal.from_xml_element(elem)
+    assert d.mean_value == pytest.approx(mean)
+    assert d.std_dev == pytest.approx(std_dev)
+    assert len(d) == 2
+
+    # sample normal distribution
+    n_samples = 100_000
+    samples = d.sample(n_samples)
+    assert_sample_mean(samples, mean)
+
+
+def test_muir():
+    mean = 10.0
+    mass = 5.0
+    temp = 20000.
+    d = openmc.stats.muir(mean, mass, temp)
+    assert isinstance(d, openmc.stats.Normal)
+
+    elem = d.to_xml_element('energy')
+    assert elem.attrib['type'] == 'normal'
+
+    d = openmc.stats.Univariate.from_xml_element(elem)
+    assert isinstance(d, openmc.stats.Normal)
+
+    # sample muir distribution
+    n_samples = 100_000
+    samples = d.sample(n_samples)
+    assert_sample_mean(samples, mean)
+
+
+def test_combine_distributions():
+    # Combine two discrete distributions (same data as in test_merge_discrete)
+    x1 = [0.0, 1.0, 10.0]
+    p1 = [0.3, 0.2, 0.5]
+    d1 = openmc.stats.Discrete(x1, p1)
+    x2 = [0.5, 1.0, 5.0]
+    p2 = [0.4, 0.5, 0.1]
+    d2 = openmc.stats.Discrete(x2, p2)
+
+    # Merged distribution should have x values sorted and probabilities
+    # appropriately combined. Duplicate x values should appear once.
+    merged = openmc.stats.combine_distributions([d1, d2], [0.6, 0.4])
+    assert isinstance(merged, openmc.stats.Discrete)
+    assert merged.x == pytest.approx([0.0, 0.5, 1.0, 5.0, 10.0])
+    assert merged.p == pytest.approx(
+        [0.6*0.3, 0.4*0.4, 0.6*0.2 + 0.4*0.5, 0.4*0.1, 0.6*0.5])
+
+    # Probabilities add up but are not normalized
+    d1 = openmc.stats.Discrete([3.0], [1.0])
+    triple = openmc.stats.combine_distributions([d1, d1, d1], [1.0, 2.0, 3.0])
+    assert triple.x == pytest.approx([3.0])
+    assert triple.p == pytest.approx([6.0])
+
+    # Combine discrete and tabular
+    t1 = openmc.stats.Tabular(x2, p2)
+    mixed = openmc.stats.combine_distributions([d1, t1], [0.5, 0.5])
+    assert isinstance(mixed, openmc.stats.Mixture)
+    assert len(mixed.distribution) == 2
+    assert len(mixed.probability) == 2
+
+    # Combine 1 discrete and 2 tabular -- the tabular distributions should
+    # combine to produce a uniform distribution with mean 0.5. The combined
+    # distribution should have a mean of 0.25.
+    t1 = openmc.stats.Tabular([0., 1.], [2.0, 0.0])
+    t2 = openmc.stats.Tabular([0., 1.], [0.0, 2.0])
+    d1 = openmc.stats.Discrete([0.0], [1.0])
+    combined = openmc.stats.combine_distributions([t1, t2, d1], [0.25, 0.25, 0.5])
+    assert combined.integral() == pytest.approx(1.0)
+
+    # Sample the combined distribution and make sure the sample mean is within
+    # uncertainty of the expected value
+    samples = combined.sample(10_000)
+    assert_sample_mean(samples, 0.25)
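
Editorial note (not part of the patch): the 4-sigma tolerance used by assert_sample_mean corresponds to a two-sided normal tail probability of about 6.3e-5, consistent with the "fail about 1 out of 16,000 times" figure quoted in its comment. A minimal sketch of that arithmetic, assuming scipy is available:

    # Illustrative check of the 4-sigma tolerance quoted in assert_sample_mean
    from scipy.stats import norm

    p_fail = 2 * norm.sf(4.0)   # two-sided tail probability beyond 4 sigma
    print(p_fail)               # ~6.33e-05
    print(1 / p_fail)           # ~15,800, i.e. roughly 1 spurious failure in 16,000 runs
    print(1 - p_fail)           # ~0.99994 coverage when the sample mean is normally distributed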