diff --git a/CHANGELOG.md b/CHANGELOG.md index d9fe6909d..5288d3ca8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,11 @@ ## 3.X.Y – YYYY-MM-DD +### General + - Documented uses of `Model` class in general contexts (previously only cosmo) - `Model` methods to compute log-probabilities and derived parameters now have an `as_dict` keyword (default `False`), for more informative return value. - Added ``--minimize`` flag to ``cobaya-run`` for quick minimization (replaces sampler, uses previous output). +- Added `COBAYA_USE_FILE_LOCKING` environment variable to allow disabling of file locks. Added a warning not to use `--test` with MPI. ### Cosmological likelihoods and theory codes @@ -13,6 +16,9 @@ - Removed problematic `zrei: zre` alias (fixes #199, thanks @pcampeti) - Added `Omega_b|cdm|nu_massive(z)` and `angular_diameter_distance_2` - Returned values for `get_sigma_R` changed from `R, z, sigma(z, R)` to `z, R, sigma(z, R)`. +- Support setting individual Accuracy parameters, e.g. Accuracy.AccurateBB +- Calculate accurate BB when tensors are requested +- Fix for using derived parameters with post-processing #### CLASS @@ -21,7 +27,7 @@ #### BAO -- Added Boss DR16 likelihood (#185, by @Pablo-Lemos) +- Added BOSS DR16 likelihoods (#185, by @Pablo-Lemos) #### BICEP-Keck diff --git a/cobaya/collection.py b/cobaya/collection.py index 190fef9f0..acc19a90b 100644 --- a/cobaya/collection.py +++ b/cobaya/collection.py @@ -95,9 +95,9 @@ class SampleCollection(BaseCollection): (returns a copy, not a view). Note for developers: when expanding this class or inheriting from it, always access - the underlying DataFrame as `self.data` and not `self._data`, to ensure the cache has - been dumped. If you really need to access the actual attribute `self._data` in a - method, make sure to decorate it with `@ensure_cache_dumped`. + the underlying DataFrame as ``self.data`` and not ``self._data``, to ensure the cache + has been dumped. 
If you really need to access the actual attribute ``self._data`` in a + method, make sure to decorate it with ``@ensure_cache_dumped``. """ def __init__(self, model, output=None, cache_size=_default_cache_size, name=None, @@ -233,7 +233,7 @@ def _check_before_adding(self, values: Union[Sequence[float], np.ndarray], if derived is not None: # A simple np.allclose is not enough, because np.allclose([1], []) = True! if len(derived) != len(logpost.derived) or \ - not np.allclose(derived, logpost.derived): + not np.allclose(derived, logpost.derived): raise LoggedError( self.log, "derived params not consistent with those of LogPosterior object " @@ -285,7 +285,7 @@ def _cache_add_row(self, pos: int, values: Union[Sequence[float], np.ndarray], for name, value in zip(self.chi2_names, logposterior.loglikes): self._cache[pos, self._icol[name]] = -2 * value self._cache[pos, self._icol[OutPar.chi2]] = -2 * logposterior.loglike - if logposterior.derived != []: + if len(logposterior.derived): for name, value in zip(self.derived_params, logposterior.derived): self._cache[pos, self._icol[name]] = value diff --git a/cobaya/conventions.py b/cobaya/conventions.py index 0b890367e..006dacaab 100644 --- a/cobaya/conventions.py +++ b/cobaya/conventions.py @@ -40,9 +40,9 @@ def get_version(): # Names for the samples' fields internally and in the output class OutPar: weight = "weight" # sample weight - # log-posterior, or in general the total log-probability + # minus log-posterior, or in general the total minus log-probability minuslogpost = "minuslogpost" - minuslogprior = "minuslogprior" # log-prior + minuslogprior = "minuslogprior" # minus log-prior chi2 = "chi2" # chi^2 = -2 * loglike (not always normalized to be useful) diff --git a/cobaya/model.py b/cobaya/model.py index 1453ab71b..9fe365cb6 100644 --- a/cobaya/model.py +++ b/cobaya/model.py @@ -119,7 +119,7 @@ def make_finite(self): object.__setattr__(self, 'loglikes', np.nan_to_num(self.loglikes)) object.__setattr__(self, 
'loglike', np.nan_to_num(self.loglike)) - def as_dict(self, model: "Model") -> Dict[str, float]: + def as_dict(self, model: "Model") -> Dict[str, Union[float, Dict[str, float]]]: """ Given a :class:`~model.Model`, returns a more informative version of itself, containing the names of priors, likelihoods and derived parameters. diff --git a/cobaya/theories/classy/classy.py b/cobaya/theories/classy/classy.py index 2eed34d44..dec541cbf 100644 --- a/cobaya/theories/classy/classy.py +++ b/cobaya/theories/classy/classy.py @@ -576,8 +576,8 @@ def get_can_provide_params(self): return names def get_can_support_params(self): - # non-exhaustive list of supported input parameters that will be assigne do classy - # if they are varied + # non-exhaustive list of supported input parameters that will be assigned to + # classy if they are varied return ['H0'] def get_version(self): diff --git a/docs/theories_and_dependencies.rst b/docs/theories_and_dependencies.rst index fa4094069..aef17d8c0 100644 --- a/docs/theories_and_dependencies.rst +++ b/docs/theories_and_dependencies.rst @@ -31,7 +31,7 @@ The theory code also needs to tell other theory codes and likelihoods the things Use a ``get_X`` method when you need to add optional arguments to provide different outputs from the computed quantity. Quantities returned by :meth:`~.theory.Theory.get_can_provide` should be stored in the state dictionary by the calculate function -or returned by the ``get_results(X)`` for each quantity ``X`` (which by default just returns the value stored in the current state dictionary). +or returned by the ``get_result(X)`` for each quantity ``X`` (which by default just returns the value stored in the current state dictionary). The results stored by calculate for a given set of input parameters are cached, and ``self.current_state`` is set to the current state whenever ``get_X``, ``get_param`` etc are called. 
@@ -136,4 +136,3 @@ sampling settings for 'Xin' so that configuring it is transparent to the user, e If multiple theory codes can provide the same quantity, it may be ambiguous which to use to compute which. When this happens use the ``provides`` input .yaml keyword to specify that a specific theory computes a specific quantity. -