Skip to content

Commit

Permalink
Merge branch 'master' into profile_sampler
Browse files Browse the repository at this point in the history
  • Loading branch information
ggalloni authored Sep 6, 2024
2 parents 8a313a8 + d14d6c3 commit 5fc20e2
Show file tree
Hide file tree
Showing 26 changed files with 357 additions and 181 deletions.
116 changes: 116 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
name: CI

# For the moment only runs on "action*" branches. Coverage/pypi not yet set up.

on:
  push:
    branches:
      - 'action*'
    tags:
      - '*'
  pull_request:
    branches:
      - '*'

env:
  # Skip slow/large optional components during `cobaya-install`
  COBAYA_INSTALL_SKIP: polychord,planck_2015,CamSpec2021,2018_highl_CamSpec,unbinned,keck,classy
  COBAYA_PACKAGES_PATH: ../packages

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: "Anaconda: jammy LTS Python (fast; pip CAMB)"
            os: ubuntu-latest
            pydist: "ANACONDA"
          - name: "Latest Python 3.12"
            os: ubuntu-latest
            python-version: "3.12"
            mpi: openmpi
          - name: "OS X Python 3.8"
            os: macos-latest
            python-version: "3.8"
            mpi: openmpi
          - name: "Windows Python 3.12"
            os: windows-latest
            python-version: "3.12"
            mpi: intelmpi
    steps:
      # macOS runners ship gfortran as gfortran-<major>; expose it as plain `gfortran`
      - run: ln -s $(which gfortran-14) /usr/local/bin/gfortran
        if: matrix.os == 'macos-latest'

      - run: gfortran --version

      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python-version }}
        if: matrix.pydist != 'ANACONDA'
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Set up Miniconda
        if: matrix.pydist == 'ANACONDA'
        uses: conda-incubator/setup-miniconda@v3
        with:
          auto-activate-base: false
          activate-environment: tests-environment
          environment-file: tests-environment.yml

      - name: Install mpi
        if: matrix.pydist != 'ANACONDA'
        uses: mpi4py/setup-mpi@v1
        with:
          mpi: ${{ matrix.mpi }}

      # - name: Cache dependencies
      #   uses: actions/cache@v2
      #   with:
      #     path: |
      #       ${{ github.workspace }}/packages/data/planck_supp_data_and_covmats
      #       ${{ github.workspace }}/packages/data/bao_data
      #       ${{ github.workspace }}/packages/data/sn_data
      #       ${{ github.workspace }}/packages/data/des_data
      #       ${{ github.workspace }}/packages/data/planck_2018_pliklite_native
      #     key: ${{ runner.os }}-build-${{ matrix.python-version }}

      - name: Install dependencies (pip)
        if: matrix.pydist != 'ANACONDA'
        run: |
          pip install mpi4py -i https://pypi.anaconda.org/mpi4py/simple
          pip install -r requirements.txt pytest-xdist pytest-cov flaky matplotlib dill coverage flake8 iminuit numba camb

      - name: Run flake8
        # `bash -el {0}` so the conda environment (when used) is activated
        shell: bash -el {0}
        run: |
          flake8 cobaya --select=E713,E704,E703,E714,E741,E10,E11,E20,E22,E23,E25,E27,E301,E302,E304,E9,F405,F406,F5,F6,F7,F8,W1,W2,W3,W6 --show-source --statistics

      - name: Run cobaya install and tests
        shell: bash -el {0}
        run: |
          coverage run --parallel-mode -m cobaya.install polychord --debug
          coverage run --parallel-mode -m pytest tests/ -n auto -k "not cosmo" --skip-not-installed --no-flaky-report
          coverage run --parallel-mode -m cobaya.install cosmo-tests --no-progress-bars --debug --skip-global
          pytest tests/ --cov -vv -s -k "cosmo" -n 2 --skip-not-installed --no-flaky-report

      - name: Run MPI tests
        shell: bash -el {0}
        run: |
          mpiexec -np 2 coverage run --parallel-mode -m pytest -x -m mpi tests/ --no-flaky-report

      - name: Run external likelihood tests
        shell: bash -el {0}
        run: |
          git clone --depth=1 https://github.com/CobayaSampler/example_external_likelihood
          pip install ./example_external_likelihood --quiet
          coverage run --parallel-mode -m unittest test_package.tests.test

      # - name: Upload coverage to Codecov
      #   uses: codecov/codecov-action@v1


46 changes: 20 additions & 26 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ env:
global:
- COBAYA_INSTALL_SKIP=polychord,planck_2015,CamSpec2021,2018_highl_CamSpec,unbinned,keck

if: (type = pull_request) OR (branch = master) OR (branch =~ ^test.*) OR (tag IS present)
if: (type = pull_request) OR (branch = master) OR (branch =~ ^test.*) OR (branch =~ ^alltest.*) OR (tag IS present)

#Large CamSpec folders tend to hang, so exclude non-base likelihoods from cache
cache:
Expand All @@ -21,8 +21,6 @@ cache:
- /home/travis/build/CosmoPars/packages/data/sn_data
- /home/travis/build/CosmoPars/packages/data/des_data
- /home/travis/build/CosmoPars/packages/data/planck_2018_pliklite_native
- /home/travis/build/CosmoPars/packages/data/planck_2018_lowE_native
- /home/travis/build/CosmoPars/packages/data/planck_2018_lowT_native

# (Pre)Installation
jobs:
Expand All @@ -46,29 +44,29 @@ jobs:
apt:
packages:
- gfortran
install:
- pip install -r requirements.txt camb
env:
- GCC_VERSION="ubuntu"
python: "3.10"
- if: branch !~ ^test.*
name: "Anaconda: jammy LTS Python 3.11"
- name: "Anaconda: jammy LTS Python (fast; pip CAMB)"
dist: jammy
addons:
apt:
packages:
- gfortran
env:
- GCC_VERSION="ubuntu"
- PYDIST="ANACONDA"
- ANACONDA_CHANNEL="defaults"
python: "3.11"
- name: "Latest jammy Python 3.12"
- COBAYA_INSTALL_SKIP="$COBAYA_INSTALL_SKIP,classy"
language: minimal
- if: branch !~ ^test.*
name: "Latest jammy Python 3.12"
dist: jammy
addons:
apt:
packages:
- gfortran
env:
- GCC_VERSION="ubuntu"
- COBAYA_INSTALL_SKIP="$COBAYA_INSTALL_SKIP,classy"
python: "3.12"


Expand All @@ -81,27 +79,25 @@ before_install:
ln -s /usr/bin/g++-$GCC_VERSION gcc-symlinks/g++;
export PATH=$PWD/gcc-symlinks:$PATH;
fi
- if [[ "$GCC_VERSION" == "11" ]]; then
export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libgfortran.so.5;
fi
- gfortran --version
- which gfortran >/dev/null 2>&1 && gfortran --version || echo "gfortran not installed"
# Install rest of system requisites
- sudo apt install openmpi-bin openmpi-common libopenmpi-dev libopenblas-dev liblapack-dev
# - sudo apt install openmpi-bin openmpi-common libopenmpi-dev libopenblas-dev liblapack-dev
# Python requisites
- if [[ "$PYDIST" == "ANACONDA" ]]; then
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
bash miniconda.sh -b -p $HOME/miniconda;
export PATH="$HOME/miniconda/bin:$PATH";
hash -r;
conda config --set always_yes yes --set changeps1 no;
conda info -a;
conda create -q -n test-environment -c $ANACONDA_CHANNEL python=$TRAVIS_PYTHON_VERSION scipy matplotlib cython PyYAML dill coverage pytest;
conda create -q -n test-environment -c $ANACONDA_CHANNEL scipy matplotlib cython PyYAML dill coverage pytest pandas;
source activate test-environment;
conda install -c conda-forge mpi4py openmpi iminuit;
pip install flake8 flaky pytest-forked pytest-cov;
pip install -r requirements.txt flake8 flaky pytest-xdist pytest-cov camb;
else
python -m pip install --upgrade pip setuptools wheel;
pip install mpi4py "numpy<2" pytest-forked pytest-cov flaky matplotlib dill coverage flake8 iminuit;
pip install openmpi mpi4py -i https://pypi.anaconda.org/mpi4py/simple;
pip install pytest-xdist pytest-cov flaky matplotlib dill coverage flake8 iminuit numba;
fi
- python --version

Expand All @@ -112,11 +108,9 @@ script:
# General tests:
- export COBAYA_PACKAGES_PATH="../packages"
- coverage run --parallel-mode -m cobaya.install polychord --debug
- coverage run --parallel-mode -m pytest tests/ -k "not cosmo" --skip-not-installed --no-flaky-report
# numba can speed things, but makes for some messy logs
# - pip install numba
- coverage run --parallel-mode -m pytest tests/ -n auto -k "not cosmo" --skip-not-installed --no-flaky-report
# Cosmology tests:
- coverage run --parallel-mode -m cobaya.install cosmo-tests --no-progress-bars --debug
- coverage run --parallel-mode -m cobaya.install cosmo-tests --no-progress-bars --debug --skip-global
- if [ -n "${CAMB_BRANCH}" ]; then
rm -rf $COBAYA_PACKAGES_PATH/code/CAMB ;
git clone --recursive --depth 1 -b $CAMB_BRANCH https://github.com/cmbant/CAMB $COBAYA_PACKAGES_PATH/code/CAMB ;
Expand All @@ -125,10 +119,10 @@ script:
# mpi tests
- mpiexec -np 2 --mca orte_base_help_aggregate 0 --mca btl ^openib --oversubscribe coverage run --parallel-mode -m pytest -x -m mpi tests/ --no-flaky-report ;
- mkdir covers; mv .coverage.* covers; ls -ltra covers
- pytest tests/ --cov -vv -s -k "cosmo" --forked --skip-not-installed --no-flaky-report
- pytest tests/ --cov -vv -s -k "cosmo" -n 1 --skip-not-installed --no-flaky-report
- mv .coverage .coverage.pytest; mv covers/.cov* .
# Test external cosmological likelihoods
- pip install -e $COBAYA_PACKAGES_PATH/code/CAMB --quiet
#- pip install -e $COBAYA_PACKAGES_PATH/code/CAMB --quiet
#- git clone --depth=1 https://github.com/CobayaSampler/planck_lensing_external
#- pip install ./planck_lensing_external --quiet
#- coverage run --parallel-mode -m unittest plancklensing.tests.test_likes
Expand Down
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
## 3.5.x

- Detect and fix incomplete last lines when resuming or minimizing from existing runs (#306, #378)
- Added functions module and refactored some numerical functions into it

## 3.5.4

- Allow classes to have both yaml and class attributes as long as no duplicate keys
Expand Down
2 changes: 1 addition & 1 deletion DEVEL.rst
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ Development flow for contributors
1. Fork and clone the repo from github.
2. From its folder, install in editable mode: ``pip install -e .``
3. Modify stuff.
4. Test with pytest (first "pip install pytest pytest-forked pytest-cov flaky dill")
4. Test with pytest (first "pip install pytest pytest-xdist pytest-cov flaky dill")
5. Make a pull request and check (about 15 minutes) if the tests have passed.
6. Iterate until tests pass, then wait for or request feedback/merge

Expand Down
2 changes: 1 addition & 1 deletion TODO.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
# cosmetic/consistency/speed

## min/max bounds enforced on derived parameters (more generally, "bounds" as well as priors)
## make portalocker/numba/dill a requirement?
## make numba a requirement?
## version attribute should be in all components not just theory (samplers can have versions) [done for samplers; missing: likelihoods]
## In the docs "Bases" (and UML diagram) not hyperlinked correctly (not sure how to fix)
## dump log info along with each chain file if saving to file (currently in stdout)
Expand Down
2 changes: 1 addition & 1 deletion cobaya/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def run_command():
prefix = "cobaya-"
console_scripts = (
metadata.entry_points().select(group="console_scripts")
if sys.version_info > (3, 9)
if sys.version_info >= (3, 10)
else metadata.entry_points()["console_scripts"]
)
for script in console_scripts:
Expand Down
10 changes: 5 additions & 5 deletions cobaya/collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def compute_temperature(logpost, logprior, loglike, check=True, extra_tolerance=
"""
Returns the temperature of a sample.
If ``check=True`` and the log-probabilites passed are arrays, checks consistency
If ``check=True`` and the log-probabilities passed are arrays, checks consistency
of the sample temperature, and raises ``AssertionError`` if inconsistent.
"""
temp = (logprior + loglike) / logpost
Expand Down Expand Up @@ -286,8 +286,7 @@ def __init__(self, model, output=None, cache_size=_default_cache_size, name=None
self.reset()
# If loaded, check sample weights, consistent logp sums,
# and temperature (ignores the given one)
samples_loaded = len(self) > 0
if samples_loaded:
if len(self) > 0:
try:
try:
self.temperature = self._check_logps(extra_tolerance=False)
Expand Down Expand Up @@ -499,8 +498,9 @@ def _check_logps(self, temperature_only=False, extra_tolerance=False):
check=True, extra_tolerance=extra_tolerance)
except AssertionError as excpt:
raise LoggedError(
self.log, "The sample seems to have an inconsistent temperature.") \
from excpt
self.log, "The sample seems to have an inconsistent temperature. "
"This could be due to input file truncation on the last line "
"due to crash/being killed before complete.") from excpt
if not temperature_only:
tols = {
"rtol": 1e-4 * (10 if extra_tolerance else 1),
Expand Down
15 changes: 8 additions & 7 deletions cobaya/component.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,9 +444,7 @@ def compare_versions(cls, version_a, version_b, equal=True):
:return: bool
"""
va, vb = version.parse(version_a), version.parse(version_b)
if va >= vb if equal else va > vb:
return True
return False
return va >= vb if equal else va > vb

def __exit__(self, exception_type, exception_value, traceback):
if self.timer and self.timer.n:
Expand Down Expand Up @@ -482,6 +480,7 @@ def dump_timing(self):
def get_versions(self, add_version_field=False) -> InfoDict:
"""
Get version dictionary
:return: dictionary of versions for all components
"""

Expand All @@ -494,6 +493,7 @@ def format_version(x):
def get_speeds(self, ignore_sub=False) -> InfoDict:
"""
Get speeds dictionary
:return: dictionary of versions for all components
"""
from cobaya.theory import HelperTheory
Expand Down Expand Up @@ -695,8 +695,7 @@ def module_class_for_name(m, name):
for cls in classes_in_module(m, subclass_of=CobayaComponent):
if cls.__name__.lower() in valid_names:
if result is not None:
raise ValueError('More than one class with same lowercase name %s',
name)
raise ValueError(f'More than one class with same lowercase name {name}')
result = cls
return result

Expand Down Expand Up @@ -777,7 +776,7 @@ def _bare_load_external_module(name, path=None, min_version=None, reload=False,

def load_external_module(module_name=None, path=None, install_path=None, min_version=None,
get_import_path=None, reload=False, logger=None,
not_installed_level=None):
not_installed_level=None, default_global=False):
"""
Tries to load an external module at initialisation, dealing with explicit paths
and Cobaya's installation path.
Expand Down Expand Up @@ -807,12 +806,14 @@ def load_external_module(module_name=None, path=None, install_path=None, min_ver
found. If this exception will be handled at a higher level, you may pass
`not_installed_level='debug'` to prevent printing non-important messages at
error-level logging.
If default_global=True, always attempts to load from the global path if not
installed at path (e.g. pip install).
"""
if not logger:
logger = get_logger(__name__)
load_kwargs = {"name": module_name, "path": path, "get_import_path": get_import_path,
"min_version": min_version, "reload": reload, "logger": logger}
default_global = False
if isinstance(path, str):
if path.lower() == "global":
msg_tried = "global import (`path='global'` given)"
Expand Down
2 changes: 1 addition & 1 deletion cobaya/containers.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
# Python requisites -- LC_ALL=C: Necessary just for pip <= 8.1.2 (Xenial version)
ENV LC_ALL C
RUN python -m pip install --upgrade pip
RUN python -m pip install pytest-forked matplotlib cython astropy --upgrade
RUN python -m pip install pytest-xdist matplotlib cython astropy --upgrade
# Prepare environment and tree for external packages -------------------------
ENV LD_LIBRARY_PATH $LD_LIBRARY_PATH:/usr/local/lib
ENV CONTAINED TRUE
Expand Down
2 changes: 1 addition & 1 deletion cobaya/cosmo_input/autoselect_covmat.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ def score_likes(_key: CovmatFileKey, covmat):
best_p_l = get_best_score(best_p, score_likes)
if is_debug(log):
log.debug("Subset based on params + likes:\n - " +
"\n - ".join([b["name"] for b in best_p_l]))
"\n - ".join([b["name"] for b in best_p_l.values()]))

if key_tuple:
def score_left_params(_key, _covmat):
Expand Down
Loading

0 comments on commit 5fc20e2

Please sign in to comment.