
Commit

Merge branch 'main' of github.com:cms-analysis/HiggsAnalysis-CombinedLimit into anigamova-patch-2
Aliya Nigamova committed Jun 11, 2024
2 parents 1849740 + 33b171c commit b897f05
Showing 402 changed files with 31,914 additions and 4,616 deletions.
6 changes: 6 additions & 0 deletions .clang-tidy
@@ -0,0 +1,6 @@
---
Checks: '-*,
  modernize-use-override,'
WarningsAsErrors: '*'
HeaderFilterRegex: ''
...
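For context, this new configuration disables all checks except `modernize-use-override` and promotes warnings to errors. A local run against a single translation unit might look like the following sketch (the source file and build directory are illustrative; a `compile_commands.json` is assumed, e.g. from a CMake configure with `-DCMAKE_EXPORT_COMPILE_COMMANDS=ON`):

```
clang-tidy --list-checks              # confirm which checks the .clang-tidy file enables
clang-tidy -p build src/Combine.cc    # illustrative source file; -p points at the compile database
```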
11 changes: 6 additions & 5 deletions .github/workflows/ci.yml
@@ -28,17 +28,18 @@ jobs:
strategy:
matrix:
python: ["3.8"]
root: ["6.22"]
root: ["6.22"]
include:
# to deprecate (11_2_X py2)
- python: "2.7.18"
root: "6.22.0"
# python 3.9 root 6.24 (12_3_X)
- python: "3.9"
root: "6.24"
root: "6.22.0"
# python 3.10 root 6.26
- python: "3.10"
root: "6.26.4"
# python 3.10 root 6.32 (not available yet)
#- python: "3.10"
# root: "6.32" # version 6.30.07 where the conflicting classes were removed from root is not available, so compiling with 6.32.00 where this issue is resolved

runs-on: ubuntu-latest
name: Compile (py${{ matrix.python }}, root${{ matrix.root }})

61 changes: 61 additions & 0 deletions .github/workflows/cvmfs-ci.yml
@@ -0,0 +1,61 @@
name: CI with CVMFS

on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      - main

jobs:
  test_workflow:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        IMAGE: ["cmscloud/al9-cms"]
        CMSSW_VERSION: ["CMSSW_14_1_0_pre4"] # ROOT v6.30/07
        SCRAM_ARCH: ["el9_amd64_gcc12"]
        include:
          - IMAGE: "cmscloud/cc7-cms"
            CMSSW_VERSION: "CMSSW_11_3_4" # ROOT v6.22
            SCRAM_ARCH: "slc7_amd64_gcc900"
          - IMAGE: "cmscloud/al9-cms"
            CMSSW_VERSION: "CMSSW_14_0_0_pre1" # ROOT v6.26/11
            SCRAM_ARCH: "el9_amd64_gcc12"

    name: Test with ${{ matrix.CMSSW_VERSION }}
    steps:
      # checkout the files of this repository
      - uses: actions/checkout@v4
      - uses: cvmfs-contrib/github-action-cvmfs@v4
        with:
          cvmfs_repositories: 'cms.cern.ch'
      - uses: rhaschke/docker-run-action@v5
        with:
          image: ${{ matrix.IMAGE }}
          shell: bash
          options: -v /cvmfs:/cvmfs:shared -v ${{ github.workspace }}:/work/CombinedLimit -w /home/cmsusr -e CMSSW_VERSION=${{ matrix.CMSSW_VERSION }} -e SCRAM_ARCH=${{ matrix.SCRAM_ARCH }}
          run: |
            ls /work/CombinedLimit
            ls /cvmfs/cms.cern.ch | grep common
            source /cvmfs/cms.cern.ch/cmsset_default.sh
            scram project ${CMSSW_VERSION}
            source /cvmfs/cms.cern.ch/cmsset_default.sh
            cd ${CMSSW_VERSION}/src
            cmsenv
            mkdir -p HiggsAnalysis
            cp -r /work/CombinedLimit HiggsAnalysis/
            scramv1 b -j$(nproc)
            echo ${PATH}
            root --version
            combine --help
            combine HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-2-template-analysis.txt -M HybridNew --LHCmode LHC-limits --rMax 2.0 --clsAcc 0.01
            combine HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-3-parametric-analysis.txt -M HybridNew --LHCmode LHC-significance -T 100 --mass 125
            combine HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-3-parametric-analysis.txt -M Significance --mass 125
            combine HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-1-counting-experiment.txt -M MarkovChainMC --tries 100
            text2workspace.py HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-5-multi-signal.txt -P HiggsAnalysis.CombinedLimit.PhysicsModel:floatingXSHiggs --PO modes=ggH,qqH -o datacard-5-multi-signal.root --mass 125
            combine datacard-5-multi-signal.root -M MultiDimFit --algo singles --mass 125
            combine HiggsAnalysis/CombinedLimit/data/tutorials/CAT23001/datacard-5-multi-signal.txt -M ChannelCompatibilityCheck --mass 125
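For reference, the container environment used above can also be entered interactively outside of GitHub Actions with a plain docker invocation along these lines (a sketch, assuming `/cvmfs` is already mounted on the host; the image name is taken from the matrix above):

```
docker run -it --rm \
  -v /cvmfs:/cvmfs:shared \
  -v $(pwd):/work/CombinedLimit \
  -w /home/cmsusr \
  cmscloud/al9-cms bash
# then repeat the commands from the run: block above (cmsset_default.sh, scram project, scramv1 b, combine ...)
```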
13 changes: 5 additions & 8 deletions .github/workflows/docs.yml
@@ -18,11 +18,8 @@ jobs:
python-version: 3.8
- name: Build documentation
run: |
python3 -m pip install mkdocs pymdown-extensions mkdocs-cinder
mkdocs build --verbose --clean
- name: Deploy documentation
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./site

python3 -m pip install mkdocs pymdown-extensions mkdocs-material mike
git fetch origin gh-pages --depth=1
git config user.name ci-bot
git config user.email [email protected]
mike deploy --push v9.2.X latest
1 change: 1 addition & 0 deletions BuildFile.xml
@@ -9,6 +9,7 @@
<lib name="Smatrix"/>
<use name="boost_program_options"/>
<use name="boost_filesystem"/>
<flags CXXFLAGS="-DBOOST_BIND_GLOBAL_PLACEHOLDERS -Wno-free-nonheap-object"/>
<export>
<lib name="1"/>
</export>
138 changes: 138 additions & 0 deletions CMakeLists.txt
@@ -0,0 +1,138 @@
cmake_minimum_required( VERSION 3.11 FATAL_ERROR )
set(CMAKE_CXX_STANDARD 17)
project(CMSCombine VERSION 0.0.1)

option( MODIFY_ROOTMAP "Modify generated Rootmap to take out classes already bundled in StatAnalysis" FALSE )

# Can build with CMake after e.g. setting up StatAnalysis release like this:
# export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
# source $ATLAS_LOCAL_ROOT_BASE/user/atlasLocalSetup.sh; asetup StatAnalysis,0.3.2
# mkdir build; cd build
# cmake path/to/source # change this path to where-ever you cloned Combine repo to
# make -j4

list(APPEND CMAKE_PREFIX_PATH $ENV{ROOTSYS})
find_package( ROOT REQUIRED COMPONENTS Core MathMore RooFitCore RooFit RooStats Minuit HistFactory RooFitHS3)
find_package(Eigen3 REQUIRED)
find_package(Vdt)
find_package(LCG QUIET) # only used for FindBoost in StatAnalysis
find_package( Boost REQUIRED COMPONENTS program_options filesystem )

message(STATUS "Using ROOT From: ${ROOT_INCLUDE_DIRS}")
include(${ROOT_USE_FILE})

include_directories(${ROOT_INCLUDE_DIRS})
include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR})
add_definitions(${ROOT_CXX_FLAGS})

set(LIBNAME CMSCombine)

file(GLOB HEADERS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} interface/*.h*)
file(GLOB SOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} src/*.c*)

# includes require "HiggsAnalysis/CombinedLimit" prefix in many places
# so create a symlink in the build dir
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/HiggsAnalysis)
execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/HiggsAnalysis/CombinedLimit" )
include_directories(${CMAKE_CURRENT_BINARY_DIR})

ROOT_GENERATE_DICTIONARY(G__${LIBNAME} HiggsAnalysis/CombinedLimit/src/classes.h LINKDEF src/classes_def.xml
MODULE ${LIBNAME}
OPTIONS --deep)
add_library(${LIBNAME} SHARED ${SOURCES} G__${LIBNAME}.cxx)
set_target_properties(${LIBNAME} PROPERTIES PUBLIC_HEADER "${HEADERS}")
target_link_libraries (${LIBNAME} Eigen3::Eigen ${ROOT_LIBRARIES} ${Boost_LIBRARIES} VDT::VDT)

add_executable(combine bin/combine.cpp)
target_link_libraries(combine PUBLIC ${LIBNAME})


# Create empty __init__.py file in the build directory that will be installed
# in the Python library directories.
set(empty_init_py "${CMAKE_CURRENT_BINARY_DIR}/__init__.py")

if(MODIFY_ROOTMAP)
# edit the generated rootmap in-situ before installation

# Define the path to the generated file
set(GENERATED_FILE_PATH "${CMAKE_CURRENT_BINARY_DIR}/lib${LIBNAME}.rootmap")

# Define the custom command to search and replace in-place
add_custom_command(
OUTPUT "${GENERATED_FILE_PATH}.bak" # Declare the generated file as output
DEPENDS "${GENERATED_FILE_PATH}" # Ensure it depends on the original generated file
COMMAND sed -i.bak -e "/class RooParamKeysPdf/d"
-e "/class RooStarMomentMorph/d"
-e "/class RooStats::HistFactory::RooBSpline/d"
-e "/class RooStats::HistFactory::RooBSplineBases/d"
-e "/class ResponseFunction/d"
-e "/class RooTwoSidedCBShape/d"
"${GENERATED_FILE_PATH}"
COMMENT "Removing conflicting classes from generated rootmap"
)

# Define a custom target that depends on the custom command output
add_custom_target(
ModifyRootmapFile # Name of the custom target
DEPENDS "${GENERATED_FILE_PATH}.bak" # Ensure the custom command is executed
)

# Add the custom target as a dependency to another target to ensure it's built
add_dependencies(combine ModifyRootmapFile)
endif()

#####################
# Installation part #
#####################


# Install the dictionaries.
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/lib${LIBNAME}_rdict.pcm
${CMAKE_CURRENT_BINARY_DIR}/lib${LIBNAME}.rootmap
DESTINATION lib)

# Install the libraries and header files.
install(TARGETS ${LIBNAME}
LIBRARY DESTINATION lib
PUBLIC_HEADER DESTINATION include/HiggsAnalysis/CombinedLimit/interface
)
# dictionary requires classes.h to be in include path exactly as it was specified
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/HiggsAnalysis/CombinedLimit/src/classes.h
DESTINATION include/HiggsAnalysis/CombinedLimit/src)

# Install the "combine" executable in the bin directory.
install(TARGETS combine DESTINATION bin)

# Install the scripts like "text2workspace" to the bin directory.
install(DIRECTORY scripts/ DESTINATION bin)

# This block is commented out for now, while using the less sophisticated location below
# Check if the Python library installation directory is outside the install
# prefix. If it is, we error out because CMake should not install files outside
# the prefix. In the future, one can imagine to let the user choose where the
# Python libraries get installed in the prefix with a CMake configuration flag.
#find_package(Python COMPONENTS Interpreter Development) # To get the Python library install directory into Python_SITELIB
#cmake_path(IS_PREFIX CMAKE_INSTALL_PREFIX "${Python_SITELIB}" sitelib_in_prefix)
#if(NOT ${sitelib_in_prefix})
# message( FATAL_ERROR "Your Python library installation directory ${Python_SITELIB} "
# "is outside the install prefix ${CMAKE_INSTALL_PREFIX}! "
# "This is not supported for now. Consider changing the install prefix "
# "with the -DCMAKE_INSTALL_PREFIX:PATH=<path> cmake configuration option.")
#endif()
#
## The Python library installation directory relative to the install prefix.
#file(RELATIVE_PATH Python_SITELIB_IN_PREFIX ${CMAKE_INSTALL_PREFIX} ${Python_SITELIB})

set(Python_SITELIB_IN_PREFIX "python")


message (STATUS "Using Python install location:" ${Python_SITELIB_IN_PREFIX})
# The python package will be installed in such a way that the original
# CMSSW-style directory structure is kept, for maximal compatibility.
install(DIRECTORY python/ DESTINATION ${Python_SITELIB_IN_PREFIX}/HiggsAnalysis/CombinedLimit)

# Create empty __init__.py files in the Python package subdirectories such that
# the Python imports work.
file(TOUCH ${empty_init_py})
INSTALL(FILES ${empty_init_py} DESTINATION ${Python_SITELIB_IN_PREFIX}/HiggsAnalysis)
INSTALL(FILES ${empty_init_py} DESTINATION ${Python_SITELIB_IN_PREFIX}/HiggsAnalysis/CombinedLimit)
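Putting the comments at the top of this file together with its install rules, a build and install might look roughly like the following sketch (the StatAnalysis release number and the install prefix are illustrative, not fixed by the file):

```
export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
source $ATLAS_LOCAL_ROOT_BASE/user/atlasLocalSetup.sh; asetup StatAnalysis,0.3.2
mkdir build; cd build
cmake -DCMAKE_INSTALL_PREFIX=../install path/to/HiggsAnalysis-CombinedLimit
make -j4
make install   # installs the library, the combine executable, the scripts/ contents and the python package
```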
6 changes: 5 additions & 1 deletion README.md
@@ -4,6 +4,10 @@ HiggsAnalysis-CombinedLimit
### Official documentation

All documentation, including installation instructions, is hosted at
http://cms-analysis.github.io/HiggsAnalysis-CombinedLimit/
http://cms-analysis.github.io/HiggsAnalysis-CombinedLimit/latest

The source code of this documentation can be found in the `docs/` folder in this repository.

### Publication

The `Combine` tool publication can be found [here](https://arxiv.org/abs/2404.06614). Please consider citing this reference if you use the `Combine` tool.
16 changes: 8 additions & 8 deletions bin/combine.cpp
@@ -90,11 +90,11 @@ int main(int argc, char **argv) {
combiner.statOptions().add_options()
("toys,t", po::value<int>(&runToys)->default_value(0), "Number of Toy MC extractions")
("seed,s", po::value<int>(&seed)->default_value(123456), "Toy MC random seed")
("hintMethod,H", po::value<string>(&whichHintMethod)->default_value(""), "Run first this method to provide a hint on the result")
("hintMethod,H", po::value<string>(&whichHintMethod)->default_value(""), "First run this method to provide a hint on the result")
;
combiner.ioOptions().add_options()
("name,n", po::value<string>(&name)->default_value("Test"), "Name of the job, affects the name of the output tree")
("mass,m", po::value<float>(&iMass)->default_value(120.), "Higgs mass to store in the output tree")
("mass,m", po::value<float>(&iMass)->default_value(120.), "Mass to store in the output tree")
("dataset,D", po::value<string>(&dataset)->default_value("data_obs"), "Name of the dataset for observed limit - use this to replace dataset in workspace for example with a toy dataset. Format as file:workspace:object or file:object")
("dataMapName", po::value<string>(&dataMapName)->default_value("data_obs"), "Name of the dataset for observed limit pattern in the datacard")
("toysFile", po::value<string>(&toysFile)->default_value(""), "Read toy mc or other intermediate results from this file")
@@ -186,15 +186,15 @@ int main(int argc, char **argv) {
}

if (seed == -1) {
if (verbose > 0) std::cout << ">>> Using OpenSSL to get a really random seed " << std::endl;
if (verbose > 0) std::cout << ">>> Using OpenSSL to get a truly random seed " << std::endl;
FILE *rpipe = popen("openssl rand 8", "r");
if (rpipe == 0) { std::cout << "Error when running 'openssl rand 8'" << std::endl; return 2101; }
if (fread(&seed, sizeof(int), 1, rpipe) != 1) {
std::cout << "Error when reading from 'openssl rand 8'" << std::endl; return 2102;
}
std::cout << ">>> Used OpenSSL to get a really random seed " << seed << std::endl;
std::cout << ">>> Used OpenSSL to get a truly random seed " << seed << std::endl;
} else {
std::cout << ">>> random number generator seed is " << seed << std::endl;
std::cout << ">>> Random number generator seed is " << seed << std::endl;
}
RooRandom::randomGenerator()->SetSeed(seed);

@@ -232,7 +232,7 @@ int main(int argc, char **argv) {
combiner.applyOptions(vm);
CascadeMinimizer::applyOptions(vm);
} catch (std::exception &ex) {
cerr << "Error when configuring the combiner:\n\t" << ex.what() << std::endl;
cerr << "Error when configuring combine:\n\t" << ex.what() << std::endl;
return 2001;
}

@@ -243,7 +243,7 @@ int main(int argc, char **argv) {
cerr << "Error when configuring the algorithm " << whichMethod << ":\n\t" << ex.what() << std::endl;
return 2002;
}
cout << ">>> method used is " << whichMethod << endl;
cout << ">>> Method used is " << whichMethod << endl;

if (!whichHintMethod.empty()) {
map<string, LimitAlgo *>::const_iterator it_hint = methods.find(whichHintMethod);
@@ -255,7 +255,7 @@ int main(int argc, char **argv) {
}
hintAlgo = it_hint->second;
hintAlgo->applyDefaultOptions();
cout << ">>> method used to hint where the upper limit is " << whichHintMethod << endl;
cout << ">>> Method used to hint where the upper limit is " << whichHintMethod << endl;
}

TString fileName = "higgsCombine" + name + "."+whichMethod+"."+massName+toyName+"root";
31 changes: 29 additions & 2 deletions contributing.md
@@ -82,10 +82,10 @@ The documentation files are all under the `docs/` folder.
Which pages get included in the site, and other configuration details are set in the `mkdocs.yml` file.

In order to check the documentation rendering (features such as latex math rendering, etc) locally, you can generate the site on your local computer and check it in your browser.
To do so, after [installing mkdocs](https://www.mkdocs.org/getting-started/) and [pymdown extensions](https://facelessuser.github.io/pymdown-extensions/installation/) and the [cinder theme](https://sourcefoundry.org/cinder/) via:
To do so, after [installing mkdocs](https://www.mkdocs.org/getting-started/), [pymdown extensions](https://facelessuser.github.io/pymdown-extensions/installation/), the [material theme](https://squidfunk.github.io/mkdocs-material/), and [mike](https://github.com/jimporter/mike) (for versioning) via:

```
python -m pip install mkdocs pymdown-extensions mkdocs-cinder
python -m pip install mkdocs pymdown-extensions mkdocs-cinder mike
```

you can do:
@@ -97,10 +97,37 @@ mkdocs serve

from the main repository directory. mkdocs will then print a link you can open to check the page generated in your browser.

This will serve the current version of the documentation. For the versioned documentation, where each set of pages corresponds to a particular tag of the code, mike is used. Mike pushes commits to the `gh-pages` branch of the repository and makes them accessible as tagged versions of the docs. To add a new tagged version of the documentation, run:

```
mike deploy <version> <alias>
```

Note that this will only affect your *local* copy of the gh-pages documentation. In order to view the pages locally you can use `mike serve` instead of `mkdocs serve`.
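As an illustrative local test (the version and alias names below are placeholders), a dry run of a versioned deployment could look like:

```
mike deploy v9.2.X latest   # build the docs and commit them to your local gh-pages branch
mike serve                  # browse the versioned site at the address mike prints
```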

**NOTE:** mkdocs builds that use internal links (or images, etc.) with absolute paths will work for local deployment, but will break when deployed to the public documentation pages.
Please ensure you use relative paths. Currently, this is the only known feature where the behaviour differs between local mkdocs and public page deployment.
If you'd like to test the deployment directly, the suggested method is to set up a docs page using your personal github account; this should mimic the exact settings of the official page.

## Github Actions

There are several automated processes defined in the [`.github/workflows`](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/tree/main/.github/workflows) folder which are triggered for every pull request or when a new commit is pushed into the `main` branch.

- `CI with CVMFS` workflow defined in [.github/workflows/cvmfs-ci.yml](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/cvmfs-ci.yml) compiles <span style="font-variant:small-caps;">Combine</span> within a CMSSW release set up from `cvmfs`. It also runs several test <span style="font-variant:small-caps;">Combine</span> commands using tutorial datacards contained in the repository.
- `CI/CD` workflow defined in [.github/workflows/ci.yml](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/ci.yml) runs:

1. [Linting checks](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/ci.yml#L11) with `flake8` and `black`.
2. <span style="font-variant:small-caps;">Combine</span> compilation in a conda environment with several combinations of (ROOT, python) versions: (6.26.4, 3.10), (6.24, 3.9), (6.22, 3.9) and (6.22, 2.7.18).

- `Docs` workflow defined in [.github/workflows/docs.yml](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/docs.yml) manages the documentation deployment with `mike` as described above.

In addition there are two conditional workflows that can be triggered with [github labels](https://docs.github.com/en/issues/using-labels-and-milestones-to-track-work/managing-labels):

- `trigger gitlab job` workflow defined in [.github/workflows/gitlabci.yml](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/gitlabci.yml) can be triggered with the "safe to test" label. Please note that this gitlab workflow uses `CMSSW_10_2_13`, so it might fail with later versions of <span style="font-variant:small-caps;">Combine</span>, and it is likely to be deprecated soon.

- `Port to <branch>` workflows defined in [.github/workflows/port_to_branch.yml](https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit/blob/main/.github/workflows/port_to_branch.yml); for example, the `Port to 112x-comb2022` workflow can be triggered with the "port to 112x-comb2022" label to port the changes from a pull request to the `112x-comb2022` branch and open a new pull request there.


## Big Contributions

We welcome large contributions to <span style="font-variant:small-caps;">Combine</span>.
