Skip to content

Commit

Permalink
update FCC-LLP
Browse files Browse the repository at this point in the history
  • Loading branch information
jalimena committed Apr 18, 2023
2 parents ba9f4c0 + 8addd23 commit abdab42
Show file tree
Hide file tree
Showing 31 changed files with 3,613 additions and 3,062 deletions.
24 changes: 2 additions & 22 deletions .github/workflows/format.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ on: [push, pull_request]
jobs:
format:
runs-on: ubuntu-latest
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v3
with:
Expand All @@ -15,21 +13,6 @@ jobs:
- name: Start container
run: |
docker run -it --name CI_container -v ${GITHUB_WORKSPACE}:/Package -v /cvmfs:/cvmfs:shared -d ghcr.io/aidasoft/centos7:latest /bin/bash
- name: Setup Spack
run: |
docker exec CI_container /bin/bash -c 'mkdir Spack
cd Spack
git clone https://github.com/key4hep/spack
git clone https://github.com/key4hep/key4hep-spack'
- name: Setup LLVM
run: |
docker exec CI_container /bin/bash -c 'cd ./Package
source ../Spack/spack/share/spack/setup-env.sh
source ../Spack/key4hep-spack/environments/key4hep-release-user/setup_clingo_centos7.sh
spack env activate ../Spack/key4hep-spack/environments/key4hep-release-user
spack add llvm
spack concretize -f'
# spack install'
- name: Add upstream
run: |
docker exec CI_container /bin/bash -c 'cd ./Package
Expand All @@ -38,12 +21,9 @@ jobs:
- name: Run formatter
run: |
docker exec CI_container /bin/bash -c 'cd ./Package
source ../Spack/spack/share/spack/setup-env.sh
source ../Spack/key4hep-spack/environments/key4hep-release-user/setup_clingo_centos7.sh
spack env activate ../Spack/key4hep-spack/environments/key4hep-release-user
spack load llvm
source /cvmfs/sft.cern.ch/lcg/contrib/clang/14.0.6/x86_64-centos7/setup.sh
git clang-format --style=file $(git merge-base upstream/master HEAD)'
- name: Check cleanliness
run: |
docker exec CI_container /bin/bash -c 'cd ./Package
git diff --exit-code'
git diff'
2 changes: 1 addition & 1 deletion .zenodo.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"creators": [
{
"affiliation": "CERN",
"affiliation": "Ecole Polytechnique Fédérale de Lausanne",
"name": "Helsens, Clement",
"orcid": "0000-0002-9243-7554"
},
Expand Down
15 changes: 14 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ include(CTest)
#--- options ------------------------------------------------------------------


set(WITH_ACTS AUTO CACHE STRING "Build analyzers that need Acts")
set(WITH_ACTS OFF CACHE STRING "Build analyzers that need Acts")
set_property(CACHE WITH_ACTS PROPERTY STRINGS AUTO ON OFF)

set(WITH_DD4HEP AUTO CACHE STRING "Build analyzers that need DD4hep")
Expand Down Expand Up @@ -62,6 +62,8 @@ find_package(podio)
set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
find_package(FastJet)

find_package( Delphes REQUIRED )


if(WITH_ACTS)
find_package( Acts COMPONENTS Core )
Expand All @@ -88,6 +90,17 @@ if(WITH_DD4HEP)
endif()
endif()

if(WITH_ONNX)
find_package(ONNXRuntime)
if(ONNXRuntime_FOUND)

elseif(WITH_ONNX STREQUAL AUTO)
message(WARNING "ONNXRuntime not found. Skipping ONNX-dependent analyzers.")
set(WITH_ONNX OFF)
else()
message(FATAL_ERROR "Failed to locate ONNXRuntime!")
endif()
endif()

if(WITH_ONNX AND BUILD_TESTING) # currently these files are only needed by ONNX-parts
# Grab the test files into a cached directory
Expand Down
44 changes: 39 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,18 @@ ROOT dataframe documentation is available

## Getting started

In order to use the FCC analysers within ROOT dataframe, a dictionary needs to
be built and put into `LD_LIBRARY_PATH` (this happens in `setup.sh`). The
following needs to be done when running local code and for developers.
In order to use the FCC analyzers within ROOT RDataFrame, a dictionary needs to
be built and put into `LD_LIBRARY_PATH`. In order to build and load FCCAnalyses
with the default options one needs to run the following two commands:

```shell
source ./setup.sh
fccanalysis build
```

FCCAnalyses is a CMake-based project and any customizations can be provided in
classic CMake style; the following commands are equivalent to the default
version of FCCAnalyses:

```shell
source ./setup.sh
Expand All @@ -65,6 +74,21 @@ cd ..
> Each time changes are made in the C++ code, for example in
> `analyzers/dataframe/` please do not forget to re-compile :)
>
> To cleanly recompile the default version of FCCAnalyses one can use
> `fccanalysis build --clean-build`.
In order to make it possible to keep developing an analysis against a
well-defined Key4hep stack, the sub-command `fccanalysis pin` is provided. One
can pin an analysis with
```
source setup.sh
fccanalysis pin
```

To remove the pin run
```
fccanalysis pin --clear
```


## Generalities
Expand Down Expand Up @@ -206,6 +230,16 @@ In an attempt to ease the development of new physics case studies, such as for t
The preferred style of the C++ code in FCCAnalyses is LLVM, which is checked
by a CI job.

Currently `clang-format` is not available in the Key4hep stack, but one way to
obtain a recent version of it is through downloading of the
Currently `clang-format` is not available in the Key4hep stack, but one can
obtain a suitable version of it from CVMFS, thanks to LCG:
```
source /cvmfs/sft.cern.ch/lcg/contrib/clang/14.0.6/x86_64-centos7/setup.sh
```

Then to apply formatting to a given file:
```
clang-format -i -style=file /path/to/file.cpp
```

Another way to obtain a recent version of `clang-format` is through downloading
[Key4hep Spack instance](https://key4hep.github.io/key4hep-doc/spack-build-instructions-for-librarians/spack-setup.html#downloading-a-spack-instance).
180 changes: 90 additions & 90 deletions addons/FastJet/python/jetClusteringHelper.py
Original file line number Diff line number Diff line change
@@ -1,90 +1,90 @@
import json
import ROOT


class ExclusiveJetClusteringHelper:
    """Builds the RDataFrame column definitions needed to run ee-kt jet
    clustering on a reconstructed-particle collection and to expose the
    resulting jet observables as output branches.

    The helper only assembles C++ expression strings; nothing is executed
    until ``define`` is applied to an RDataFrame.
    """

    def __init__(self, coll, njets, tag=""):
        # Name of the input reconstructed-particle collection and the
        # requested number of jets.
        self.input_coll = coll
        self.njets = njets

        # A non-empty tag becomes a "_<tag>" suffix appended to every
        # derived column name, so several helpers can coexist in one frame.
        self.tag = "_{}".format(tag) if tag != "" else tag
        suffix = self.tag

        # Intermediate column names (leading underscore marks the raw
        # clustering structs that are unpacked into plain columns below).
        pseudo_jets = "pjetc{}".format(suffix)
        raw_jets = "_jet{}".format(suffix)
        jets = "jet{}".format(suffix)
        raw_consts = "_jetc{}".format(suffix)
        consts = "jetc{}".format(suffix)

        # Per-jet observables that end up as output branches.
        self.jet_obs = {
            obs: "jet_{}{}".format(obs, suffix)
            for obs in ("p", "e", "mass", "phi", "theta", "nconst")
        }
        event_njet = "event_njet{}".format(suffix)

        self.jets = jets
        self.constituents = consts

        # Ordered mapping: column name -> C++ expression for df.Define().
        self.definition = {}

        # Single-particle kinematic properties of the input collection.
        for var, getter in (
            ("px", "get_px"),
            ("py", "get_py"),
            ("pz", "get_pz"),
            ("e", "get_e"),
            ("m", "get_mass"),
            ("q", "get_charge"),
        ):
            key = "part{}_{}".format(suffix, var)
            self.definition[key] = "ReconstructedParticle::{}({})".format(getter, coll)

        # Build fastjet pseudo-jets from the four-momentum components.
        self.definition[pseudo_jets] = (
            "JetClusteringUtils::set_pseudoJets("
            "part{0}_px, part{0}_py, part{0}_pz, part{0}_e)".format(suffix)
        )

        # Run ee-kt clustering on all reconstructed particles.
        # NOTE(review): the original comment said "inclusive clustering",
        # but the class name and the njets argument suggest exclusive
        # clustering into exactly `njets` jets — confirm against the
        # JetClustering::clustering_ee_kt documentation. Arguments are
        # kept verbatim from the original configuration.
        self.definition[raw_jets] = "JetClustering::clustering_ee_kt(2, {}, 1, 0)({})".format(
            njets, pseudo_jets
        )

        # Unpack jets and their constituents from the clustering struct.
        self.definition[jets] = "JetClusteringUtils::get_pseudoJets({})".format(raw_jets)
        self.definition[raw_consts] = "JetClusteringUtils::get_constituents({})".format(raw_jets)
        self.definition[consts] = "JetConstituentsUtils::build_constituents_cluster({}, {})".format(
            coll, raw_consts
        )

        # Jet observables computed from the unpacked jets/constituents.
        for obs, fn in (
            ("p", "get_p"),
            ("e", "get_e"),
            ("mass", "get_m"),
            ("phi", "get_phi"),
            ("theta", "get_theta"),
        ):
            self.definition[self.jet_obs[obs]] = "JetClusteringUtils::{}({})".format(fn, jets)
        self.definition[self.jet_obs["nconst"]] = "JetConstituentsUtils::count_consts({})".format(
            consts
        )
        self.definition[event_njet] = "JetConstituentsUtils::count_jets({})".format(consts)

    def define(self, df):
        """Apply every stored Define call to df and return the augmented frame."""
        for name, expression in self.definition.items():
            df = df.Define(name, expression)
        return df

    def outputBranches(self):
        """Branches to persist: jet observables plus per-event counters."""
        branches = list(self.jet_obs.values())
        branches.extend(name for name in self.definition if "event_" in name)
        return branches
import json
import ROOT


class ExclusiveJetClusteringHelper:
    """Builds the RDataFrame column definitions needed to run ee-kt jet
    clustering on a reconstructed-particle collection and to expose the
    resulting jet observables as output branches.

    The helper only assembles C++ expression strings; nothing is executed
    until ``define`` is applied to an RDataFrame.
    """

    def __init__(self, coll, njets, tag=""):
        # Name of the input reconstructed-particle collection and the
        # requested number of jets.
        self.input_coll = coll
        self.njets = njets

        # A non-empty tag becomes a "_<tag>" suffix appended to every
        # derived column name, so several helpers can coexist in one frame.
        self.tag = "_{}".format(tag) if tag != "" else tag
        suffix = self.tag

        # Intermediate column names (leading underscore marks the raw
        # clustering structs that are unpacked into plain columns below).
        pseudo_jets = "pjetc{}".format(suffix)
        raw_jets = "_jet{}".format(suffix)
        jets = "jet{}".format(suffix)
        raw_consts = "_jetc{}".format(suffix)
        consts = "jetc{}".format(suffix)

        # Per-jet observables that end up as output branches.
        self.jet_obs = {
            obs: "jet_{}{}".format(obs, suffix)
            for obs in ("p", "e", "mass", "phi", "theta", "nconst")
        }
        event_njet = "event_njet{}".format(suffix)

        self.jets = jets
        self.constituents = consts

        # Ordered mapping: column name -> C++ expression for df.Define().
        self.definition = {}

        # Single-particle kinematic properties of the input collection.
        for var, getter in (
            ("px", "get_px"),
            ("py", "get_py"),
            ("pz", "get_pz"),
            ("e", "get_e"),
            ("m", "get_mass"),
            ("q", "get_charge"),
        ):
            key = "part{}_{}".format(suffix, var)
            self.definition[key] = "ReconstructedParticle::{}({})".format(getter, coll)

        # Build fastjet pseudo-jets from the four-momentum components.
        self.definition[pseudo_jets] = (
            "JetClusteringUtils::set_pseudoJets("
            "part{0}_px, part{0}_py, part{0}_pz, part{0}_e)".format(suffix)
        )

        # Run ee-kt clustering on all reconstructed particles.
        # NOTE(review): the original comment said "inclusive clustering",
        # but the class name and the njets argument suggest exclusive
        # clustering into exactly `njets` jets — confirm against the
        # JetClustering::clustering_ee_kt documentation. Arguments are
        # kept verbatim from the original configuration.
        self.definition[raw_jets] = "JetClustering::clustering_ee_kt(2, {}, 1, 0)({})".format(
            njets, pseudo_jets
        )

        # Unpack jets and their constituents from the clustering struct.
        self.definition[jets] = "JetClusteringUtils::get_pseudoJets({})".format(raw_jets)
        self.definition[raw_consts] = "JetClusteringUtils::get_constituents({})".format(raw_jets)
        self.definition[consts] = "JetConstituentsUtils::build_constituents_cluster({}, {})".format(
            coll, raw_consts
        )

        # Jet observables computed from the unpacked jets/constituents.
        for obs, fn in (
            ("p", "get_p"),
            ("e", "get_e"),
            ("mass", "get_m"),
            ("phi", "get_phi"),
            ("theta", "get_theta"),
        ):
            self.definition[self.jet_obs[obs]] = "JetClusteringUtils::{}({})".format(fn, jets)
        self.definition[self.jet_obs["nconst"]] = "JetConstituentsUtils::count_consts({})".format(
            consts
        )
        self.definition[event_njet] = "JetConstituentsUtils::count_jets({})".format(consts)

    def define(self, df):
        """Apply every stored Define call to df and return the augmented frame."""
        for name, expression in self.definition.items():
            df = df.Define(name, expression)
        return df

    def outputBranches(self):
        """Branches to persist: jet observables plus per-event counters."""
        branches = list(self.jet_obs.values())
        branches.extend(name for name in self.definition if "event_" in name)
        return branches
21 changes: 18 additions & 3 deletions analyzers/dataframe/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,19 @@ find_package(Vdt)

message(STATUS "includes-------------------------- dataframe edm4hep: ${EDM4HEP_INCLUDE_DIRS}")
message(STATUS "includes-------------------------- dataframe podio : ${podio_INCLUDE_DIR}")
message(STATUS "includes-------------------------- dataframe delphes: ${DELPHES_INCLUDE_DIR}")
message(STATUS "includes-------------------------- dataframe delphes EXt TrkCov: ${DELPHES_EXTERNALS_TKCOV_INCLUDE_DIR}")
message(STATUS "includes-------------------------- dataframe delphes EXt: ${DELPHES_EXTERNALS_INCLUDE_DIR}")

include_directories(${DELPHES_INCLUDE_DIR}
${DELPHES_EXTERNALS_INCLUDE_DIR}
${DELPHES_EXTERNALS_TKCOV_INCLUDE_DIR}
)


file(GLOB sources src/*.cc)
file(GLOB headers RELATIVE ${CMAKE_CURRENT_LIST_DIR} FCCAnalyses/*.h)

message(STATUS "includes headers ${headers}")
message(STATUS "includes sources ${sources}")

list(FILTER headers EXCLUDE REGEX "LinkDef.h")
if(NOT WITH_DD4HEP)
list(FILTER headers EXCLUDE REGEX "CaloNtupleizer.h")
Expand All @@ -22,6 +27,8 @@ endif()
if(NOT WITH_ONNX)
list(FILTER headers EXCLUDE REGEX "JetFlavourUtils.h")
list(FILTER sources EXCLUDE REGEX "JetFlavourUtils.cc")
list(FILTER headers EXCLUDE REGEX "WeaverUtils.h")
list(FILTER sources EXCLUDE REGEX "WeaverUtils.cc")
endif()

if(NOT WITH_ACTS)
Expand All @@ -31,6 +38,8 @@ if(NOT WITH_ACTS)
list(FILTER sources EXCLUDE REGEX "VertexFinderActs.cc")
endif()

message(STATUS "includes headers ${headers}")
message(STATUS "includes sources ${sources}")

message(STATUS "CMAKE_CURRENT_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}")
message(STATUS "CMAKE_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_INCLUDEDIR}")
Expand All @@ -41,8 +50,13 @@ target_include_directories(FCCAnalyses PUBLIC
$<BUILD_INTERFACE:${CMAKE_SOURCE_DIR}/addons>
$<INSTALL_INTERFACE:include>
${VDT_INCLUDE_DIR}
${DELPHES_INCLUDE_DIR}
${DELPHES_EXTERNALS_INCLUDE_DIR}
${DELPHES_EXTERNALS_TKCOV_INCLUDE_DIR}
)

message(STATUS " ====== DELPHES LIBRARY = " ${DELPHES_LIBRARY} )
message(STATUS " ====== DELPHES_EXTERNALS_TKCOV_INCLUDE_DIR = " ${DELPHES_EXTERNALS_TKCOV_INCLUDE_DIR} )


target_link_libraries(FCCAnalyses
Expand All @@ -53,6 +67,7 @@ target_link_libraries(FCCAnalyses
EDM4HEP::edm4hep
EDM4HEP::edm4hepDict
podio::podio
${DELPHES_LIBRARY}
${ADDONS_LIBRARIES}
gfortran # todo: why necessary?
)
Expand Down
Loading

0 comments on commit abdab42

Please sign in to comment.