diff --git a/HEPML.bib b/HEPML.bib index 422d685..c11437b 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -1,4 +1,486 @@ # HEPML Papers +% October 31, 2023 +@inproceedings{Matchev:2023mii, + author = "Matchev, Konstantin T. and Matcheva, Katia and Ramond, Pierre and Verner, Sarunas", + title = "{Seeking Truth and Beauty in Flavor Physics with Machine Learning}", + booktitle = "{37th Conference on Neural Information Processing Systems}", + eprint = "2311.00087", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +% October 30, 2023 +@article{Choi:2023rqg, + author = "Choi, Eugene and Seong, Rak-Kyeong", + title = "{Machine Learning Regularization for the Minimum Volume Formula of Toric Calabi-Yau 3-folds}", + eprint = "2310.19276", + archivePrefix = "arXiv", + primaryClass = "hep-th", + reportNumber = "UNIST-MTH-23-RS-05", + month = "10", + year = "2023" +} + +@article{Halverson:2023ndu, + author = "Halverson, James and Ruehle, Fabian", + title = "{Metric Flows with Neural Networks}", + eprint = "2310.19870", + archivePrefix = "arXiv", + primaryClass = "hep-th", + month = "10", + year = "2023" +} + +% October 26, 2023 +@article{Wang:2023pqx, + author = "Wang, Daohan and Cho, Jin-Hwan and Kim, Jinheung and Lee, Soojin and Sanyal, Prasenjit and Song, Jeonghyeon", + title = "{Probing Light Fermiophobic Higgs Boson via diphoton jets at the HL-LHC}", + eprint = "2310.17741", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +% October 24, 2023 +@article{Devlin:2023jzp, + author = "Devlin, Peter and Qiu, Jian-Wei and Ringer, Felix and Sato, Nobuo", + title = "{Diffusion model approach to simulating electron-proton scattering events}", + eprint = "2310.16308", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "JLAB-THY-23-3945", + month = "10", + year = "2023" +} + +@inproceedings{Bogatskiy:2023fug, + author = "Bogatskiy, Alexander and Hoffman, Timothy and Offermann, Jan T.", + 
title = "{19 Parameters Is All You Need: Tiny Neural Networks for Particle Physics}", + booktitle = "{37th Conference on Neural Information Processing Systems}", + eprint = "2310.16121", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +@article{Alawadhi:2023gxa, + author = "Alawadhi, R. and Angella, D. and Leonardo, A. and Gherardini, T. Schettini", + title = "{Constructing and Machine Learning Calabi-Yau Five-folds}", + eprint = "2310.15966", + archivePrefix = "arXiv", + primaryClass = "hep-th", + month = "10", + year = "2023" +} + +% October 23, 2023 +@article{Grefsrud:2023dad, + author = "Grefsrud, Aurora Singstad and Buanes, Trygve and Koutroulis, Fotis and Lipniacka, Anna and Maselek, Rafal and Papaefstathiou, Andreas and Sakurai, Kazuki and Sjursen, Therese B. and Slazyk, Igor", + title = "{Machine Learning Classification of Sphalerons and Black Holes at the LHC}", + eprint = "2310.15227", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +% October 19, 2023 +@article{Freytsis:2023cjr, + author = "Freytsis, Marat and Perelstein, Maxim and San, Yik Chuen", + title = "{Anomaly Detection in Presence of Irrelevant Features}", + eprint = "2310.13057", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +@article{Smith:2023ssh, + author = "Smith, Rachel E. C. 
and Ochoa, In\^es and In\'acio, R\'uben and Shoemaker, Jonathan and Kagan, Michael", + title = "{Differentiable Vertex Fitting for Jet Flavour Tagging}", + eprint = "2310.12804", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "10", + year = "2023" +} + +@inproceedings{Tomiya:2023jdy, + author = "Tomiya, Akio and Nagai, Yuki", + title = "{Equivariant Transformer is all you need}", + booktitle = "{40th International Symposium on Lattice Field Theory}", + eprint = "2310.13222", + archivePrefix = "arXiv", + primaryClass = "hep-lat", + month = "10", + year = "2023" +} + +% October 18, 2023 +@article{BOREXINO:2023pcv, + author = "{The BOREXINO Collaboration}", + title = "{Novel techniques for alpha/beta pulse shape discrimination in Borexino}", + eprint = "2310.11826", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "10", + year = "2023" +} + +% October 16, 2023 +@inproceedings{Baron:2023yhw, + author = "Baro\v{n}, Petr and Kvita, Ji\v{r}\'\i{} and P\v{r}\'\i{}vara, Radek and Tome\v{c}ek, Jan and Vod\'ak, Rostislav", + title = "{Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes}", + booktitle = "{16th International Workshop on Top Quark Physics}", + eprint = "2310.13009", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "10", + year = "2023" +} + +% October 14, 2023 +@article{Song:2023ceh, + author = "Song, Siyuan and Chen, Jiyuan and Liu, Jianbei and Liu, Yong and Qi, Baohua and Shi, Yukun and Wang, Jiaxuan and Wang, Zhen and Yang, Haijun", + title = "{Study of residual artificial neural network for particle identification in the CEPC high-granularity calorimeter prototype}", + eprint = "2310.09489", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "10", + year = "2023" +} + +% October 13, 2023 +@article{Gavranovic:2023oam, + author = "Gavranovi\v{c}, Jan and Ker\v{s}evan, Borut Paul", + title = "{Systematic Evaluation of 
Generative Machine Learning Capability to Simulate Distributions of Observables at the Large Hadron Collider}", + eprint = "2310.08994", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +% October 12, 2023 +@article{Alvestad:2023jgl, + author = "Alvestad, Daniel and Rothkopf, Alexander and Sexty, D\'enes", + title = "{Lattice real-time simulations with learned optimal kernels}", + eprint = "2310.08053", + archivePrefix = "arXiv", + primaryClass = "hep-lat", + month = "10", + year = "2023" +} + +% October 11, 2023 +@inproceedings{Schroff:2023see, + author = "Schroff, Jaffae and Ju, Xiangyang", + title = "{Event Generator Tuning Incorporating Systematic Uncertainty}", + booktitle = "{26th International Conference on Computing in High Energy \& Nuclear Physics}", + eprint = "2310.07566", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +@inproceedings{Pham:2023bnl, + author = "Pham, Tuan Minh and Ju, Xiangyang", + title = "{Simulation of Hadronic Interactions with Deep Generative Models}", + booktitle = "{26th International Conference on Computing in High Energy \& Nuclear Physics}", + eprint = "2310.07553", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} + +@article{Heimel:2023mvw, + author = "Heimel, Theo and Huetsch, Nathan and Winterhalder, Ramon and Plehn, Tilman and Butter, Anja", + title = "{Precision-Machine Learning for the Matrix Element Method}", + eprint = "2310.07752", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "IRMP-CP3-23-55", + month = "10", + year = "2023" +} + +% October 10, 2023 +@article{Buhmann:2023acn, + author = "Buhmann, Erik and Ewen, Cedric and Kasieczka, Gregor and Mikuni, Vinicius and Nachman, Benjamin and Shih, David", + title = "{Full Phase Space Resonant Anomaly Detection}", + eprint = "2310.06897", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "10", + year = "2023" +} 
+ +% October 9, 2023 +@article{Yang:2023rbg, + author = "Yang, Zekun and others", + title = "{First attempt of directionality reconstruction for atmospheric neutrinos in a large homogeneous liquid scintillator detector}", + eprint = "2310.06281", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "10", + year = "2023" +} + +% October 5, 2023 +@inproceedings{Albandea:2023ais, + author = "Albandea, David and Del Debbio, Luigi and Hern\'andez, Pilar and Kenway, Richard and Rossney, Joe Marsh and Ramos, Alberto", + title = "{Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices}", + booktitle = "{40th International Symposium on Lattice Field Theory}", + eprint = "2310.03381", + archivePrefix = "arXiv", + primaryClass = "hep-lat", + reportNumber = "IFIC/23-44", + month = "10", + year = "2023" +} + +% October 3, 2023 +@article{Yoo:2023lxy, + author = "Yoo, Jieun and others", + title = "{Smart pixel sensors: towards on-sensor filtering of pixel clusters with deep learning}", + eprint = "2310.02474", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + reportNumber = "FERMILAB-PUB-23-577-PPD", + month = "10", + year = "2023" +} + +% October 2, 2023 +@article{Acosta:2023nuw, + author = "Acosta, Fernando Torales and Karki, Bishnu and Karande, Piyush and Angerami, Aaron and Arratia, Miguel and Barish, Kenneth and Milton, Ryan and Mor\'an, Sebasti\'an and Nachman, Benjamin and Sinha, Anshuman", + title = "{The Optimal use of Segmentation for Sampling Calorimeters}", + eprint = "2310.04442", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "10", + year = "2023" +} + +@article{Lay:2023boz, + author = "Lay, Daniel and Flynn, Eric and Giuliani, Samuel A. 
and Nazarewicz, Witold and Neufcourt, Le\'o", + title = "{Neural Network Emulation of Spontaneous Fission}", + eprint = "2310.01608", + archivePrefix = "arXiv", + primaryClass = "nucl-th", + month = "10", + year = "2023" +} + +% September 30, 2023 +@article{Aehle:2023wwi, + author = "Aehle, Max and others", + title = "{Progress in End-to-End Optimization of Detectors for Fundamental Physics with Differentiable Programming}", + eprint = "2310.05673", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + reportNumber = "FERMILAB-PUB-23-608-CSAID-PPD", + month = "9", + year = "2023" +} + +% September 29, 2023 +@article{Konar:2023ptv, + author = "Konar, Partha and Ngairangbam, Vishal S. and Spannowsky, Michael", + title = "{Hypergraphs in LHC Phenomenology -- The Next Frontier of IRC-Safe Feature Extraction}", + eprint = "2309.17351", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "IPPP/23/53", + month = "9", + year = "2023" +} + +@article{Buhmann:2023zgc, + author = "Buhmann, Erik and Ewen, Cedric and Faroughy, Darius A. 
and Golling, Tobias and Kasieczka, Gregor and Leigh, Matthew and Qu\'etant, Guillaume and Raine, John Andrew and Sengupta, Debajyoti and Shih, David", + title = "{EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion}", + eprint = "2310.00049", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +% September 27, 2023 +@article{Bright-Thonney:2023sqf, + author = "Bright-Thonney, Samuel and Harris, Philip and McCormack, Patrick and Rothman, Simon", + title = "{Chained Quantile Morphing with Normalizing Flows}", + eprint = "2309.15912", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +% September 25, 2023 +@inproceedings{Mieskolainen:2023hkz, + author = "Mieskolainen, Mikael", + title = "{HyperTrack: Neural Combinatorics for High Energy Physics}", + booktitle = "{26th International Conference on Computing in High Energy \& Nuclear Physics}", + eprint = "2309.14113", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +@article{Larkoski:2023xam, + author = "Larkoski, Andrew J.", + title = "{Binary Discrimination Through Next-to-Leading Order}", + eprint = "2309.14417", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +% September 22, 2023 +@article{Bickendorf:2023nej, + author = "Bickendorf, Gerrit and Drees, Manuel and Kasieczka, Gregor and Krause, Claudius and Shih, David", + title = "{Combining Resonant and Tail-based Anomaly Detection}", + eprint = "2309.12918", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +@article{Finke:2023ltw, + author = {Finke, Thorben and Hein, Marie and Kasieczka, Gregor and Kr\"amer, Michael and M\"uck, Alexander and Prangchaikul, Parada and Quadfasel, Tobias and Shih, David and Sommerhalder, Manuel}, + title = "{Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection}", + eprint = "2309.13111", + 
archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "TTK-23-26", + month = "9", + year = "2023" +} + +@inproceedings{Bein:2023ylt, + author = "Bein, Samuel and Connor, Patrick and Pedro, Kevin and Schleper, Peter and Wolf, Moritz", + collaboration = "CMS", + title = "{Refining fast simulation using machine learning}", + booktitle = "{26th International Conference on Computing in High Energy \& Nuclear Physics}", + eprint = "2309.12919", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + reportNumber = "CMS CR-2023/128, FERMILAB-CONF-23-537-CMS-CSAID-PPD", + month = "9", + year = "2023" +} + +% September 21, 2023 +@article{Tung:2023lkv, + author = "Tung, Y. -C. and others", + title = "{Suppression of Neutron Background using Deep Neural Network and Fourier Frequency Analysis at the KOTO Experiment}", + eprint = "2309.12063", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "9", + year = "2023" +} + +@article{Akar:2023puf, + author = "Akar, Simon and Elashri, Mohamed and Garg, Rocky Bala and Kauffman, Elliott and Peters, Michael and Schreiner, Henry and Sokoloff, Michael and Tepe, William and Tompkins, Lauren", + title = "{Advances in developing deep neural networks for finding primary vertices in proton-proton collisions at the LHC}", + eprint = "2309.12417", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "9", + year = "2023" +} + +% September 20, 2023 +@article{Belfkir:2023vpo, + author = "Belfkir, Mohamed and Jueid, Adil and Nasri, Salah", + title = "{Boosting dark matter searches at muon colliders with Machine Learning: the mono-Higgs channel as a case study}", + eprint = "2309.11241", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "CTPU-PTC-23-37", + month = "9", + year = "2023" +} + +@article{Guo:2023mhf, + author = "Guo, Ling-Jun and Xiong, Jia-Ying and Ma, Yao and Ma, Yong-Liang", + title = "{Insights into neutron star equation of state by machine learning}", + eprint = 
"2309.11227", + archivePrefix = "arXiv", + primaryClass = "nucl-th", + month = "9", + year = "2023" +} + +% September 19, 2023 +@article{Wojcik:2023usm, + author = "Wojcik, George N.", + title = "{BFBrain: Scalar Bounded-From-Below Conditions from Bayesian Active Learning}", + eprint = "2309.10959", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +% September 18, 2023 +@article{Reyes-Gonzalez:2023oei, + author = "Reyes-Gonzalez, Humberto and Torre, Riccardo", + title = "{The NFLikelihood: an unsupervised DNNLikelihood from Normalizing Flows}", + eprint = "2309.09743", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "9", + year = "2023" +} + +@article{Dutta:2023jbz, + author = "Dutta, Bhaskar and Ghosh, Tathagata and Horne, Alyssa and Kumar, Jason and Palmer, Sean and Sandick, Pearl and Snedeker, Marcus and Stengel, Patrick and Walker, Joel W.", + title = "{Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with Intermediate Mass Gaps at the Large Hadron Collider}", + eprint = "2309.10197", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "MI-HET-810, HRI-RECAPP-2023-08, UH511-1330-2023, CETUP-2023-007", + month = "9", + year = "2023" +} + +@article{CMSECAL:2023fvz, + author = "Abadjiev, D. and others", + collaboration = "CMS ECAL", + title = "{Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter}", + eprint = "2309.10157", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "9", + year = "2023" +} + +% September 15, 2023 +@article{Kaidisch:2023lwp, + author = "Kaidisch, Siegfried and Hilger, Thomas U. 
and Krassnigg, Andreas and Lucha, Wolfgang", + title = "{Pole-fitting for complex functions: Enhancing standard techniques by artificial-neural-network classifiers and regressors}", + eprint = "2309.08358", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + doi = "10.1016/j.cpc.2023.108998", + journal = "Comput. Phys. Commun.", + volume = "295", + pages = "108998", + year = "2024" +} + % September 15, 2023 @article{Ermann:2023unw, @@ -42,6 +524,18 @@ @article{Golling:2023mqx year = "2023" } +@inproceedings{Karwowska:2023dhl, + author = "Karwowska, Maja and Jakubowska, Monika and Graczykowski, \L{}ukasz and Deja, Kamil and Kasak, Mi\l{}osz", + collaboration = "ALICE", + title = "{Particle identification with machine learning in ALICE Run 3}", + booktitle = "{26th International Conference on Computing in High Energy \& Nuclear Physics}", + eprint = "2309.07768", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + month = "9", + year = "2023" +} + % September 13, 2023 @article{Buhmann:2023kdg, author = {Buhmann, Erik and Gaede, Frank and Kasieczka, Gregor and Korol, Anatolii and Korcari, William and Kr\"uger, Katja and McKeown, Peter}, @@ -698,6 +1192,20 @@ @article{Mandal:2023mck year = "2023" } +% July 01, 2023 +@article{XENONCollaboration:2023dar, + author = "{XENON Collaboration}", + title = "{Detector signal characterization with a Bayesian network in XENONnT}", + eprint = "2304.05428", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + doi = "10.1103/PhysRevD.108.012016", + journal = "Phys. Rev. 
D", + volume = "108", + number = "1", + pages = "012016", + year = "2023" +} % June 29, 2023 @article{Witkowski:2023htt, @@ -1213,6 +1721,16 @@ @article{Singh:2023yvj year = "2023" } +@article{Das:2023ktd, + author = "Das, Ranit and Favaro, Luigi and Heimel, Theo and Krause, Claudius and Plehn, Tilman and Shih, David", + title = "{How to Understand Limitations of Generative Networks}", + eprint = "2305.16774", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + %May 25, 2023 @article{Vaslin:2023lig, author = "Vaslin, Louis and Barra, Vincent and Donini, Julien", @@ -5244,6 +5762,21 @@ @article{Ghosh:2022zdz year = "2022" } +% March 22, 2022 +@article{MODE:2022znx, + author = "Dorigo, Tommaso and others", + collaboration = "MODE", + title = "{Toward the end-to-end optimization of particle physics instruments with differentiable programming}", + eprint = "2203.13818", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + doi = "10.1016/j.revip.2023.100085", + journal = "Rev. Phys.", + volume = "10", + pages = "100085", + year = "2023" +} + %March 17, 2022 @article{Raine:2022hht, author = "Raine, John Andrew and Klein, Samuel and Sengupta, Debajyoti and Golling, Tobias", @@ -10282,6 +10815,22 @@ @article{Kieseler:2020wcq } % Added many on before April 20, 2020 + +@article{Coccaro:2019lgs, + author = "Coccaro, Andrea and Pierini, Maurizio and Silvestrini, Luca and Torre, Riccardo", + title = "{The DNNLikelihood: enhancing likelihood distribution with Deep Learning}", + eprint = "1911.03305", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "CERN-TH-2019-187", + doi = "10.1140/epjc/s10052-020-8230-1", + journal = "Eur. Phys. J. 
C", + volume = "80", + number = "7", + pages = "664", + year = "2020" +} + @article{Zlokapa:2019lvv, author = "Zlokapa, Alexander and Mott, Alex and Job, Joshua and Vlimant, Jean-Roch and Lidar, Daniel and Spiropulu, Maria", title = "{Quantum adiabatic machine learning with zooming}", @@ -10728,6 +11277,15 @@ @article{Erbin:2018csv reportNumber = "LMU-ASC 58/18", SLACcitation = "%%CITATION = ARXIV:1809.02612;%%" } +@article{Mieskolainen:2018fhf, + author = "Mieskolainen, Mikael", + title = "{DeepEfficiency - optimal efficiency inversion in higher dimensions at the LHC}", + eprint = "1809.06101", + archivePrefix = "arXiv", + primaryClass = "physics.data-an", + month = "9", + year = "2018" +} @article{Erdmann:2018jxd, author = "Erdmann, Martin and Glombitza, Jonas and Quast, Thorben", title = "{Precise simulation of electromagnetic calorimeter showers using a Wasserstein Generative Adversarial Network}", @@ -11030,6 +11588,19 @@ @article{DAgnolo:2019vbw primaryClass = "hep-ph", SLACcitation = "%%CITATION = ARXIV:1912.12155;%%" } +@article{CMS:2019dqq, + author = "{CMS Collaboration}", + title = "{A deep neural network to search for new long-lived particles decaying to jets}", + eprint = "1912.12238", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + reportNumber = "CMS-EXO-19-011, CERN-EP-2019-281", + doi = "10.1088/2632-2153/ab9023", + journal = "Mach. Learn. Sci. 
Tech.", + volume = "1", + pages = "035012", + year = "2020" +} @article{Farina:2018fyg, author = "Farina, Marco and Nakai, Yuichiro and Shih, David", title = "{Searching for New Physics with Deep Autoencoders}", @@ -12660,7 +13231,7 @@ @article{Paganini:2017hrr % April 24, 2017 @article{Caron:2017hku, author = {Caron, Sascha and Kim, Jong Soo and Rolbiecki, Krzysztof and de Austri, Roberto Ruiz and Stienen, Bob}, - doi = {10.1140/epjc/s10052-017-4814-9}, + doi = "10.1140/epjc/s10052-017-4814-9", issn = {1434-6052}, journal = {The European Physical Journal C}, number = {4}, @@ -12669,7 +13240,11 @@ @article{Caron:2017hku url = {http://dx.doi.org/10.1140/epjc/s10052-017-4814-9}, volume = {77}, year = {2017}, - bdsk-url-1 = {http://dx.doi.org/10.1140/epjc/s10052-017-4814-9} + bdsk-url-1 = {http://dx.doi.org/10.1140/epjc/s10052-017-4814-9}, + eprint = "1605.02797", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + } % March 10, 2017 diff --git a/HEPML.tex b/HEPML.tex index ae56f43..253e009 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -61,56 +61,56 @@ \\\textit{A grayscale image is a regular grid with a scalar value at each grid point. `Color' images have a fixed-length vector at each grid point. Many detectors are analogous to digital cameras and thus images are a natural representation. In other cases, images can be created by discretizing. Convolutional neural networks are natural tools for processing image data. One downside of the image representation is that high energy physics data tend to be sparse, unlike natural images.} \item \textbf{Sequences}~\cite{Guest:2016iqz,Nguyen:2018ugw,Bols:2020bkb,goto2021development,deLima:2021fwm,ATL-PHYS-PUB-2017-003} \\\textit{Data that have a variable with a particular order may be represented as a sequence. Recurrent neural networks are natural tools for processing sequence data. 
} - \item \textbf{Trees}~\cite{Louppe:2017ipp,Cheng:2017rdo,Jercic:2021bfc} + \item \textbf{Trees}~\cite{Louppe:2017ipp,Cheng:2017rdo,Jercic:2021bfc,Dutta:2023jbz,Belfkir:2023vpo,Finke:2023ltw} \\\textit{Recursive neural networks are natural tools for processing data in a tree structure.} - \item \textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,1808887,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Mokhtar:2022pwm,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm,Yu:2023juh,Neu:2023sfh,Wang:2023cac,McEneaney:2023vwp,Liu:2023siw,GarciaPardinas:2023pmx,Duperrin:2023elp,BelleII:2023egc,Holmberg:2023rfr,Bhattacherjee:2023evs,Murnane:2023ksa} + \item 
\textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,1808887,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Mokhtar:2022pwm,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm,Yu:2023juh,Neu:2023sfh,Wang:2023cac,McEneaney:2023vwp,Liu:2023siw,GarciaPardinas:2023pmx,Duperrin:2023elp,BelleII:2023egc,Holmberg:2023rfr,Bhattacherjee:2023evs,Murnane:2023ksa,Konar:2023ptv} \\\textit{A graph is a collection of nodes and edges. Graph neural networks are natural tools for processing data in a tree structure.} - \item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Kach:2022uzq,Onyisi:2022hdh,Athanasakos:2023fhq,Kach:2023rqw,Badea:2023jdb} + \item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Kach:2022uzq,Onyisi:2022hdh,Athanasakos:2023fhq,Kach:2023rqw,Badea:2023jdb,Buhmann:2023zgc,Acosta:2023nuw} \\\textit{A point cloud is a (potentially variable-size) set of points in space. Sets are distinguished from sequences in that there is no particular order (i.e. permutation invariance). 
Sets can also be viewed as graphs without edges and so graph methods that can parse variable-length inputs may also be appropriate for set learning, although there are other methods as well.} \item \textbf{Physics-inspired basis}~\cite{Datta:2019,Datta:2017rhs,Datta:2017lxt,Komiske:2017aww,Butter:2017cot,Grojean:2020ech,Kishimoto:2022eum,Larkoski:2023nye,Munoz:2023csn} \\\textit{This is a catch-all category for learning using other representations that use some sort of manual or automated physics-preprocessing.} \end{itemize} \item \textbf{Targets} \begin{itemize} - \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq,Grossi:2023fqq,Bogatskiy:2023nnw} + \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq,Grossi:2023fqq,Baron:2023yhw,Bogatskiy:2023nnw} \\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.} \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk} \\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) has unique challenged and opportunities compared with $W/Z$ tagging.} \item \textbf{quarks and 
gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq,He:2023cfc,Shen:2023ofd} \\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. This classification task has been a benchmark for many new machine learning models.} - \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq,Keicher:2023mer,He:2023cfc,Bogatskiy:2023nnw,Shen:2023ofd,Isildak:2023dnf,Sahu:2023uwb} + \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq,Keicher:2023mer,He:2023cfc,Bogatskiy:2023nnw,Shen:2023ofd,Isildak:2023dnf,Sahu:2023uwb,Baron:2023yhw,Bogatskiy:2023fug} \\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).} \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm} \\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.} \item \textbf{$b$-tagging}~\cite{Sirunyan:2017ezt,Guest:2016iqz,Keck:2018lcd,bielkov2020identifying,Bols:2020bkb,ATL-PHYS-PUB-2017-003,ATL-PHYS-PUB-2020-014,Liao:2022ufk,Stein:2023cnt,ATLAS:2023gog} \\\textit{Due to their long (but not too long) lifetime, the 
$B$-hadron lifetime is macroscopic and $b$-jet tagging has been one of the earliest adapters of modern machine learning tools.} - \item \textbf{Flavor physics}~\cite{1811097,Bahtiyar:2022une,Zhang:2023czx,Nishimura:2023wdu} + \item \textbf{Flavor physics}~\cite{1811097,Bahtiyar:2022une,Zhang:2023czx,Nishimura:2023wdu,Smith:2023ssh} \\\textit{This category is for studies related to exclusive particle decays, especially with bottom and charm hadrons.} - \item \textbf{BSM particles and models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,ATLAS:2022ihe,CMS:2022idi,Ballabene:2022fms,ATLAS:2023mcc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia,Aguilar-Saavedra:2023pde,Cremer:2023gne,Esmail:2023axd,Choudhury:2023eje,Bhattacherjee:2023evs} + \item \textbf{BSM particles and 
models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,ATLAS:2022ihe,CMS:2022idi,Ballabene:2022fms,ATLAS:2023mcc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia,Aguilar-Saavedra:2023pde,Cremer:2023gne,Esmail:2023axd,Choudhury:2023eje,Bhattacherjee:2023evs,Grefsrud:2023dad,Wang:2023pqx} \\\textit{There are many proposals to train classifiers to enhance the presence of particular new physics models.} - \item \textbf{Particle identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Keck:2018lcd,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum,Ryzhikov:2022lbu,Kushawaha:2023dms,Wu:2023pzn,Prasad:2023zdd,Lange:2023gbe,Novosel:2023cki,Charan:2023ldg,NA62:2023wzm} + \item \textbf{Particle identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Keck:2018lcd,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum,Ryzhikov:2022lbu,Kushawaha:2023dms,Wu:2023pzn,Prasad:2023zdd,Lange:2023gbe,Novosel:2023cki,Charan:2023ldg,NA62:2023wzm,Karwowska:2023dhl,Song:2023ceh} \\\textit{This is a generic category for direct particle identification and categorization using various detector technologies. 
Direct means that the particle directly interacts with the detector (in contrast with $b$-tagging).} \item \textbf{Neutrino Detectors}~\cite{Aurisano:2016jvx,Acciarri:2016ryt,Hertel:DLPS2017,Adams:2018bvi,Domine:2019zhm,Aiello:2020orq,Adams:2020vlj,Domine:2020tlx,DUNE:2020gpm,DeepLearnPhysics:2020hut,Koh:2020snv,Yu:2020wxu,Psihas:2020pby,alonsomonsalve2020graph,Abratenko:2020pbp,Clerbaux:2020ttg,Liu:2020pzv,Abratenko:2020ocq,Chen:2020zkj,Qian:2021vnh,abbasi2021convolutional,Drielsma:2021jdv,Rossi:2021tjf,Hewes:2021heg,Acciarri:2021oav,Belavin:2021bxb,Maksimovic:2021dmz,Gavrikov:2021ktt,Garcia-Mendez:2021vts,Carloni:2021zbc,MicroBooNE:2021nss,MicroBooNE:2021ojx,Elkarghli:2020owr,DUNE:2022fiy,Lutkus:2022eou,Chappell:2022yxd,Bachlechner:2022cvf,Sogaard:2022qgg,IceCube:2022njh,Bai:2022lbv,Biassoni:2023lih,Yu:2023ehc} \\\textit{Neutrino detectors are very large in order to have a sizable rate of neutrino detection. The entire neutrino interaction can be characterized to distinguish different neutrino flavors.} - \item \textbf{Direct Dark Matter Detectors}~\cite{Ilyasov_2020,Akerib:2020aws,Khosa:2019qgp,Golovatiuk:2021lqn,McDonald:2021hus,Coarasa:2021fpv,Herrero-Garcia:2021goa,Liang:2021nsz,Li:2022tvg,Biassoni:2023lih} + \item \textbf{Direct Dark Matter Detectors}~\cite{Ilyasov_2020,Akerib:2020aws,Khosa:2019qgp,Golovatiuk:2021lqn,McDonald:2021hus,Coarasa:2021fpv,Herrero-Garcia:2021goa,Liang:2021nsz,Li:2022tvg,Biassoni:2023lih,XENONCollaboration:2023dar} \\\textit{Dark matter detectors are similar to neutrino detectors, but aim to achieve `zero' background.} - \item \textbf{Cosmology, Astro Particle, and Cosmic Ray 
physics}~\cite{Ostdiek:2020cqz,Brehmer:2019jyt,Tsai:2020vcx,Verma:2020gnq,Aab:2021rcn,Balazs:2021uhg,gonzalez2021tackling,Conceicao:2021xgn,huang2021convolutionalneuralnetwork,Droz:2021wnh,Han:2021kjx,Arjona:2021hmg,1853992,Shih:2021kbt,Ikeda:2021sxm,Aizpuru:2021vhd,Vago:2021grx,List:2021aer,Kahlhoefer:2021sha,Sabiu:2021aea,Mishra-Sharma:2021nhh,Mishra-Sharma:2021oxe,Bister:2021arb,Chen:2019avc,De:2022sde,Montel:2022fhv,Glauch:2022xth,Sun:2022djj,Abel:2022nje,Zhang:2022djp,Nguyen:2022ldb,Goriely:2022upe,Kim:2023wuk,Zhou:2023cfs,Carvalho:2023ele,Cai:2023gol,Krastev:2023fnh,Hatefi:2023gpj} + \item \textbf{Cosmology, Astro Particle, and Cosmic Ray physics}~\cite{Ostdiek:2020cqz,Brehmer:2019jyt,Tsai:2020vcx,Verma:2020gnq,Aab:2021rcn,Balazs:2021uhg,gonzalez2021tackling,Conceicao:2021xgn,huang2021convolutionalneuralnetwork,Droz:2021wnh,Han:2021kjx,Arjona:2021hmg,1853992,Shih:2021kbt,Ikeda:2021sxm,Aizpuru:2021vhd,Vago:2021grx,List:2021aer,Kahlhoefer:2021sha,Sabiu:2021aea,Mishra-Sharma:2021nhh,Mishra-Sharma:2021oxe,Bister:2021arb,Chen:2019avc,De:2022sde,Montel:2022fhv,Glauch:2022xth,Sun:2022djj,Abel:2022nje,Zhang:2022djp,Nguyen:2022ldb,Goriely:2022upe,Kim:2023wuk,Zhou:2023cfs,Carvalho:2023ele,Cai:2023gol,Krastev:2023fnh,Hatefi:2023gpj,Guo:2023mhf} \\\textit{Machine learning is often used in astrophysics and cosmology in different ways than terrestrial particle physics experiments due to a general divide between Bayesian and Frequentist statistics. However, there are many similar tasks and a growing number of proposals designed for one domain that apply to the other. 
See also https://github.com/georgestein/ml-in-cosmology.} - \item \textbf{Tracking}~\cite{Farrell:DLPS2017,Farrell:2018cjr,Amrouche:2019wmx,Ju:2020xty,Akar:2020jti,Shlomi:2020ufi,Choma:2020cry,Siviero:2020tim,Fox:2020hfm,Amrouche:2021tlm,goto2021development,Biscarat:2021dlj,Akar:2021gns,Thais:2021qcb,Ju:2021ayy,Dezoort:2021kfk,Edmonds:2021lzd,Lavrik:2021zgt,Huth:2021zcm,Goncharov:2021wvd,Wang:2022oer,Alonso-Monsalve:2022zlm,Bakina:2022mhs,Akram:2022zmj,Sun:2022bxx,Abidi:2022ogh,Bae:2023eec,Knipfer:2023zrv,Akar:2023zhd} + \item \textbf{Tracking}~\cite{Farrell:DLPS2017,Farrell:2018cjr,Amrouche:2019wmx,Ju:2020xty,Akar:2020jti,Shlomi:2020ufi,Choma:2020cry,Siviero:2020tim,Fox:2020hfm,Amrouche:2021tlm,goto2021development,Biscarat:2021dlj,Akar:2021gns,Thais:2021qcb,Ju:2021ayy,Dezoort:2021kfk,Edmonds:2021lzd,Lavrik:2021zgt,Huth:2021zcm,Goncharov:2021wvd,Wang:2022oer,Alonso-Monsalve:2022zlm,Bakina:2022mhs,Akram:2022zmj,Sun:2022bxx,Abidi:2022ogh,Bae:2023eec,Knipfer:2023zrv,Akar:2023zhd,Mieskolainen:2023hkz} \\\textit{Charged particle tracking is a challenging pattern recognition task. 
This category is for various classification tasks associated with tracking, such as seed selection.} - \item \textbf{Heavy Ions / Nuclear Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Zhang:2022hjh,Yang:2022eag,Rigo:2022ces,Yang:2022rlw,Munoz:2022slm,Goriely:2022upe,Mallick:2022alr,Fore:2022ljl,Steffanic:2023cyx,Mallick:2023vgi,He:2023urp,Xu:2023fbs,Kanwar:2023otc,Mumpower:2023lch,Escher:2023oyy,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Dellen:2023avd,AlHammal:2023svo,Wang:2023muv,Wang:2023kcg,Ai:2023azx,Yiu:2023ido,Karmakar:2023mhy,Lasseri:2023dhi,Yoshida:2023wrb,Liu:2023xgl,Hizawa:2023plv,Wen:2023oju,Allaire:2023fgp,Bedaque:2023udu} + \item \textbf{Heavy Ions / Nuclear 
Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Zhang:2022hjh,Yang:2022eag,Rigo:2022ces,Yang:2022rlw,Munoz:2022slm,Goriely:2022upe,Mallick:2022alr,Fore:2022ljl,Steffanic:2023cyx,Mallick:2023vgi,He:2023urp,Xu:2023fbs,Kanwar:2023otc,Mumpower:2023lch,Escher:2023oyy,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Dellen:2023avd,AlHammal:2023svo,Wang:2023muv,Wang:2023kcg,Ai:2023azx,Yiu:2023ido,Karmakar:2023mhy,Lasseri:2023dhi,Yoshida:2023wrb,Liu:2023xgl,Hizawa:2023plv,Wen:2023oju,Allaire:2023fgp,Bedaque:2023udu,Lay:2023boz} \\\textit{Many tools in high energy nuclear physics are similar to high energy particle physics. The physics target of these studies are to understand collective properties of the strong force.} \end{itemize} \item \textbf{Learning strategies} \\\textit{There is no unique way to train a classifier and designing an effective learning strategy is often one of the biggest challenges for achieving optimality.} \begin{itemize} - \item \textbf{Hyperparameters}~\cite{Tani:2020dyi,Dudko:2021cie,Bevan:2017stx,DeZoort:2023dvb} + \item \textbf{Hyperparameters}~\cite{Tani:2020dyi,Dudko:2021cie,Bevan:2017stx,DeZoort:2023dvb,Schroff:2023see} \\\textit{In addition to learnable weights $w$, classifiers have a number of non-differentiable parameters like the number of layers in a neural network. 
These parameters are called hyperparameters.} \item \textbf{Weak/Semi supervision}~\cite{Dery:2017fap,Metodiev:2017vrx,Komiske:2018oaa,Collins:2018epr,Collins:2019jip,Borisyak:2019vbz,Cohen:2017exh,Komiske:2018vkc,Metodiev:2018ftz,collaboration2020dijet,Amram:2020ykb,Brewer:2020och,Dahbi:2020zjw,Lee:2019ssx,Lieberman:2021krq,Komiske:2022vxg,Li:2022omf,Finke:2022lsu,LeBlanc:2022bwd,Dolan:2022ikg,Bardhan:2023mia,Witkowski:2023htt} \\\textit{For supervised learning, the labels $y_i$ are known. In the case that the labels are noisy or only known with some uncertainty, then the learning is called weak supervision. Semi-supervised learning is the related case where labels are known for only a fraction of the training examples.} \item \textbf{Unsupervised}~\cite{Mackey:2015hwa,Komiske:2019fks,1797846,Dillon:2019cqt,Cai:2020vzx,Howard:2021pos,Dillon:2021gag,Huang:2023kgs,Kitouni:2023rct,Badea:2023jdb} \\\textit{When no labels are provided, the learning is called unsupervised.} - \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu} + \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu,Alvestad:2023jgl} \\\textit{Instead of learning to distinguish different types of examples, the goal of reinforcement learning is to learn a strategy (policy). 
The prototypical example of reinforcement learning in learning a strategy to play video games using some kind of score as a feedback during the learning.} \item \textbf{Quantum Machine Learning}~\cite{Mott:2017xdb,Zlokapa:2019lvv,Blance:2020nhl,Terashi:2020wfi,Chen:2020zkj,Wu:2020cye,Guan:2020bdl,Chen:2021ouz,Blance:2021gcs,Heredge:2021vww,Wu:2021xsj,Belis:2021zqi,Araz:2021ifk,Bravo-Prieto:2021ehz,Kim:2021wrr,Ngairangbam:2021yma,Gianelle:2022unu,Abel:2022lqr,Araz:2022haf,Delgado:2022aty,Alvi:2022fkk,Peixoto:2022zzk,Araz:2022zxk,Duckett:2022ccc,Rousselot:2023pcj,Schuhmacher:2023pro,Rehm:2023ovj} \\\textit{Quantum computers are based on unitary operations applied to quantum states. These states live in a vast Hilbert space which may have a usefully large information capacity for machine learning.} @@ -120,7 +120,7 @@ \\\textit{This is an ML tool for helping the network to focus on particularly useful features.} \item \textbf{Regularization}~\cite{Araz:2021wqm,Sforza:2013hua} \\\textit{This is a term referring to any learning strategy that improves the robustness of a classifier to statistical fluctuations in the data and in the model initialization.} - \item \textbf{Optimal Transport}~\cite{Komiske:2019fks,Cai:2020vzx,Romao:2020ojy,Pollard:2021fqv,Cai:2021hnn,Manole:2022bmi,Gouskos:2022xvn,ATLAS:2023mny} + \item \textbf{Optimal Transport}~\cite{Komiske:2019fks,Cai:2020vzx,Romao:2020ojy,Pollard:2021fqv,Cai:2021hnn,Manole:2022bmi,Gouskos:2022xvn,ATLAS:2023mny,Bright-Thonney:2023sqf} \\\textit{Optimal transport is a set of tools for transporting one probability density into another and can be combined with other strategies for classification, regression, etc. 
The above citation list does not yet include papers using optimal transport distances as part of generative model training.} \end{itemize} \item \textbf{Fast inference / deployment} @@ -128,7 +128,7 @@ \begin{itemize} \item \textbf{Software}~\cite{Strong:2020mge,Gligorov:2012qt,Weitekamp:DLPS2017,Nguyen:2018ugw,Bourgeois:2018nvk,1792136,Balazs:2021uhg,Rehm:2021zow,Mahesh:2021iph,Amrouche:2021tio,Pol:2021iqw,Goncharov:2021wvd,Saito:2021vpp,Jiang:2022zho,Garg:2022tal,Duarte:2022job,Guo:2023nfu,Tyson:2023zkx,DPHEP:2023blx,DiBello:2023kzc} \\\textit{Strategies for efficient inference for a given hardware architecture.} - \item \textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb,Cai:2023ldc,Herbst:2023lug,Coccaro:2023nol,Neu:2023sfh,Okabe:2023efz,Yaary:2023dvw,Schulte:2023gtt} + \item \textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb,Cai:2023ldc,Herbst:2023lug,Coccaro:2023nol,Neu:2023sfh,Okabe:2023efz,Yaary:2023dvw,Schulte:2023gtt,Yoo:2023lxy} \\\textit{Various accelerators have been studied for fast inference that is very important for latency-limited applications like the trigger at collider experiments.} \item \textbf{Deployment}~\cite{Kuznetsov:2020mcj,SunnebornGudnadottir:2021nhk,Holmberg:2023rfr} \\\textit{This category is for the 
deployment of machine learning interfaces, such as in the cloud.} @@ -139,23 +139,23 @@ \begin{itemize} \item \textbf{Pileup}~\cite{Komiske:2017ubm,ATL-PHYS-PUB-2019-028,Martinez:2018fwc,Carrazza:2019efs,Maier:2021ymx,Li:2022omf,CRESST:2022qor,Kim:2023koz} \\\textit{A given bunch crossing at the LHC will have many nearly simultaneous proton-proton collisions. Only one of those is usually interesting and the rest introduce a source of noise (pileup) that must be mitigating for precise final state reconstruction.} - \item \textbf{Calibration}~\cite{Cheong:2019upg,ATL-PHYS-PUB-2020-001,ATL-PHYS-PUB-2018-013,Hooberman:DLPS2017,Kasieczka:2020vlh,Sirunyan:2019wwa,Baldi:2020hjm,Du:2020pmp,Kieseler:2021jxc,Pollard:2021fqv,Akchurin:2021afn,Kieseler:2020wcq,Akchurin:2021ahx,Diefenthaler:2021rdj,Polson:2021kvr,Micallef:2021src,Arratia:2021tsq,Kronheim:2021hdb,Renteria-Estrada:2021zrd,Pata:2022wam,Chadeeva:2022kay,Dorigo:2022tfi,Alves:2022gnw,Qiu:2022xvr,Akchurin:2022apq,Gambhir:2022gua,Gambhir:2022dut,Valsecchi:2022rla,Leigh:2022lpn,Darulis:2022brn,Ge:2022xrv,Guglielmi:2022ftj,Aad:2023ula,Lee:2023jew,Schwenker:2023bih,Basak:2023wzq,Grosso:2023jxp,Grosso:2023ltd,Soleymaninia:2023dds,Raine:2023fko,Khozani:2023bql,ATLAS:2023tyv,ALICETPC:2023ojd,Meyer:2023ffd,Holmberg:2023rfr} + \item 
\textbf{Calibration}~\cite{Cheong:2019upg,ATL-PHYS-PUB-2020-001,ATL-PHYS-PUB-2018-013,Hooberman:DLPS2017,Kasieczka:2020vlh,Sirunyan:2019wwa,Baldi:2020hjm,Du:2020pmp,Kieseler:2021jxc,Pollard:2021fqv,Akchurin:2021afn,Kieseler:2020wcq,Akchurin:2021ahx,Diefenthaler:2021rdj,Polson:2021kvr,Micallef:2021src,Arratia:2021tsq,Kronheim:2021hdb,Renteria-Estrada:2021zrd,Pata:2022wam,Chadeeva:2022kay,Dorigo:2022tfi,Alves:2022gnw,Qiu:2022xvr,Akchurin:2022apq,Gambhir:2022gua,Gambhir:2022dut,Valsecchi:2022rla,Leigh:2022lpn,Darulis:2022brn,Ge:2022xrv,Guglielmi:2022ftj,Aad:2023ula,Lee:2023jew,Schwenker:2023bih,Basak:2023wzq,Grosso:2023jxp,Grosso:2023ltd,Soleymaninia:2023dds,Raine:2023fko,Khozani:2023bql,ATLAS:2023tyv,ALICETPC:2023ojd,Meyer:2023ffd,Holmberg:2023rfr,Bein:2023ylt,Acosta:2023nuw} \\\textit{The goal of calibration is to remove the bias (and reduce variance if possible) from detector (or related) effects.} \item \textbf{Recasting}~\cite{Caron:2017hku,Bertone:2016mdy,1806026,Hammad:2022wpq} \\\textit{Even though an experimental analysis may provide a single model-dependent interpretation of the result, the results are likely to have important implications for a variety of other models. 
Recasting is the task of taking a result and interpreting it in the context of a model that was not used for the original analysis.} - \item \textbf{Matrix elements}~\cite{Badger:2020uow,Bishara:2019iwh,1804325,Bury:2020ewi,Sombillo:2021yxe,Sombillo:2021rxv,Aylett-Bullock:2021hmo,Maitre:2021uaa,Danziger:2021eeg,Winterhalder:2021ngy,Karl:2022jda,Alnuqaydan:2022ncd,Dersy:2022bym,Badger:2022hwf,Janssen:2023ahv,Maitre:2023dqz} + \item \textbf{Matrix elements}~\cite{Badger:2020uow,Bishara:2019iwh,1804325,Bury:2020ewi,Sombillo:2021yxe,Sombillo:2021rxv,Aylett-Bullock:2021hmo,Maitre:2021uaa,Danziger:2021eeg,Winterhalder:2021ngy,Karl:2022jda,Alnuqaydan:2022ncd,Dersy:2022bym,Badger:2022hwf,Janssen:2023ahv,Maitre:2023dqz,Kaidisch:2023lwp} \\\textit{Regression methods can be used as surrogate models for functions that are too slow to evaluate. One important class of functions are matrix elements, which form the core component of cross section calculations in quantum field theory.} - \item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Garg:2022tal,Qiu:2023ihi,AlHammal:2023svo,Shi:2023xfz,Goos:2023opq,Schroder:2023akt} + \item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Garg:2022tal,Qiu:2023ihi,AlHammal:2023svo,Shi:2023xfz,Goos:2023opq,Schroder:2023akt,Yang:2023rbg} \\\textit{The target features could be parameters of a model, which can be learned directly through a regression setup. 
Other forms of inference are described in later sections (which could also be viewed as regression).} \item \textbf{Parton Distribution Functions (and related)}~\cite{DelDebbio:2020rgv,Grigsby:2020auv,Rossi:2020sbh,Carrazza:2021hny,Ball:2021leu,Ball:2021xlu,Khalek:2021gon,Iranipour:2022iak,Gao:2022uhg,Gao:2022srd,Candido:2023utz,Wang:2023nab,Kassabov:2023hbm,Wang:2023poi,Fernando:2023obn,Rabemananjara:2023xfq} \\\textit{Various machine learning models can provide flexible function approximators, which can be useful for modeling functions that cannot be determined easily from first principles such as parton distribution functions.} - \item \textbf{Lattice Gauge Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf,Singha:2023xxq,Riberdy:2023awf,Buzzicotti:2023qdv,Caselle:2023mvh,Detmold:2023kjm,Kashiwa:2023dfx,Ermann:2023unw} + \item \textbf{Lattice Gauge 
Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf,Singha:2023xxq,Riberdy:2023awf,Buzzicotti:2023qdv,Caselle:2023mvh,Detmold:2023kjm,Kashiwa:2023dfx,Ermann:2023unw,Albandea:2023ais,Alvestad:2023jgl,Tomiya:2023jdy} \\\textit{Lattice methods offer a complementary approach to perturbation theory. A key challenge is to create approaches that respect the local gauge symmetry (equivariant networks).} - \item \textbf{Function Approximation}~\cite{1853982,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab,Fernando:2023obn} + \item \textbf{Function Approximation}~\cite{1853982,Coccaro:2019lgs,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab,Fernando:2023obn,Reyes-Gonzalez:2023oei} \\\textit{Approximating functions that obey certain (physical) constraints.} \item \textbf{Symbolic Regression}~\cite{Butter:2021rvz,Zhang:2022uqk,Lu:2022joy,Wang:2023poi} \\\textit{Regression where the result is a (relatively) simple formula.} - \item \textbf{Monitoring}~\cite{Mukund:2023oyy,Matha:2023tmf,CMSMuon:2023czf,Joshi:2023btt,Chen:2023cim,Harilal:2023smf} + \item \textbf{Monitoring}~\cite{Mukund:2023oyy,Matha:2023tmf,CMSMuon:2023czf,Joshi:2023btt,Chen:2023cim,Harilal:2023smf,Das:2023ktd,CMSECAL:2023fvz} \\\textit{Regression models can be used to monitor experimental setups and sensors.} \end{itemize} \item \textbf{Equivariant
networks}~\cite{Kanwar:2003.06413,Dolan:2020qkr,Favoni:2020reg,Bulusu:2021njs,Gong:2022lye,Shi:2022yqw,Bogatskiy:2022hub,Favoni:2022mcg,Bogatskiy:2022czk,Hao:2022zns,Lehner:2023bba,Forestano:2023fpj,Aronsson:2023rli,Buhmann:2023pmh,Forestano:2023qcy,Lehner:2023prf,Murnane:2023kfm,Bogatskiy:2023nnw} @@ -169,13 +169,13 @@ \\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.} \item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int,Anzalone:2023ugq,Lasseri:2023dhi,Chekanov:2023uot} \\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. 
When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the detector to a randomly chosen element of the latent space.} - \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Kach:2022qnf,Kach:2022uzq,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko,Golling:2023yjq,Wen:2023oju,Xu:2023xdc,Singha:2023xxq,Buckley:2023rez,Pang:2023wfx,Golling:2023mqx} + \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Kach:2022qnf,Kach:2022uzq,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko,Golling:2023yjq,Wen:2023oju,Xu:2023xdc,Singha:2023xxq,Buckley:2023rez,Pang:2023wfx,Golling:2023mqx,Reyes-Gonzalez:2023oei,Bickendorf:2023nej,Finke:2023ltw,Bright-Thonney:2023sqf,Albandea:2023ais,Pham:2023bnl,Gavranovic:2023oam} \\\textit{Normalizing 
flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.} - \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok,Acosta:2023zik,Imani:2023blb,Amram:2023onf,Diefenbacher:2023flw,Cotler:2023lem,Diefenbacher:2023wec,Mikuni:2023tqg,Hunt-Smith:2023ccp,Buhmann:2023kdg} + \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok,Acosta:2023zik,Imani:2023blb,Amram:2023onf,Diefenbacher:2023flw,Cotler:2023lem,Diefenbacher:2023wec,Mikuni:2023tqg,Hunt-Smith:2023ccp,Buhmann:2023kdg,Buhmann:2023zgc,Buhmann:2023acn,Devlin:2023jzp} \\\textit{These approaches learn the gradient of the density instead of the density directly.} - \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov,Raine:2023fko} + \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov,Raine:2023fko,Tomiya:2023jdy} \\\textit{These approaches learn the density or perform generative modeling using transformer-based networks.} - \item \textbf{Physics-inspired}~\cite{Andreassen:2018apy,Andreassen:2019txo,1808876,Lai:2020byl,Barenboim:2021vzh} + \item \textbf{Physics-inspired}~\cite{Andreassen:2018apy,Andreassen:2019txo,1808876,Lai:2020byl,Barenboim:2021vzh,Larkoski:2023xam} \\\textit{A variety of methods have been proposed to use machine learning tools (e.g. neural networks) combined with physical components.} \item \textbf{Mixture Models}~\cite{Chen:2020uds,Burton:2021tsd,Graziani:2021vai,Liu:2022dem,Vermunt:2023fsr} \\\textit{A mixture model is a superposition of simple probability densities. For example, a Gaussian mixture model is a sum of normal probability densities. 
Mixture density networks are mixture models where the coefficients in front of the constituent densities as well as the density parameters (e.g. mean and variances of Gaussians) are parameterized by neural networks.} @@ -186,20 +186,20 @@ \item \textbf{Other/hybrid}~\cite{Cresswell:2022tof,DiBello:2022rss,Li:2022jon,Kansal:2022spb,Butter:2023fov,Kronheim:2023jrl,Santos:2023mib,Sahu:2023uwb} \\\textit{Architectures that combine different network elements or otherwise do not fit into the other categories.} \end{itemize} -\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Schuhmacher:2023pro,Roche:2023int,Golling:2023juz,Sengupta:20
23xqy,Mikuni:2023tok,Golling:2023yjq,Vaslin:2023lig,ATLAS:2023azi,Chekanov:2023uot} +\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Schuhmacher:2023pro,Roche:2023int,Golling:2023juz,Sengupta:2023xqy,Mikuni:2023tok,Golling:2023yjq,Vaslin:2023lig,ATLAS:2023azi,Chekanov:2023uot,CMSECAL:2023fvz,Bickendorf:2023nej,Finke:2023ltw,Buhmann:2023acn,Freytsis:2023cjr} \\\textit{The goal of anomaly detection is to identify abnormal events. The abnormal events could be from physics beyond the Standard Model or from faults in a detector. 
While nearly all searches for new physics are technically anomaly detection, this category is for methods that are mode-independent (broadly defined). Anomalies in high energy physics tend to manifest as over-densities in phase space (often called `population anomalies') in contrast to off-manifold anomalies where you can flag individual examples as anomalous. } \item \textbf{Simulation-based (`likelihood-free') Inference} \\\textit{Likelihood-based inference is the case where $p(x|\theta)$ is known and $\theta$ can be determined by maximizing the probability of the data. In high energy physics, $p(x|\theta)$ is often not known analytically, but it is often possible to sample from the density implicitly using simulations.} \begin{itemize} - \item \textbf{Parameter estimation}~\cite{Andreassen:2019nnm,Stoye:2018ovl,Hollingsworth:2020kjg,Brehmer:2018kdj,Brehmer:2018eca,Brehmer:2019xox,Brehmer:2018hga,Cranmer:2015bka,Andreassen:2020gtw,Coogan:2020yux,Flesher:2020kuy,Bieringer:2020tnw,Nachman:2021yvi,Chatterjee:2021nms,NEURIPS2020_a878dbeb,Mishra-Sharma:2021oxe,Barman:2021yfh,Bahl:2021dnc,Arganda:2022qzy,Kong:2022rnd,Arganda:2022zbs,Butter:2022vkj,Neubauer:2022gbu,Rizvi:2023mws,Heinrich:2023bmt,Breitenmoser:2023tmi,Erdogan:2023uws,Morandini:2023pwj,Barrue:2023ysk,Espejo:2023wzf} + \item \textbf{Parameter estimation}~\cite{Andreassen:2019nnm,Stoye:2018ovl,Hollingsworth:2020kjg,Brehmer:2018kdj,Brehmer:2018eca,Brehmer:2019xox,Brehmer:2018hga,Cranmer:2015bka,Andreassen:2020gtw,Coogan:2020yux,Flesher:2020kuy,Bieringer:2020tnw,Nachman:2021yvi,Chatterjee:2021nms,NEURIPS2020_a878dbeb,Mishra-Sharma:2021oxe,Barman:2021yfh,Bahl:2021dnc,Arganda:2022qzy,Kong:2022rnd,Arganda:2022zbs,Butter:2022vkj,Neubauer:2022gbu,Rizvi:2023mws,Heinrich:2023bmt,Breitenmoser:2023tmi,Erdogan:2023uws,Morandini:2023pwj,Barrue:2023ysk,Espejo:2023wzf,Heimel:2023mvw} \\\textit{This can also be viewed as a regression problem, but there the goal is typically to do maximum likelihood estimation in contrast to 
directly minimizing the mean squared error between a function and the target.} - \item \textbf{Unfolding}~\cite{Andreassen:2019cjw,Datta:2018mwd,Bellagente:2019uyp,Gagunashvili:2010zw,Glazov:2017vni,Martschei:2012pr,Lindemann:1995ut,Zech2003BinningFreeUB,1800956,Vandegar:2020yvw,Howard:2021pos,Baron:2021vvl,Andreassen:2021zzk,Komiske:2021vym,H1:2021wkz,Arratia:2021otl,Wong:2021zvv,Arratia:2022wny,Backes:2022vmn,Chan:2023tbf,Shmakov:2023kjj} + \item \textbf{Unfolding}~\cite{Mieskolainen:2018fhf,Andreassen:2019cjw,Datta:2018mwd,Bellagente:2019uyp,Gagunashvili:2010zw,Glazov:2017vni,Martschei:2012pr,Lindemann:1995ut,Zech2003BinningFreeUB,1800956,Vandegar:2020yvw,Howard:2021pos,Baron:2021vvl,Andreassen:2021zzk,Komiske:2021vym,H1:2021wkz,Arratia:2021otl,Wong:2021zvv,Arratia:2022wny,Backes:2022vmn,Chan:2023tbf,Shmakov:2023kjj} \\\textit{This is the task of removing detector distortions. In contrast to parameter estimation, the goal is not to infer model parameters, but instead, the undistorted phase space probability density. 
This is often also called deconvolution.} \item \textbf{Domain adaptation}~\cite{Rogozhnikov:2016bdp,Andreassen:2019nnm,Cranmer:2015bka,2009.03796,Nachman:2021opi,Camaiani:2022kul,Schreck:2023pzs,Algren:2023qnb} \\\textit{Morphing simulations to look like data is a form of domain adaptation.} \item \textbf{BSM}~\cite{Andreassen:2020nkr,Hollingsworth:2020kjg,Brehmer:2018kdj,Brehmer:2018eca,Brehmer:2018hga,Brehmer:2019xox,Romao:2020ojy,deSouza:2022uhk,GomezAmbrosio:2022mpm,Castro:2022zpq,Anisha:2023xmh,Dennis:2023kfe,vanBeekveld:2023ney,Chhibra:2023tyf,Mandal:2023mck,Franz:2023gic,Arganda:2023qni} \\\textit{This category is for parameter estimation when the parameter is the signal strength of new physics.} - \item \textbf{Differentiable Simulation}~\cite{Heinrich:2022xfa,Nachman:2022jbj,Lei:2022dvn,Napolitano:2023jhg,Shenoy:2023ros,Kagan:2023gxz} + \item \textbf{Differentiable Simulation}~\cite{Heinrich:2022xfa,MODE:2022znx,Nachman:2022jbj,Lei:2022dvn,Napolitano:2023jhg,Shenoy:2023ros,Kagan:2023gxz,Aehle:2023wwi,Smith:2023ssh} \\\textit{Coding up a simulation using a differentiable programming language like TensorFlow, PyTorch, or JAX.} \end{itemize} \item \textbf{Uncertainty Quantification} @@ -217,14 +217,14 @@ \item \textbf{Formal Theory and ML} \\\textit{ML can also be utilized in formal theory.} \begin{itemize} - \item Theory and physics for ML~\cite{Erbin:2022lls,Zuniga-Galindo:2023hty,Banta:2023kqe,Zuniga-Galindo:2023uwp,Kumar:2023hlu,Demirtas:2023fir} - \item ML for theory~\cite{Berglund:2022gvm,Erbin:2022rgx,Gerdes:2022nzr,Escalante-Notario:2022fik,Chen:2022jwd,Cheung:2022itk,He:2023csq,Lal:2023dkj,Dorrill:2023vox,Forestano:2023ijh,Dersy:2023job,Cotler:2023lem,Mizera:2023bsw,Gnech:2023prs,Seong:2023njx} + \item Theory and physics for ML~\cite{Erbin:2022lls,Zuniga-Galindo:2023hty,Banta:2023kqe,Zuniga-Galindo:2023uwp,Kumar:2023hlu,Demirtas:2023fir,Halverson:2023ndu} + \item ML for 
theory~\cite{Berglund:2022gvm,Erbin:2022rgx,Gerdes:2022nzr,Escalante-Notario:2022fik,Chen:2022jwd,Cheung:2022itk,He:2023csq,Lal:2023dkj,Dorrill:2023vox,Forestano:2023ijh,Dersy:2023job,Cotler:2023lem,Mizera:2023bsw,Gnech:2023prs,Seong:2023njx,Wojcik:2023usm,Alawadhi:2023gxa,Choi:2023rqg,Halverson:2023ndu,Matchev:2023mii} \end{itemize} \item \textbf{Experimental results} \\\textit{This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.} \begin{itemize} \item Performance studies~\cite{CMS:2022prd,Yang:2022dwu,NEOS-II:2022mov,Jiang:2022zho,Gronroos:2023qff} - \item Searches and measurements were ML reconstruction is a core component~\cite{Keck:2018lcd,MicroBooNE:2021nxr,MicroBooNE:2021jwr,ATLAS:2022ihe,CMS:2022idi,CMS:2022fxs,Li:2022gpb,Tran:2022ago,Manganelli:2022whv,CMS:2022wjc,ATLAS:2023mcc,ATLAS:2023hbp,ATLAS:2023vxg,ATLAS:2023qdu,ATLAS:2023bzb,ATLAS:2023sbu,ATLAS:2023dnm,NOvA:2023uxq,Gravili:2023hbp}. + \item Searches and measurements where ML reconstruction is a core component~\cite{Keck:2018lcd,CMS:2019dqq,MicroBooNE:2021nxr,MicroBooNE:2021jwr,ATLAS:2022ihe,CMS:2022idi,CMS:2022fxs,Li:2022gpb,Tran:2022ago,Manganelli:2022whv,CMS:2022wjc,ATLAS:2023mcc,ATLAS:2023hbp,ATLAS:2023vxg,ATLAS:2023qdu,ATLAS:2023bzb,ATLAS:2023sbu,ATLAS:2023dnm,NOvA:2023uxq,Gravili:2023hbp,Dutta:2023jbz,Belfkir:2023vpo,Tung:2023lkv,Akar:2023puf,BOREXINO:2023pcv}. \item Final analysis discriminate for searches~\cite{Aad:2019yxi,Aad:2020hzm,collaboration2020dijet,Sirunyan:2020hwz,Manganelli:2022whv}. 
\item Measurements using deep learning directly (not through object reconstruction)~\cite{H1:2021wkz,H1:2023fzk} \end{itemize} diff --git a/README.md b/README.md index a61fe1f..f13ff07 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ The purpose of this note is to collect references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`. -This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. +This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. 
## Reviews ### Modern reviews @@ -135,6 +135,9 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [QCD-Aware Recursive Neural Networks for Jet Physics](https://arxiv.org/abs/1702.00748) [[DOI](https://doi.org/10.1007/JHEP01(2019)057)] * [Recursive Neural Networks in Quark/Gluon Tagging](https://arxiv.org/abs/1711.02633) [[DOI](https://doi.org/10.1007/s41781-018-0007-y)] * [Introduction and analysis of a method for the investigation of QCD-like tree data](https://arxiv.org/abs/2112.01809) +* [Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with Intermediate Mass Gaps at the Large Hadron Collider](https://arxiv.org/abs/2309.10197) +* [Boosting dark matter searches at muon colliders with Machine Learning: the mono-Higgs channel as a case study](https://arxiv.org/abs/2309.11241) +* [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) #### Graphs @@ -198,6 +201,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Jet energy calibration with deep learning as a Kubeflow pipeline](https://arxiv.org/abs/2308.12724) [[DOI](https://doi.org/10.1007/s41781-023-00103-y)] * [LLPNet: Graph Autoencoder for Triggering Light Long-Lived Particles at HL-LHC](https://arxiv.org/abs/2308.13611) * [Graph Structure from Point Clouds: Geometric Attention is All You Need](https://arxiv.org/abs/2307.16662) +* [Hypergraphs in LHC Phenomenology -- The Next Frontier of IRC-Safe Feature Extraction](https://arxiv.org/abs/2309.17351) #### Sets (point clouds) @@ -219,6 +223,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) * [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254) * [A data-driven and model-agnostic approach to solving combinatorial 
assignment problems in searches for new physics](https://arxiv.org/abs/2309.05728) +* [EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion](https://arxiv.org/abs/2310.00049) +* [The Optimal use of Segmentation for Sampling Calorimeters](https://arxiv.org/abs/2310.04442) #### Physics-inspired basis @@ -248,6 +254,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) * [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) +* [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) * [Explainable Equivariant Neural Networks for Particle Physics: PELICAN](https://arxiv.org/abs/2307.16506) #### $H\rightarrow b\bar{b}$ @@ -312,6 +319,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Hierarchical High-Point Energy Flow Network for Jet Tagging](https://arxiv.org/abs/2308.08300) * [Investigating the Violation of Charge-parity Symmetry Through Top-quark ChromoElectric Dipole Moments by Using Machine Learning Techniques](https://arxiv.org/abs/2306.11683) [[DOI](https://doi.org/10.5506/APhysPolB.54.5-A4)] * [ML-Based Top Taggers: Performance, Uncertainty and Impact of Tower \& Tracker Data Integration](https://arxiv.org/abs/2309.01568) +* [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) +* [19 Parameters Is All You Need: Tiny Neural Networks for Particle Physics](https://arxiv.org/abs/2310.16121) #### strange jets @@ -339,6 +348,7 @@ This review was built with the help of the 
HEP-ML community, the [INSPIRE REST A * [Predicting Exotic Hadron Masses with Data Augmentation Using Multilayer Perceptron](https://arxiv.org/abs/2208.09538) * [Revealing the nature of hidden charm pentaquarks with machine learning](https://arxiv.org/abs/2301.05364) [[DOI](https://doi.org/10.1016/j.scib.2023.04.018)] * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) +* [Differentiable Vertex Fitting for Jet Flavour Tagging](https://arxiv.org/abs/2310.12804) #### BSM particles and models @@ -403,6 +413,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781) * [Improving sensitivity of trilinear RPV SUSY searches using machine learning at the LHC](https://arxiv.org/abs/2308.02697) * [LLPNet: Graph Autoencoder for Triggering Light Long-Lived Particles at HL-LHC](https://arxiv.org/abs/2308.13611) +* [Machine Learning Classification of Sphalerons and Black Holes at the LHC](https://arxiv.org/abs/2310.15227) +* [Probing Light Fermiophobic Higgs Boson via diphoton jets at the HL-LHC](https://arxiv.org/abs/2310.17741) #### Particle identification @@ -426,6 +438,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Identification of light leptons and pions in the electromagnetic calorimeter of Belle II](https://arxiv.org/abs/2301.05074) * [Particle identification with the Belle II calorimeter using machine learning](https://arxiv.org/abs/2301.11654) [[DOI](https://doi.org/10.1088/1742-6596/2438/1/012111)] * [Improved calorimetric particle identification in NA62 using machine learning techniques](https://arxiv.org/abs/2304.10580) +* [Particle identification with machine learning in ALICE Run 3](https://arxiv.org/abs/2309.07768) +* [Study of residual artificial neural network for particle 
identification in the CEPC high-granularity calorimeter prototype](https://arxiv.org/abs/2310.09489) #### Neutrino Detectors @@ -484,6 +498,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Domain-informed neural networks for interaction localization within astroparticle experiments](https://arxiv.org/abs/2112.07995) * [Improving the machine learning based vertex reconstruction for large liquid scintillator detectors with multiple types of PMTs](https://arxiv.org/abs/2205.04039) * [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) +* [Detector signal characterization with a Bayesian network in XENONnT](https://arxiv.org/abs/2304.05428) [[DOI](https://doi.org/10.1103/PhysRevD.108.012016)] #### Cosmology, Astro Particle, and Cosmic Ray physics @@ -525,6 +540,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Core States of Neutron Stars from Anatomizing Their Scaled Structure Equations](https://arxiv.org/abs/2306.08202) [[DOI](https://doi.org/10.3847/1538-4357/acdef0)] * [A Deep Learning Approach to Extracting Nuclear Matter Properties from Neutron Star Observations](https://arxiv.org/abs/2303.17146) [[DOI](https://doi.org/10.3390/sym15051123)] * [Sequential Monte Carlo with Cross-validated Neural Networks for Complexity of Hyperbolic Black Hole Solutions in 4D](https://arxiv.org/abs/2308.07907) +* [Insights into neutron star equation of state by machine learning](https://arxiv.org/abs/2309.11227) #### Tracking @@ -557,6 +573,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Reconstruction of fast neutron direction in segmented organic detectors using deep learning](https://arxiv.org/abs/2301.10796) [[DOI](https://doi.org/10.1016/j.nima.2023.168024)] * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line 
Detectors](https://arxiv.org/abs/2306.09359) * [Comparing and improving hybrid deep learning algorithms for identifying and locating primary vertices](https://arxiv.org/abs/2304.02423) +* [HyperTrack: Neural Combinatorics for High Energy Physics](https://arxiv.org/abs/2309.14113) #### Heavy Ions / Nuclear Physics @@ -634,6 +651,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Generative modeling of nucleon-nucleon interactions](https://arxiv.org/abs/2306.13007) * [Artificial Intelligence for the Electron Ion Collider (AI4EIC)](https://arxiv.org/abs/2307.08593) * [Neural Network Solutions of Bosonic Quantum Systems in One Dimension](https://arxiv.org/abs/2309.02352) +* [Neural Network Emulation of Spontaneous Fission](https://arxiv.org/abs/2310.01608) ### Learning strategies @@ -643,6 +661,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Application of Deep Learning Technique to an Analysis of Hard Scattering Processes at Colliders](https://arxiv.org/abs/2109.08520) * [Support vector machines and generalisation in HEP](https://arxiv.org/abs/1702.04686) [[DOI](https://doi.org/10.1088/1742-6596/898/7/072021)] * [Principles for Initialization and Architecture Selection in Graph Neural Networks with ReLU Activations](https://arxiv.org/abs/2306.11668) +* [Event Generator Tuning Incorporating Systematic Uncertainty](https://arxiv.org/abs/2310.07566) #### Weak/Semi supervision @@ -692,6 +711,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) +* [Lattice real-time simulations with learned optimal 
kernels](https://arxiv.org/abs/2310.08053) #### Quantum Machine Learning @@ -752,6 +772,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Background Modeling for Double Higgs Boson Production: Density Ratios and Optimal Transport](https://arxiv.org/abs/2208.02807) * [Optimal transport for a global event description at high-intensity hadron colliders](https://arxiv.org/abs/2211.02029) * [Measurements of multijet event isotropies using optimal transport with the ATLAS detector](https://arxiv.org/abs/2305.16930) +* [Chained Quantile Morphing with Normalizing Flows](https://arxiv.org/abs/2309.15912) ### Fast inference / deployment @@ -812,6 +833,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Tetris-inspired detector with neural network for radiation mapping](https://arxiv.org/abs/2302.07099) * [Comparing machine learning models for tau triggers](https://arxiv.org/abs/2306.06743) * [Development of the Topological Trigger for LHCb Run 3](https://arxiv.org/abs/2306.09873) +* [Smart pixel sensors: towards on-sensor filtering of pixel clusters with deep learning](https://arxiv.org/abs/2310.02474) #### Deployment @@ -878,10 +900,12 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Correction of the baseline fluctuations in the GEM-based ALICE TPC](https://arxiv.org/abs/2304.03881) * [A first application of machine and deep learning for background rejection in the ALPS II TES detector](https://arxiv.org/abs/2304.08406) [[DOI](https://doi.org/10.1002/andp.202200545)] * [Jet energy calibration with deep learning as a Kubeflow pipeline](https://arxiv.org/abs/2308.12724) [[DOI](https://doi.org/10.1007/s41781-023-00103-y)] +* [Refining fast simulation using machine learning](https://arxiv.org/abs/2309.12919) +* [The Optimal use of Segmentation for Sampling Calorimeters](https://arxiv.org/abs/2310.04442) ### Recasting -* [The BSM-AI project: SUSY-AI--generalizing 
LHC limits on supersymmetry with machine learning](https://doi.org/{10.1140/epjc/s10052-017-4814-9) +* [The BSM-AI project: SUSY-AI--generalizing LHC limits on supersymmetry with machine learning](https://arxiv.org/abs/1605.02797) [[DOI](https://doi.org/10.1140/epjc/s10052-017-4814-9)] * [Accelerating the BSM interpretation of LHC data with machine learning](https://arxiv.org/abs/1611.02704) [[DOI](https://doi.org/10.1016/j.dark.2019.100293)] * [Bayesian Neural Networks for Fast SUSY Predictions](https://arxiv.org/abs/2007.04506) [[DOI](https://doi.org/10.1016/j.physletb.2020.136041)] * [Exploration of Parameter Spaces Assisted by Machine Learning](https://arxiv.org/abs/2207.09959) @@ -904,6 +928,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Loop Amplitudes from Precision Networks](https://arxiv.org/abs/2206.14831) [[DOI](https://doi.org/10.21468/SciPostPhysCore.6.2.034)] * [Unweighting multijet event generation using factorisation-aware neural networks](https://arxiv.org/abs/2301.13562) * [One-loop matrix element emulation with factorisation awareness](https://arxiv.org/abs/2302.04005) +* [Pole-fitting for complex functions: Enhancing standard techniques by artificial-neural-network classifiers and regressors](https://arxiv.org/abs/2309.08358) [[DOI](https://doi.org/10.1016/j.cpc.2023.108998)] ### Parameter estimation @@ -921,6 +946,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) * [Determination of high-energy hadronic interaction properties from observables of proton initiated extensive air showers](https://arxiv.org/abs/2304.08007) * [Improving the temporal resolution of event-based electron detectors using neural network cluster analysis](https://arxiv.org/abs/2307.16666) +* [First attempt of directionality reconstruction for atmospheric neutrinos in a 
large homogeneous liquid scintillator detector](https://arxiv.org/abs/2310.06281) ### Parton Distribution Functions (and related) @@ -989,10 +1015,14 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Signal-to-noise improvement through neural network contour deformations for 3D $SU(2)$ lattice gauge theory](https://arxiv.org/abs/2309.00600) * [Application of the path optimization method to a discrete spin system](https://arxiv.org/abs/2309.06018) * [Breaking Free with AI: The Deconfinement Transition](https://arxiv.org/abs/2309.07225) +* [Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices](https://arxiv.org/abs/2310.03381) +* [Lattice real-time simulations with learned optimal kernels](https://arxiv.org/abs/2310.08053) +* [Equivariant Transformer is all you need](https://arxiv.org/abs/2310.13222) ### Function Approximation * [Elvet -- a neural network-based differential equation and variational problem solver](https://arxiv.org/abs/2103.14575) +* [The DNNLikelihood: enhancing likelihood distribution with Deep Learning](https://arxiv.org/abs/1911.03305) [[DOI](https://doi.org/10.1140/epjc/s10052-020-8230-1)] * [Invariant polynomials and machine learning](https://arxiv.org/abs/2104.12733) * [Function Approximation for High-Energy Physics: Comparing Machine Learning and Interpolation Methods](https://arxiv.org/abs/2111.14788) * [Reconstructing spectral functions via automatic differentiation](https://arxiv.org/abs/2111.14760) @@ -1000,6 +1030,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Determination of the distribution of strong coupling constant with machine learning](https://arxiv.org/abs/2303.07968) * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) +* [The NFLikelihood: an 
unsupervised DNNLikelihood from Normalizing Flows](https://arxiv.org/abs/2309.09743) ### Symbolic Regression @@ -1016,6 +1047,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Predicting the Future of the CMS Detector: Crystal Radiation Damage and Machine Learning at the LHC](https://arxiv.org/abs/2303.15291) * [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) * [Autoencoder-based Online Data Quality Monitoring for the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2308.16659) +* [How to Understand Limitations of Generative Networks](https://arxiv.org/abs/2305.16774) +* [Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2309.10157) ## Equivariant networks. @@ -1222,6 +1255,13 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Inductive CaloFlow](https://arxiv.org/abs/2305.11934) * [SuperCalo: Calorimeter shower super-resolution](https://arxiv.org/abs/2308.11700) * [Flows for Flows: Morphing one Dataset into another with Maximum Likelihood Estimation](https://arxiv.org/abs/2309.06472) +* [The NFLikelihood: an unsupervised DNNLikelihood from Normalizing Flows](https://arxiv.org/abs/2309.09743) +* [Combining Resonant and Tail-based Anomaly Detection](https://arxiv.org/abs/2309.12918) +* [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) +* [Chained Quantile Morphing with Normalizing Flows](https://arxiv.org/abs/2309.15912) +* [Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices](https://arxiv.org/abs/2310.03381) +* [Simulation of Hadronic Interactions with Deep Generative Models](https://arxiv.org/abs/2310.07553) +* [Systematic Evaluation of Generative Machine Learning Capability to Simulate Distributions of Observables at the Large Hadron 
Collider](https://arxiv.org/abs/2310.08994) ### Diffusion Models @@ -1241,12 +1281,16 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [CaloScore v2: Single-shot Calorimeter Shower Simulation with Diffusion Models](https://arxiv.org/abs/2308.03847) * [Accelerating Markov Chain Monte Carlo sampling with diffusion models](https://arxiv.org/abs/2309.01454) * [CaloClouds II: Ultra-Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2309.05704) +* [EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion](https://arxiv.org/abs/2310.00049) +* [Full Phase Space Resonant Anomaly Detection](https://arxiv.org/abs/2310.06897) +* [Diffusion model approach to simulating electron-proton scattering events](https://arxiv.org/abs/2310.16308) ### Transformer Models * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) +* [Equivariant Transformer is all you need](https://arxiv.org/abs/2310.13222) ### Physics-inspired @@ -1255,6 +1299,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Exploring the Possibility of a Recovery of Physics Process Properties from a Neural Network Model](https://arxiv.org/abs/2007.13110) [[DOI](https://doi.org/10.3390/e22090994)] * [Explainable machine learning of the underlying physics of high-energy particle collisions](https://arxiv.org/abs/2012.06582) * [Symmetry meets AI](https://arxiv.org/abs/2103.06115) +* [Binary Discrimination Through Next-to-Leading Order](https://arxiv.org/abs/2309.14417) ### Mixture Models @@ -1399,6 +1444,11 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [GAN-AE : An anomaly 
detection algorithm for New Physics search in LHC data](https://arxiv.org/abs/2305.15179) * [Anomaly detection search for new resonances decaying into a Higgs boson and a generic new particle $X$ in hadronic final states using $\sqrt{s}](https://arxiv.org/abs/2306.03637) * [Boosting sensitivity to new physics with unsupervised anomaly detection in dijet resonance search](https://arxiv.org/abs/2308.02671) +* [Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2309.10157) +* [Combining Resonant and Tail-based Anomaly Detection](https://arxiv.org/abs/2309.12918) +* [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) +* [Full Phase Space Resonant Anomaly Detection](https://arxiv.org/abs/2310.06897) +* [Anomaly Detection in Presence of Irrelevant Features](https://arxiv.org/abs/2310.13057) ## Simulation-based (`likelihood-free') Inference ### Parameter estimation @@ -1433,9 +1483,11 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Reconstructing axion-like particles from beam dumps with simulation-based inference](https://arxiv.org/abs/2308.01353) * [Simulation-based inference in the search for CP violation in leptonic WH production](https://arxiv.org/abs/2308.02882) * [Scaling MadMiner with a deployment on REANA](https://arxiv.org/abs/2304.05814) +* [Precision-Machine Learning for the Matrix Element Method](https://arxiv.org/abs/2310.07752) ### Unfolding +* [DeepEfficiency - optimal efficiency inversion in higher dimensions at the LHC](https://arxiv.org/abs/1809.06101) * [OmniFold: A Method to Simultaneously Unfold All Observables](https://arxiv.org/abs/1911.09107) [[DOI](https://doi.org/10.1103/PhysRevLett.124.182001)] * [Unfolding with Generative Adversarial Networks](https://arxiv.org/abs/1806.00433) * [How to GAN away Detector Effects](https://arxiv.org/abs/1912.00477) 
[[DOI](https://doi.org/10.21468/SciPostPhys.8.4.070)] @@ -1492,11 +1544,14 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A ### Differentiable Simulation * [Differentiable Matrix Elements with MadJax](https://arxiv.org/abs/2203.00057) +* [Toward the end-to-end optimization of particle physics instruments with differentiable programming](https://arxiv.org/abs/2203.13818) [[DOI](https://doi.org/10.1016/j.revip.2023.100085)] * [Morphing parton showers with event derivatives](https://arxiv.org/abs/2208.02274) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) * [Differentiable Earth Mover's Distance for Data Compression at the High-Luminosity LHC](https://arxiv.org/abs/2306.04712) * [Branches of a Tree: Taking Derivatives of Programs with Discrete and Branching Randomness in High Energy Physics](https://arxiv.org/abs/2308.16680) +* [Progress in End-to-End Optimization of Detectors for Fundamental Physics with Differentiable Programming](https://arxiv.org/abs/2310.05673) +* [Differentiable Vertex Fitting for Jet Flavour Tagging](https://arxiv.org/abs/2310.12804) ## Uncertainty Quantification ### Interpretability @@ -1554,6 +1609,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A Correspondence Between Deep Boltzmann Machines and p-Adic Statistical Field Theories](https://arxiv.org/abs/2306.03751) * [Black holes and the loss landscape in machine learning](https://arxiv.org/abs/2306.14817) * [Neural Network Field Theories: Non-Gaussianity, Actions, and Locality](https://arxiv.org/abs/2307.03223) +* [Metric Flows with Neural Networks](https://arxiv.org/abs/2310.19870) ### ML for theory @@ -1572,6 +1628,11 @@ This review was built with the help of the 
HEP-ML community, the [INSPIRE REST A * [Scattering with Neural Operators](https://arxiv.org/abs/2308.14789) * [Distilling the essential elements of nuclear binding via neural-network quantum states](https://arxiv.org/abs/2308.16266) * [Unsupervised Machine Learning Techniques for Exploring Tropical Coamoeba, Brane Tilings and Seiberg Duality](https://arxiv.org/abs/2309.05702) +* [BFBrain: Scalar Bounded-From-Below Conditions from Bayesian Active Learning](https://arxiv.org/abs/2309.10959) +* [Constructing and Machine Learning Calabi-Yau Five-folds](https://arxiv.org/abs/2310.15966) +* [Machine Learning Regularization for the Minimum Volume Formula of Toric Calabi-Yau 3-folds](https://arxiv.org/abs/2310.19276) +* [Metric Flows with Neural Networks](https://arxiv.org/abs/2310.19870) +* [Seeking Truth and Beauty in Flavor Physics with Machine Learning](https://arxiv.org/abs/2311.00087) ## Experimental results. *This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. 
We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.* @@ -1587,6 +1648,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A ### Searches and measurements were ML reconstruction is a core component * [The Full Event Interpretation}: {An Exclusive Tagging Algorithm for the Belle II Experiment](https://arxiv.org/abs/1807.08680) [[DOI](https://doi.org/10.1007/s41781-019-0021-8)] +* [A deep neural network to search for new long-lived particles decaying to jets](https://arxiv.org/abs/1912.12238) [[DOI](https://doi.org/10.1088/2632-2153/ab9023)] * [Search for an anomalous excess of inclusive charged-current $\nu_e$ interactions in the MicroBooNE experiment using Wire-Cell reconstruction](https://arxiv.org/abs/2110.13978) * [Search for an anomalous excess of charged-current quasi-elastic $\nu_e$ interactions with the MicroBooNE experiment using Deep-Learning-based reconstruction](https://arxiv.org/abs/2110.14080) * [Search for supersymmetry in final states with missing transverse momentum and three or more b-jets in 139 fb$^{-1}$ of proton\textendash{}proton collisions at $\sqrt{s}](https://arxiv.org/abs/2211.08028) [[DOI](https://doi.org/10.1140/epjc/s10052-023-11543-6)] @@ -1605,6 +1667,11 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Evidence of off-shell Higgs boson production from $ZZ$ leptonic decay channels and constraints on its total width with the ATLAS detector](https://arxiv.org/abs/2304.01532) * [Measurement of \ensuremath{\nu}\ensuremath{\mu} charged-current inclusive \ensuremath{\pi}0 production in the NOvA near detector](https://arxiv.org/abs/2306.04028) [[DOI](https://doi.org/10.1103/PhysRevD.107.112008)] * [Searches for supersymmetric particles with prompt decays with the ATLAS detector](https://arxiv.org/abs/2306.15014) +* [Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with 
Intermediate Mass Gaps at the Large Hadron Collider](https://arxiv.org/abs/2309.10197) +* [Boosting dark matter searches at muon colliders with Machine Learning: the mono-Higgs channel as a case study](https://arxiv.org/abs/2309.11241) +* [Suppression of Neutron Background using Deep Neural Network and Fourier Frequency Analysis at the KOTO Experiment](https://arxiv.org/abs/2309.12063) +* [Advances in developing deep neural networks for finding primary vertices in proton-proton collisions at the LHC](https://arxiv.org/abs/2309.12417) +* [Novel techniques for alpha/beta pulse shape discrimination in Borexino](https://arxiv.org/abs/2310.11826) ### Final analysis discriminate for searches diff --git a/docs/about.md b/docs/about.md index e9ca6e8..46a14ce 100644 --- a/docs/about.md +++ b/docs/about.md @@ -5,5 +5,5 @@ hide: The purpose of this note is to collect references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`. 
-This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. +This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. diff --git a/docs/index.md b/docs/index.md index c78f1bf..3a1d293 100644 --- a/docs/index.md +++ b/docs/index.md @@ -186,6 +186,9 @@ const expandElements = shouldExpand => { * [QCD-Aware Recursive Neural Networks for Jet Physics](https://arxiv.org/abs/1702.00748) [[DOI](https://doi.org/10.1007/JHEP01(2019)057)] * [Recursive Neural Networks in Quark/Gluon Tagging](https://arxiv.org/abs/1711.02633) [[DOI](https://doi.org/10.1007/s41781-018-0007-y)] * [Introduction and analysis of a method for the investigation of QCD-like tree data](https://arxiv.org/abs/2112.01809) + * [Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with Intermediate Mass Gaps at the Large Hadron Collider](https://arxiv.org/abs/2309.10197) + * [Boosting dark matter searches at muon colliders with Machine Learning: the mono-Higgs channel as a case study](https://arxiv.org/abs/2309.11241) + * [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) #### Graphs @@ -249,6 +252,7 @@ const expandElements = shouldExpand => { * [Jet energy calibration with deep learning as a Kubeflow pipeline](https://arxiv.org/abs/2308.12724) [[DOI](https://doi.org/10.1007/s41781-023-00103-y)] * [LLPNet: Graph Autoencoder for Triggering Light Long-Lived Particles at HL-LHC](https://arxiv.org/abs/2308.13611) * [Graph Structure from Point Clouds: 
Geometric Attention is All You Need](https://arxiv.org/abs/2307.16662) + * [Hypergraphs in LHC Phenomenology -- The Next Frontier of IRC-Safe Feature Extraction](https://arxiv.org/abs/2309.17351) #### Sets (point clouds) @@ -270,6 +274,8 @@ const expandElements = shouldExpand => { * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) * [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254) * [A data-driven and model-agnostic approach to solving combinatorial assignment problems in searches for new physics](https://arxiv.org/abs/2309.05728) + * [EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion](https://arxiv.org/abs/2310.00049) + * [The Optimal use of Segmentation for Sampling Calorimeters](https://arxiv.org/abs/2310.04442) #### Physics-inspired basis @@ -304,6 +310,7 @@ const expandElements = shouldExpand => { * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) * [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) + * [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) * [Explainable Equivariant Neural Networks for Particle Physics: PELICAN](https://arxiv.org/abs/2307.16506) #### $H\rightarrow b\bar{b}$ @@ -368,6 +375,8 @@ const expandElements = shouldExpand => { * [Hierarchical High-Point Energy Flow Network for Jet Tagging](https://arxiv.org/abs/2308.08300) * [Investigating the Violation of Charge-parity Symmetry Through Top-quark ChromoElectric Dipole Moments by Using Machine Learning Techniques](https://arxiv.org/abs/2306.11683) [[DOI](https://doi.org/10.5506/APhysPolB.54.5-A4)] * [ML-Based Top 
Taggers: Performance, Uncertainty and Impact of Tower \& Tracker Data Integration](https://arxiv.org/abs/2309.01568) + * [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) + * [19 Parameters Is All You Need: Tiny Neural Networks for Particle Physics](https://arxiv.org/abs/2310.16121) #### strange jets @@ -395,6 +404,7 @@ const expandElements = shouldExpand => { * [Predicting Exotic Hadron Masses with Data Augmentation Using Multilayer Perceptron](https://arxiv.org/abs/2208.09538) * [Revealing the nature of hidden charm pentaquarks with machine learning](https://arxiv.org/abs/2301.05364) [[DOI](https://doi.org/10.1016/j.scib.2023.04.018)] * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) + * [Differentiable Vertex Fitting for Jet Flavour Tagging](https://arxiv.org/abs/2310.12804) #### BSM particles and models @@ -459,6 +469,8 @@ const expandElements = shouldExpand => { * [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781) * [Improving sensitivity of trilinear RPV SUSY searches using machine learning at the LHC](https://arxiv.org/abs/2308.02697) * [LLPNet: Graph Autoencoder for Triggering Light Long-Lived Particles at HL-LHC](https://arxiv.org/abs/2308.13611) + * [Machine Learning Classification of Sphalerons and Black Holes at the LHC](https://arxiv.org/abs/2310.15227) + * [Probing Light Fermiophobic Higgs Boson via diphoton jets at the HL-LHC](https://arxiv.org/abs/2310.17741) #### Particle identification @@ -482,6 +494,8 @@ const expandElements = shouldExpand => { * [Identification of light leptons and pions in the electromagnetic calorimeter of Belle II](https://arxiv.org/abs/2301.05074) * [Particle identification with the Belle II calorimeter using machine 
learning](https://arxiv.org/abs/2301.11654) [[DOI](https://doi.org/10.1088/1742-6596/2438/1/012111)] * [Improved calorimetric particle identification in NA62 using machine learning techniques](https://arxiv.org/abs/2304.10580) + * [Particle identification with machine learning in ALICE Run 3](https://arxiv.org/abs/2309.07768) + * [Study of residual artificial neural network for particle identification in the CEPC high-granularity calorimeter prototype](https://arxiv.org/abs/2310.09489) #### Neutrino Detectors @@ -540,6 +554,7 @@ const expandElements = shouldExpand => { * [Domain-informed neural networks for interaction localization within astroparticle experiments](https://arxiv.org/abs/2112.07995) * [Improving the machine learning based vertex reconstruction for large liquid scintillator detectors with multiple types of PMTs](https://arxiv.org/abs/2205.04039) * [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) + * [Detector signal characterization with a Bayesian network in XENONnT](https://arxiv.org/abs/2304.05428) [[DOI](https://doi.org/10.1103/PhysRevD.108.012016)] #### Cosmology, Astro Particle, and Cosmic Ray physics @@ -581,6 +596,7 @@ const expandElements = shouldExpand => { * [Core States of Neutron Stars from Anatomizing Their Scaled Structure Equations](https://arxiv.org/abs/2306.08202) [[DOI](https://doi.org/10.3847/1538-4357/acdef0)] * [A Deep Learning Approach to Extracting Nuclear Matter Properties from Neutron Star Observations](https://arxiv.org/abs/2303.17146) [[DOI](https://doi.org/10.3390/sym15051123)] * [Sequential Monte Carlo with Cross-validated Neural Networks for Complexity of Hyperbolic Black Hole Solutions in 4D](https://arxiv.org/abs/2308.07907) + * [Insights into neutron star equation of state by machine learning](https://arxiv.org/abs/2309.11227) #### Tracking @@ -613,6 +629,7 @@ const expandElements = shouldExpand => { * 
[Reconstruction of fast neutron direction in segmented organic detectors using deep learning](https://arxiv.org/abs/2301.10796) [[DOI](https://doi.org/10.1016/j.nima.2023.168024)] * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) * [Comparing and improving hybrid deep learning algorithms for identifying and locating primary vertices](https://arxiv.org/abs/2304.02423) + * [HyperTrack: Neural Combinatorics for High Energy Physics](https://arxiv.org/abs/2309.14113) #### Heavy Ions / Nuclear Physics @@ -690,6 +707,7 @@ const expandElements = shouldExpand => { * [Generative modeling of nucleon-nucleon interactions](https://arxiv.org/abs/2306.13007) * [Artificial Intelligence for the Electron Ion Collider (AI4EIC)](https://arxiv.org/abs/2307.08593) * [Neural Network Solutions of Bosonic Quantum Systems in One Dimension](https://arxiv.org/abs/2309.02352) + * [Neural Network Emulation of Spontaneous Fission](https://arxiv.org/abs/2310.01608) ??? 
example "Learning strategies" @@ -704,6 +722,7 @@ const expandElements = shouldExpand => { * [Application of Deep Learning Technique to an Analysis of Hard Scattering Processes at Colliders](https://arxiv.org/abs/2109.08520) * [Support vector machines and generalisation in HEP](https://arxiv.org/abs/1702.04686) [[DOI](https://doi.org/10.1088/1742-6596/898/7/072021)] * [Principles for Initialization and Architecture Selection in Graph Neural Networks with ReLU Activations](https://arxiv.org/abs/2306.11668) + * [Event Generator Tuning Incorporating Systematic Uncertainty](https://arxiv.org/abs/2310.07566) #### Weak/Semi supervision @@ -753,6 +772,7 @@ const expandElements = shouldExpand => { * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) + * [Lattice real-time simulations with learned optimal kernels](https://arxiv.org/abs/2310.08053) #### Quantum Machine Learning @@ -813,6 +833,7 @@ const expandElements = shouldExpand => { * [Background Modeling for Double Higgs Boson Production: Density Ratios and Optimal Transport](https://arxiv.org/abs/2208.02807) * [Optimal transport for a global event description at high-intensity hadron colliders](https://arxiv.org/abs/2211.02029) * [Measurements of multijet event isotropies using optimal transport with the ATLAS detector](https://arxiv.org/abs/2305.16930) + * [Chained Quantile Morphing with Normalizing Flows](https://arxiv.org/abs/2309.15912) ??? 
example "Fast inference / deployment" @@ -878,6 +899,7 @@ const expandElements = shouldExpand => { * [Tetris-inspired detector with neural network for radiation mapping](https://arxiv.org/abs/2302.07099) * [Comparing machine learning models for tau triggers](https://arxiv.org/abs/2306.06743) * [Development of the Topological Trigger for LHCb Run 3](https://arxiv.org/abs/2306.09873) + * [Smart pixel sensors: towards on-sensor filtering of pixel clusters with deep learning](https://arxiv.org/abs/2310.02474) #### Deployment @@ -954,6 +976,8 @@ const expandElements = shouldExpand => { * [Correction of the baseline fluctuations in the GEM-based ALICE TPC](https://arxiv.org/abs/2304.03881) * [A first application of machine and deep learning for background rejection in the ALPS II TES detector](https://arxiv.org/abs/2304.08406) [[DOI](https://doi.org/10.1002/andp.202200545)] * [Jet energy calibration with deep learning as a Kubeflow pipeline](https://arxiv.org/abs/2308.12724) [[DOI](https://doi.org/10.1007/s41781-023-00103-y)] + * [Refining fast simulation using machine learning](https://arxiv.org/abs/2309.12919) + * [The Optimal use of Segmentation for Sampling Calorimeters](https://arxiv.org/abs/2310.04442) ??? 
example "Recasting" @@ -962,7 +986,7 @@ const expandElements = shouldExpand => { ### Recasting - * [The BSM-AI project: SUSY-AI--generalizing LHC limits on supersymmetry with machine learning](https://doi.org/{10.1140/epjc/s10052-017-4814-9) + * [The BSM-AI project: SUSY-AI--generalizing LHC limits on supersymmetry with machine learning](https://arxiv.org/abs/1605.02797) [[DOI](https://doi.org/10.1140/epjc/s10052-017-4814-9)] * [Accelerating the BSM interpretation of LHC data with machine learning](https://arxiv.org/abs/1611.02704) [[DOI](https://doi.org/10.1016/j.dark.2019.100293)] * [Bayesian Neural Networks for Fast SUSY Predictions](https://arxiv.org/abs/2007.04506) [[DOI](https://doi.org/10.1016/j.physletb.2020.136041)] * [Exploration of Parameter Spaces Assisted by Machine Learning](https://arxiv.org/abs/2207.09959) @@ -990,6 +1014,7 @@ const expandElements = shouldExpand => { * [Loop Amplitudes from Precision Networks](https://arxiv.org/abs/2206.14831) [[DOI](https://doi.org/10.21468/SciPostPhysCore.6.2.034)] * [Unweighting multijet event generation using factorisation-aware neural networks](https://arxiv.org/abs/2301.13562) * [One-loop matrix element emulation with factorisation awareness](https://arxiv.org/abs/2302.04005) + * [Pole-fitting for complex functions: Enhancing standard techniques by artificial-neural-network classifiers and regressors](https://arxiv.org/abs/2309.08358) [[DOI](https://doi.org/10.1016/j.cpc.2023.108998)] ??? 
example "Parameter estimation" @@ -1012,6 +1037,7 @@ const expandElements = shouldExpand => { * [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) * [Determination of high-energy hadronic interaction properties from observables of proton initiated extensive air showers](https://arxiv.org/abs/2304.08007) * [Improving the temporal resolution of event-based electron detectors using neural network cluster analysis](https://arxiv.org/abs/2307.16666) + * [First attempt of directionality reconstruction for atmospheric neutrinos in a large homogeneous liquid scintillator detector](https://arxiv.org/abs/2310.06281) ??? example "Parton Distribution Functions (and related)" @@ -1090,6 +1116,9 @@ const expandElements = shouldExpand => { * [Signal-to-noise improvement through neural network contour deformations for 3D $SU(2)$ lattice gauge theory](https://arxiv.org/abs/2309.00600) * [Application of the path optimization method to a discrete spin system](https://arxiv.org/abs/2309.06018) * [Breaking Free with AI: The Deconfinement Transition](https://arxiv.org/abs/2309.07225) + * [Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices](https://arxiv.org/abs/2310.03381) + * [Lattice real-time simulations with learned optimal kernels](https://arxiv.org/abs/2310.08053) + * [Equivariant Transformer is all you need](https://arxiv.org/abs/2310.13222) ??? 
example "Function Approximation" @@ -1099,6 +1128,7 @@ const expandElements = shouldExpand => { * [Elvet -- a neural network-based differential equation and variational problem solver](https://arxiv.org/abs/2103.14575) + * [The DNNLikelihood: enhancing likelihood distribution with Deep Learning](https://arxiv.org/abs/1911.03305) [[DOI](https://doi.org/10.1140/epjc/s10052-020-8230-1)] * [Invariant polynomials and machine learning](https://arxiv.org/abs/2104.12733) * [Function Approximation for High-Energy Physics: Comparing Machine Learning and Interpolation Methods](https://arxiv.org/abs/2111.14788) * [Reconstructing spectral functions via automatic differentiation](https://arxiv.org/abs/2111.14760) @@ -1106,6 +1136,7 @@ const expandElements = shouldExpand => { * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Determination of the distribution of strong coupling constant with machine learning](https://arxiv.org/abs/2303.07968) * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) + * [The NFLikelihood: an unsupervised DNNLikelihood from Normalizing Flows](https://arxiv.org/abs/2309.09743) ??? 
example "Symbolic Regression" @@ -1132,6 +1163,8 @@ const expandElements = shouldExpand => { * [Predicting the Future of the CMS Detector: Crystal Radiation Damage and Machine Learning at the LHC](https://arxiv.org/abs/2303.15291) * [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) * [Autoencoder-based Online Data Quality Monitoring for the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2308.16659) + * [How to Understand Limitations of Generative Networks](https://arxiv.org/abs/2305.16774) + * [Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2309.10157) ## Equivariant networks. @@ -1359,6 +1392,13 @@ const expandElements = shouldExpand => { * [Inductive CaloFlow](https://arxiv.org/abs/2305.11934) * [SuperCalo: Calorimeter shower super-resolution](https://arxiv.org/abs/2308.11700) * [Flows for Flows: Morphing one Dataset into another with Maximum Likelihood Estimation](https://arxiv.org/abs/2309.06472) + * [The NFLikelihood: an unsupervised DNNLikelihood from Normalizing Flows](https://arxiv.org/abs/2309.09743) + * [Combining Resonant and Tail-based Anomaly Detection](https://arxiv.org/abs/2309.12918) + * [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) + * [Chained Quantile Morphing with Normalizing Flows](https://arxiv.org/abs/2309.15912) + * [Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices](https://arxiv.org/abs/2310.03381) + * [Simulation of Hadronic Interactions with Deep Generative Models](https://arxiv.org/abs/2310.07553) + * [Systematic Evaluation of Generative Machine Learning Capability to Simulate Distributions of Observables at the Large Hadron Collider](https://arxiv.org/abs/2310.08994) ??? 
example "Diffusion Models" @@ -1383,6 +1423,9 @@ const expandElements = shouldExpand => { * [CaloScore v2: Single-shot Calorimeter Shower Simulation with Diffusion Models](https://arxiv.org/abs/2308.03847) * [Accelerating Markov Chain Monte Carlo sampling with diffusion models](https://arxiv.org/abs/2309.01454) * [CaloClouds II: Ultra-Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2309.05704) + * [EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion](https://arxiv.org/abs/2310.00049) + * [Full Phase Space Resonant Anomaly Detection](https://arxiv.org/abs/2310.06897) + * [Diffusion model approach to simulating electron-proton scattering events](https://arxiv.org/abs/2310.16308) ??? example "Transformer Models" @@ -1394,6 +1437,7 @@ const expandElements = shouldExpand => { * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) + * [Equivariant Transformer is all you need](https://arxiv.org/abs/2310.13222) ??? example "Physics-inspired" @@ -1407,6 +1451,7 @@ const expandElements = shouldExpand => { * [Exploring the Possibility of a Recovery of Physics Process Properties from a Neural Network Model](https://arxiv.org/abs/2007.13110) [[DOI](https://doi.org/10.3390/e22090994)] * [Explainable machine learning of the underlying physics of high-energy particle collisions](https://arxiv.org/abs/2012.06582) * [Symmetry meets AI](https://arxiv.org/abs/2103.06115) + * [Binary Discrimination Through Next-to-Leading Order](https://arxiv.org/abs/2309.14417) ??? 
example "Mixture Models" @@ -1574,6 +1619,11 @@ const expandElements = shouldExpand => { * [GAN-AE : An anomaly detection algorithm for New Physics search in LHC data](https://arxiv.org/abs/2305.15179) * [Anomaly detection search for new resonances decaying into a Higgs boson and a generic new particle $X$ in hadronic final states using $\sqrt{s}](https://arxiv.org/abs/2306.03637) * [Boosting sensitivity to new physics with unsupervised anomaly detection in dijet resonance search](https://arxiv.org/abs/2308.02671) + * [Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2309.10157) + * [Combining Resonant and Tail-based Anomaly Detection](https://arxiv.org/abs/2309.12918) + * [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) + * [Full Phase Space Resonant Anomaly Detection](https://arxiv.org/abs/2310.06897) + * [Anomaly Detection in Presence of Irrelevant Features](https://arxiv.org/abs/2310.13057) ## Simulation-based (`likelihood-free') Inference @@ -1613,6 +1663,7 @@ const expandElements = shouldExpand => { * [Reconstructing axion-like particles from beam dumps with simulation-based inference](https://arxiv.org/abs/2308.01353) * [Simulation-based inference in the search for CP violation in leptonic WH production](https://arxiv.org/abs/2308.02882) * [Scaling MadMiner with a deployment on REANA](https://arxiv.org/abs/2304.05814) + * [Precision-Machine Learning for the Matrix Element Method](https://arxiv.org/abs/2310.07752) ??? 
example "Unfolding" @@ -1621,6 +1672,7 @@ const expandElements = shouldExpand => { ### Unfolding + * [DeepEfficiency - optimal efficiency inversion in higher dimensions at the LHC](https://arxiv.org/abs/1809.06101) * [OmniFold: A Method to Simultaneously Unfold All Observables](https://arxiv.org/abs/1911.09107) [[DOI](https://doi.org/10.1103/PhysRevLett.124.182001)] * [Unfolding with Generative Adversarial Networks](https://arxiv.org/abs/1806.00433) * [How to GAN away Detector Effects](https://arxiv.org/abs/1912.00477) [[DOI](https://doi.org/10.21468/SciPostPhys.8.4.070)] @@ -1692,11 +1744,14 @@ const expandElements = shouldExpand => { * [Differentiable Matrix Elements with MadJax](https://arxiv.org/abs/2203.00057) + * [Toward the end-to-end optimization of particle physics instruments with differentiable programming](https://arxiv.org/abs/2203.13818) [[DOI](https://doi.org/10.1016/j.revip.2023.100085)] * [Morphing parton showers with event derivatives](https://arxiv.org/abs/2208.02274) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) * [Differentiable Earth Mover's Distance for Data Compression at the High-Luminosity LHC](https://arxiv.org/abs/2306.04712) * [Branches of a Tree: Taking Derivatives of Programs with Discrete and Branching Randomness in High Energy Physics](https://arxiv.org/abs/2308.16680) + * [Progress in End-to-End Optimization of Detectors for Fundamental Physics with Differentiable Programming](https://arxiv.org/abs/2310.05673) + * [Differentiable Vertex Fitting for Jet Flavour Tagging](https://arxiv.org/abs/2310.12804) ## Uncertainty Quantification @@ -1779,6 +1834,7 @@ const expandElements = shouldExpand => { * [A Correspondence Between Deep Boltzmann Machines and p-Adic Statistical Field 
Theories](https://arxiv.org/abs/2306.03751) * [Black holes and the loss landscape in machine learning](https://arxiv.org/abs/2306.14817) * [Neural Network Field Theories: Non-Gaussianity, Actions, and Locality](https://arxiv.org/abs/2307.03223) + * [Metric Flows with Neural Networks](https://arxiv.org/abs/2310.19870) ??? example "ML for theory" @@ -1802,6 +1858,11 @@ const expandElements = shouldExpand => { * [Scattering with Neural Operators](https://arxiv.org/abs/2308.14789) * [Distilling the essential elements of nuclear binding via neural-network quantum states](https://arxiv.org/abs/2308.16266) * [Unsupervised Machine Learning Techniques for Exploring Tropical Coamoeba, Brane Tilings and Seiberg Duality](https://arxiv.org/abs/2309.05702) + * [BFBrain: Scalar Bounded-From-Below Conditions from Bayesian Active Learning](https://arxiv.org/abs/2309.10959) + * [Constructing and Machine Learning Calabi-Yau Five-folds](https://arxiv.org/abs/2310.15966) + * [Machine Learning Regularization for the Minimum Volume Formula of Toric Calabi-Yau 3-folds](https://arxiv.org/abs/2310.19276) + * [Metric Flows with Neural Networks](https://arxiv.org/abs/2310.19870) + * [Seeking Truth and Beauty in Flavor Physics with Machine Learning](https://arxiv.org/abs/2311.00087) ## Experimental results. *This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. 
We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.* @@ -1827,6 +1888,7 @@ const expandElements = shouldExpand => { * [The Full Event Interpretation}: {An Exclusive Tagging Algorithm for the Belle II Experiment](https://arxiv.org/abs/1807.08680) [[DOI](https://doi.org/10.1007/s41781-019-0021-8)] + * [A deep neural network to search for new long-lived particles decaying to jets](https://arxiv.org/abs/1912.12238) [[DOI](https://doi.org/10.1088/2632-2153/ab9023)] * [Search for an anomalous excess of inclusive charged-current $\nu_e$ interactions in the MicroBooNE experiment using Wire-Cell reconstruction](https://arxiv.org/abs/2110.13978) * [Search for an anomalous excess of charged-current quasi-elastic $\nu_e$ interactions with the MicroBooNE experiment using Deep-Learning-based reconstruction](https://arxiv.org/abs/2110.14080) * [Search for supersymmetry in final states with missing transverse momentum and three or more b-jets in 139 fb$^{-1}$ of proton\textendash{}proton collisions at $\sqrt{s}](https://arxiv.org/abs/2211.08028) [[DOI](https://doi.org/10.1140/epjc/s10052-023-11543-6)] @@ -1845,6 +1907,11 @@ const expandElements = shouldExpand => { * [Evidence of off-shell Higgs boson production from $ZZ$ leptonic decay channels and constraints on its total width with the ATLAS detector](https://arxiv.org/abs/2304.01532) * [Measurement of \ensuremath{\nu}\ensuremath{\mu} charged-current inclusive \ensuremath{\pi}0 production in the NOvA near detector](https://arxiv.org/abs/2306.04028) [[DOI](https://doi.org/10.1103/PhysRevD.107.112008)] * [Searches for supersymmetric particles with prompt decays with the ATLAS detector](https://arxiv.org/abs/2306.15014) + * [Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with Intermediate Mass Gaps at the Large Hadron Collider](https://arxiv.org/abs/2309.10197) + * [Boosting dark matter searches at muon colliders with Machine 
Learning: the mono-Higgs channel as a case study](https://arxiv.org/abs/2309.11241) + * [Suppression of Neutron Background using Deep Neural Network and Fourier Frequency Analysis at the KOTO Experiment](https://arxiv.org/abs/2309.12063) + * [Advances in developing deep neural networks for finding primary vertices in proton-proton collisions at the LHC](https://arxiv.org/abs/2309.12417) + * [Novel techniques for alpha/beta pulse shape discrimination in Borexino](https://arxiv.org/abs/2310.11826) ??? example "Final analysis discriminate for searches" diff --git a/docs/recent.md b/docs/recent.md index b7b12e7..2584795 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -9,107 +9,49 @@ search: This is an automatically compiled list of papers which have been added to the living review that were made public within the previous 4 months at the time of updating. This is not an exhaustive list of released papers, and is only able to find those which have both year and month data provided in the bib reference. 
-## September 2023 -* [Breaking Free with AI: The Deconfinement Transition](https://arxiv.org/abs/2309.07225) -* [CaloShowerGAN, a Generative Adversarial Networks model for fast calorimeter shower simulation](https://arxiv.org/abs/2309.06515) -* [Electron Energy Regression in the CMS High-Granularity Calorimeter Prototype](https://arxiv.org/abs/2309.06582) -* [Flows for Flows: Morphing one Dataset into another with Maximum Likelihood Estimation](https://arxiv.org/abs/2309.06472) -* [CaloClouds II: Ultra-Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2309.05704) -* [Unsupervised Machine Learning Techniques for Exploring Tropical Coamoeba, Brane Tilings and Seiberg Duality](https://arxiv.org/abs/2309.05702) -* [Application of the path optimization method to a discrete spin system](https://arxiv.org/abs/2309.06018) -* [A data-driven and model-agnostic approach to solving combinatorial assignment problems in searches for new physics](https://arxiv.org/abs/2309.05728) -* [LHC Study of Third-Generation Scalar Leptoquarks with Machine-Learned Likelihoods](https://arxiv.org/abs/2309.05407) -* [Neural Network Solutions of Bosonic Quantum Systems in One Dimension](https://arxiv.org/abs/2309.02352) -* [Accelerating Markov Chain Monte Carlo sampling with diffusion models](https://arxiv.org/abs/2309.01454) -* [ML-Based Top Taggers: Performance, Uncertainty and Impact of Tower \& Tracker Data Integration](https://arxiv.org/abs/2309.01568) -* [Signal-to-noise improvement through neural network contour deformations for 3D $SU(2)$ lattice gauge theory](https://arxiv.org/abs/2309.00600) - -## August 2023 -* [Mapping QGP properties in Pb--Pb and Xe--Xe collisions at the LHC](https://arxiv.org/abs/2308.16722) -* [Autoencoder-based Online Data Quality Monitoring for the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2308.16659) -* [Distilling the essential elements of nuclear binding via neural-network quantum 
states](https://arxiv.org/abs/2308.16266) -* [Scattering with Neural Operators](https://arxiv.org/abs/2308.14789) -* [A Neural Network Approach for Orienting Heavy-Ion Collision Events](https://arxiv.org/abs/2308.15796) -* [Branches of a Tree: Taking Derivatives of Programs with Discrete and Branching Randomness in High Energy Physics](https://arxiv.org/abs/2308.16680) -* [LLPNet: Graph Autoencoder for Triggering Light Long-Lived Particles at HL-LHC](https://arxiv.org/abs/2308.13611) -* [Improving Generative Model-based Unfolding with Schr\"odinger Bridges](https://arxiv.org/abs/2308.12351) -* [Renormalizing Diffusion Models](https://arxiv.org/abs/2308.12355) -* [Refining Fast Calorimeter Simulations with a Schr\"odinger Bridge](https://arxiv.org/abs/2308.12339) -* [SuperCalo: Calorimeter shower super-resolution](https://arxiv.org/abs/2308.11700) -* [Jet energy calibration with deep learning as a Kubeflow pipeline](https://arxiv.org/abs/2308.12724) [[DOI](https://doi.org/10.1007/s41781-023-00103-y)] -* [Overview: Jet quenching with machine learning](https://arxiv.org/abs/2308.10035) -* [Reconstructing $S$-matrix Phases with Machine Learning](https://arxiv.org/abs/2308.09451) -* [SR-GAN for SR-gamma: photon super resolution at collider experiments](https://arxiv.org/abs/2308.09025) -* [Hierarchical High-Point Energy Flow Network for Jet Tagging](https://arxiv.org/abs/2308.08300) -* [Sequential Monte Carlo with Cross-validated Neural Networks for Complexity of Hyperbolic Black Hole Solutions in 4D](https://arxiv.org/abs/2308.07907) -* [Boosting likelihood learning with event reweighting](https://arxiv.org/abs/2308.05704) -* [CaloDiffusion with GLaM for High Fidelity Calorimeter Simulation](https://arxiv.org/abs/2308.03876) -* [Simulation-based inference in the search for CP violation in leptonic WH production](https://arxiv.org/abs/2308.02882) -* [Boosting sensitivity to new physics with unsupervised anomaly detection in dijet resonance 
search](https://arxiv.org/abs/2308.02671) -* [Improving sensitivity of trilinear RPV SUSY searches using machine learning at the LHC](https://arxiv.org/abs/2308.02697) -* [CaloScore v2: Single-shot Calorimeter Shower Simulation with Diffusion Models](https://arxiv.org/abs/2308.03847) -* [Reconstructing axion-like particles from beam dumps with simulation-based inference](https://arxiv.org/abs/2308.01353) -* [Inclusive, prompt and non-prompt $\rm{J}/\psi$ identification in proton-proton collisions at the Large Hadron Collider using machine learning](https://arxiv.org/abs/2308.00329) +## October 2023 +* [Seeking Truth and Beauty in Flavor Physics with Machine Learning](https://arxiv.org/abs/2311.00087) +* [Machine Learning Regularization for the Minimum Volume Formula of Toric Calabi-Yau 3-folds](https://arxiv.org/abs/2310.19276) +* [Metric Flows with Neural Networks](https://arxiv.org/abs/2310.19870) +* [Probing Light Fermiophobic Higgs Boson via diphoton jets at the HL-LHC](https://arxiv.org/abs/2310.17741) +* [Diffusion model approach to simulating electron-proton scattering events](https://arxiv.org/abs/2310.16308) +* [19 Parameters Is All You Need: Tiny Neural Networks for Particle Physics](https://arxiv.org/abs/2310.16121) +* [Constructing and Machine Learning Calabi-Yau Five-folds](https://arxiv.org/abs/2310.15966) +* [Machine Learning Classification of Sphalerons and Black Holes at the LHC](https://arxiv.org/abs/2310.15227) +* [Anomaly Detection in Presence of Irrelevant Features](https://arxiv.org/abs/2310.13057) +* [Differentiable Vertex Fitting for Jet Flavour Tagging](https://arxiv.org/abs/2310.12804) +* [Equivariant Transformer is all you need](https://arxiv.org/abs/2310.13222) +* [Novel techniques for alpha/beta pulse shape discrimination in Borexino](https://arxiv.org/abs/2310.11826) +* [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM 
Processes](https://arxiv.org/abs/2310.13009) +* [Study of residual artificial neural network for particle identification in the CEPC high-granularity calorimeter prototype](https://arxiv.org/abs/2310.09489) +* [Systematic Evaluation of Generative Machine Learning Capability to Simulate Distributions of Observables at the Large Hadron Collider](https://arxiv.org/abs/2310.08994) +* [Lattice real-time simulations with learned optimal kernels](https://arxiv.org/abs/2310.08053) +* [Event Generator Tuning Incorporating Systematic Uncertainty](https://arxiv.org/abs/2310.07566) +* [Simulation of Hadronic Interactions with Deep Generative Models](https://arxiv.org/abs/2310.07553) +* [Precision-Machine Learning for the Matrix Element Method](https://arxiv.org/abs/2310.07752) +* [Full Phase Space Resonant Anomaly Detection](https://arxiv.org/abs/2310.06897) +* [First attempt of directionality reconstruction for atmospheric neutrinos in a large homogeneous liquid scintillator detector](https://arxiv.org/abs/2310.06281) +* [Learning Trivializing Flows in a $\phi^4$ theory from coarser lattices](https://arxiv.org/abs/2310.03381) +* [Smart pixel sensors: towards on-sensor filtering of pixel clusters with deep learning](https://arxiv.org/abs/2310.02474) +* [The Optimal use of Segmentation for Sampling Calorimeters](https://arxiv.org/abs/2310.04442) +* [Neural Network Emulation of Spontaneous Fission](https://arxiv.org/abs/2310.01608) -## July 2023 -* [Explainable Equivariant Neural Networks for Particle Physics: PELICAN](https://arxiv.org/abs/2307.16506) -* [Improving the temporal resolution of event-based electron detectors using neural network cluster analysis](https://arxiv.org/abs/2307.16666) -* [Determination of impact parameter for CEE with Digi-input neural networks](https://arxiv.org/abs/2307.15355) -* [Graph Structure from Point Clouds: Geometric Attention is All You Need](https://arxiv.org/abs/2307.16662) -* [Score-based Diffusion Models for Generating Liquid Argon Time 
Projection Chamber Images](https://arxiv.org/abs/2307.13687) -* [Tip of the Red Giant Branch Bounds on the Neutrino Magnetic Dipole Moment Revisited](https://arxiv.org/abs/2307.13050) -* [The Interplay of Machine Learning--based Resonant Anomaly Detection Methods](https://arxiv.org/abs/2307.11157) -* [Public Kaggle Competition ''IceCube -- Neutrinos in Deep Ice''](https://arxiv.org/abs/2307.15289) -* [Particle-flow based tau identification at future $\textrm{e}^{+}\textrm{e}^{-}$ colliders](https://arxiv.org/abs/2307.07747) -* [Real-time Graph Building on FPGAs for Machine Learning Trigger Applications in Particle Physics](https://arxiv.org/abs/2307.07289) -* [Artificial Intelligence for the Electron Ion Collider (AI4EIC)](https://arxiv.org/abs/2307.08593) -* [Improved selective background Monte Carlo simulation at Belle II with graph attention networks and weighted events](https://arxiv.org/abs/2307.06434) -* [Towards an integrated determination of proton, deuteron and nuclear PDFs](https://arxiv.org/abs/2307.05967) -* [Accelerated Discovery of Machine-Learned Symmetries: Deriving the Exceptional Lie Groups G2, F4 and E6](https://arxiv.org/abs/2307.04891) -* [Fast Neural Network Inference on FPGAs for Triggering on Long-Lived Particles at Colliders](https://arxiv.org/abs/2307.05152) -* [Precise Image Generation on Current Noisy Quantum Computing Devices](https://arxiv.org/abs/2307.05253) -* [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) -* [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) -* [Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation](https://arxiv.org/abs/2307.04780) -* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) -* [Neural Network Field Theories: Non-Gaussianity, Actions, and Locality](https://arxiv.org/abs/2307.03223) -* [$\nu^2$-Flows: Fast and improved neutrino 
reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) -* [Sampling the lattice Nambu-Goto string using Continuous Normalizing Flows](https://arxiv.org/abs/2307.01107) -* [Teaching to extract spectral densities from lattice correlators to a broad audience of learning-machines](https://arxiv.org/abs/2307.00808) -* [Pinning down the leptophobic $Z^\prime$ in leptonic final states with Deep Learning](https://arxiv.org/abs/2307.01118) - -## June 2023 -* [Learning to Isolate Muons in Data](https://arxiv.org/abs/2306.15737) -* [Searches for supersymmetric particles with prompt decays with the ATLAS detector](https://arxiv.org/abs/2306.15014) -* [Black holes and the loss landscape in machine learning](https://arxiv.org/abs/2306.14817) -* [Implicit Quantile Networks For Emulation in Jet Physics](https://arxiv.org/abs/2306.15053) -* [Autoencoders for Real-Time SUEP Detection](https://arxiv.org/abs/2306.13595) -* [Generative modeling of nucleon-nucleon interactions](https://arxiv.org/abs/2306.13007) -* [Machine Learning methods for simulating particle response in the Zero Degree Calorimeter at the ALICE experiment, CERN](https://arxiv.org/abs/2306.13606) -* [Retrieval of Boost Invariant Symbolic Observables via Feature Importance](https://arxiv.org/abs/2306.13496) -* [Triggering Dark Showers with Conditional Dual Auto-Encoders](https://arxiv.org/abs/2306.12955) -* [Towards accurate real-time luminescence thermometry: an automated machine learning approach](https://arxiv.org/abs/2307.05497) -* [Analysis of a Skyrme energy density functional with deep learning](https://arxiv.org/abs/2306.11314) -* [Constraining the Woods-Saxon potential in fusion reactions based on a physics-informed neural network](https://arxiv.org/abs/2306.11236) -* [Hierarchical Neural Simulation-Based Inference Over Event Ensembles](https://arxiv.org/abs/2306.12584) -* [Fast $b$-tagging at the high-level trigger of the ATLAS experiment in LHC 
Run 3](https://arxiv.org/abs/2306.09738) -* [Principles for Initialization and Architecture Selection in Graph Neural Networks with ReLU Activations](https://arxiv.org/abs/2306.11668) -* [Development of the Topological Trigger for LHCb Run 3](https://arxiv.org/abs/2306.09873) -* [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) -* [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) -* [Applications of Deep Learning to physics workflows](https://arxiv.org/abs/2306.08106) -* [IMSRG-Net: A machine learning-based solver for In-Medium Similarity Renormalization Group](https://arxiv.org/abs/2306.08878) -* [Generative deep-learning reveals collective variables of Fermionic systems](https://arxiv.org/abs/2306.08348) -* [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) -* [Comparing machine learning models for tau triggers](https://arxiv.org/abs/2306.06743) -* [Decoding Neutron Star Observations: Revealing Composition through Bayesian Neural Networks](https://arxiv.org/abs/2306.06929) -* [NuCLR: Nuclear Co-Learned Representations](https://arxiv.org/abs/2306.06099) -* [Differentiable Earth Mover's Distance for Data Compression at the High-Luminosity LHC](https://arxiv.org/abs/2306.04712) -* [Photon Reconstruction in the Belle II Calorimeter Using Graph Neural Networks](https://arxiv.org/abs/2306.04179) -* [Flavour tagging with graph neural networks with the ATLAS detector](https://arxiv.org/abs/2306.04415) -* [Nuclear mass predictions based on deep neural network and finite-range droplet model (2012)](https://arxiv.org/abs/2306.04171) -* [Anomaly detection search for new resonances decaying into a Higgs boson and a generic new particle $X$ in hadronic final states using $\sqrt{s}](https://arxiv.org/abs/2306.03637) -* [A 
Correspondence Between Deep Boltzmann Machines and p-Adic Statistical Field Theories](https://arxiv.org/abs/2306.03751) -* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) -* [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) +## September 2023 +* [Progress in End-to-End Optimization of Detectors for Fundamental Physics with Differentiable Programming](https://arxiv.org/abs/2310.05673) +* [Hypergraphs in LHC Phenomenology -- The Next Frontier of IRC-Safe Feature Extraction](https://arxiv.org/abs/2309.17351) +* [EPiC-ly Fast Particle Cloud Generation with Flow-Matching and Diffusion](https://arxiv.org/abs/2310.00049) +* [Chained Quantile Morphing with Normalizing Flows](https://arxiv.org/abs/2309.15912) +* [HyperTrack: Neural Combinatorics for High Energy Physics](https://arxiv.org/abs/2309.14113) +* [Binary Discrimination Through Next-to-Leading Order](https://arxiv.org/abs/2309.14417) +* [Combining Resonant and Tail-based Anomaly Detection](https://arxiv.org/abs/2309.12918) +* [Back To The Roots: Tree-Based Algorithms for Weakly Supervised Anomaly Detection](https://arxiv.org/abs/2309.13111) +* [Refining fast simulation using machine learning](https://arxiv.org/abs/2309.12919) +* [Suppression of Neutron Background using Deep Neural Network and Fourier Frequency Analysis at the KOTO Experiment](https://arxiv.org/abs/2309.12063) +* [Advances in developing deep neural networks for finding primary vertices in proton-proton collisions at the LHC](https://arxiv.org/abs/2309.12417) +* [Boosting dark matter searches at muon colliders with Machine Learning: the mono-Higgs channel as a case study](https://arxiv.org/abs/2309.11241) +* [Insights into neutron star equation of state by machine learning](https://arxiv.org/abs/2309.11227) +* [BFBrain: Scalar Bounded-From-Below Conditions from Bayesian Active 
Learning](https://arxiv.org/abs/2309.10959) +* [The NFLikelihood: an unsupervised DNNLikelihood from Normalizing Flows](https://arxiv.org/abs/2309.09743) +* [Applying Machine Learning Techniques to Searches for Lepton-Partner Pair-Production with Intermediate Mass Gaps at the Large Hadron Collider](https://arxiv.org/abs/2309.10197) +* [Autoencoder-based Anomaly Detection System for Online Data Quality Monitoring of the CMS Electromagnetic Calorimeter](https://arxiv.org/abs/2309.10157) diff --git a/make_md.py b/make_md.py index 9675a07..ab30f52 100644 --- a/make_md.py +++ b/make_md.py @@ -29,7 +29,7 @@ for file in myfile_readme,myfile_about: file.write(r"The purpose of this note is to collect references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. 
If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`.") - file.write("\n\nThis review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder.\n\n") + file.write("\n\nThis review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder.\n\n") ###Add buttons myfile_out.write("""\nExpand all sections\nCollapse all sections\n""")