diff --git a/HEPML.bib b/HEPML.bib index 504ba31..dd874aa 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -274,18 +274,7 @@ @inproceedings{Malara:2024zsj year = "2024" } -% October 18, 2024* -@article{Dutrieux:2024bgc, - author = "Dutrieux, Herv\'e and Meisgny, Thibaud and Mezrag, C\'edric and Moutarde, Herv\'e", - title = "{Proton internal pressure from deeply virtual Compton scattering on collider kinematics}", - eprint = "2410.13518", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "10", - year = "2024" -} - -% October 18, 2024* +% October 18, 2024 @article{Arganda:2024tqo, author = "Arganda, Ernesto and Carena, Marcela and de los Rios, Mart\'\i{}n and Perez, Andres D. and Rocha, Duncan and Sand\'a Seoane, Rosa M. and Wagner, Carlos E. M.", title = "{Machine-Learning Analysis of Radiative Decays to Dark Matter at the LHC}", @@ -297,7 +286,7 @@ @article{Arganda:2024tqo year = "2024" } -% October 18, 2024* +% October 18, 2024 @article{Yu:2024eog, author = {Yu, Felix J. and Kamp, Nicholas and Arg\"uelles, Carlos A.}, title = "{Learning Efficient Representations of Neutrino Telescope Events}", @@ -308,7 +297,7 @@ @article{Yu:2024eog year = "2024" } -% October 18, 2024* +% October 18, 2024 @article{BESIII:2024mgg, author = "{BESIII Collaboration}", title = "{Observation of a rare beta decay of the charmed baryon with a Graph Neural Network}", @@ -319,7 +308,7 @@ @article{BESIII:2024mgg year = "2024" } -% October 18, 2024* +% October 18, 2024 @article{Gerdes:2024rjk, author = "Gerdes, Mathis and de Haan, Pim and Bondesan, Roberto and Cheng, Miranda C. N.", title = "{Continuous normalizing flows for lattice gauge theories}", @@ -330,7 +319,7 @@ @article{Gerdes:2024rjk year = "2024" } -% October 16, 2024* +% October 16, 2024 @article{Tagami:2024gtc, author = "Tagami, Risako and Suehara, Taikan and Ishino, Masaya", title = "{Application of Particle Transformer to quark flavor tagging in the ILC project}", @@ -341,7 +330,7 @@ @article{Tagami:2024gtc year = "2024" } -% October 16, 2024* +% October 16, 2024 @article{Ek:2024fgd, author = "Ek, Carl Henrik and Kim, Oisin and Mishra, Challenger", title = "{Calabi-Yau metrics through Grassmannian learning and Donaldson's algorithm}", @@ -352,18 +341,7 @@ @article{Ek:2024fgd year = "2024" } -% October 15, 2024* -@article{Ma:2024lyt, - author = "Ma, Zhiqin and Zeng, Chunhua and Zhang, Yi-Cheng and Bury, Thomas M.", - title = "{Learning from the past: predicting critical transitions with machine learning trained on surrogates of historical data}", - eprint = "2410.09707", - archivePrefix = "arXiv", - primaryClass = "physics.data-an", - month = "10", - year = "2024" -} - -% October 10, 2024* +% October 10, 2024 @article{Verma:2024kdx, author = "Verma, Shivam and Biswas, Sanjoy and Mandal, Tanumoy and Mitra, Subhadip", title = "{Machine learning tagged boosted dark photon: A signature of fermionic portal matter at the LHC}", @@ -374,7 +352,7 @@ @article{Verma:2024kdx year = "2024" } -% October 10, 2024* +% October 10, 2024 @article{Ragoni:2024jhg, author = "Ragoni, Simone and Seger, Janet and Anson, Christopher and Tlusty, David", title = "{Machine learning opportunities for online and offline tagging of photo-induced and diffractive events in continuous readout experiments}", @@ -385,18 +363,7 @@ @article{Ragoni:2024jhg year = "2024" } -% October 10, 2024* -@article{Kim:2024car, - author = "Kim, Sejin and Kim, Kyung Kiu and Seo, Yunseok", - title = "{Phase Diagram from Nonlinear Interaction between Superconducting Order and Density: Toward 
Data-Based Holographic Superconductor}", - eprint = "2410.06523", - archivePrefix = "arXiv", - primaryClass = "hep-th", - month = "10", - year = "2024" -} - -% October 09, 2024* +% October 09, 2024 @article{Ma:2024qoa, author = "Ma, Xiaotian and Wu, Zuofei and Wu, Jinfei and Huang, Yanping and Li, Gang and Ruan, Manqi and Alves, F\'abio L. and Jin, Shan", title = "{Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC}", @@ -407,7 +374,7 @@ @article{Ma:2024qoa year = "2024" } -% October 08, 2024* +% October 08, 2024 @inproceedings{Wang:2024ykk, author = "Wang, Lingxiao and Doi, Takumi and Hatsuda, Tetsuo and Lyu, Yan", title = "{Building Hadron Potentials from Lattice QCD with Deep Neural Networks}", @@ -420,7 +387,7 @@ @inproceedings{Wang:2024ykk year = "2024" } -% October 07, 2024* +% October 07, 2024 @article{Bhimji:2024bcd, author = "Bhimji, Wahid and others", title = "{FAIR Universe HiggsML Uncertainty Challenge Competition}", @@ -431,7 +398,7 @@ @article{Bhimji:2024bcd year = "2024" } -% October 07, 2024* +% October 07, 2024 @article{Migala:2024ael, author = "Migala, Alexander and Ku, Eugene and Li, Zepeng and Li, Aobo", title = "{Real-time Position Reconstruction for the KamLAND-Zen Experiment using Hardware-AI Co-design}", @@ -442,7 +409,7 @@ @article{Migala:2024ael year = "2024" } -% October 07, 2024* +% October 07, 2024 @inproceedings{Badea:2024zoq, author = "Badea, Anthony and others", title = "{Intelligent Pixel Detectors: Towards a Radiation Hard ASIC with On-Chip Machine Learning in 28 nm CMOS}", diff --git a/HEPML.tex b/HEPML.tex index b7d9cc9..4770412 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -45,7 +45,7 @@ \item Modern reviews~\cite{Larkoski:2017jix,Guest:2018yhq,Albertsson:2018maf,Radovic:2018dip,Carleo:2019ptp,Bourilkov:2019yoi,Schwartz:2021ftp,Karagiorgi:2021ngt,Boehnlein:2021eym,Shanahan:2022ifi} \item Specialized reviews~\cite{Kasieczka:2019dbj,1807719,Shlomi:2020gdn,Psihas:2020pby,Butter:2020tvl,Forte:2020yip,Brehmer:2020cvb,Nachman:2020ccu,Duarte:2020ngm,Vlimant:2020enz,Cranmer:2019eaq,Rousseau:2020rnz,Kagan:2020yrm,Guan:2020bdl,deLima:2021fwm,Alanazi:2021grv,Baldi:2022okj,Viren:2022qon,Bogatskiy:2022hub,Butter:2022rso,Dvorkin:2022pwo,Adelmann:2022ozp,Thais:2022iok,Harris:2022qtm,Coadou:2022nsh,Benelli:2022sqn,Chen:2022pzc,Plehn:2022ftl,Cheng:2022idp,Huerta:2022kgj,Huber:2022lpm,Zhou:2023pti,DeZoort:2023vrm,Du:2023qst,Allaire:2023fgp,Hashemi:2023rgo,Belis:2023mqs,Araz:2023mda,Gooding:2024wpi,Kheddar:2024osf,Bardhan:2024zla,Mondal:2024nsa,Huetsch:2024quz,Ahmad:2024dql,Barman:2024wfx,Larkoski:2024uoc,Halverson:2024hax,Sahu:2024fzi,Duarte:2024lsg} \item Classical papers~\cite{Denby:1987rk,Lonnblad:1990bi} - \item Datasets~\cite{Kasieczka:2021xcg,Aarrestad:2021oeb,Benato:2021olt,Govorkova:2021hqu,Chen:2021euv,Qu:2022mxj,Eller:2023myr,Rusack:2023pob,Zoch:2024eyp} + \item Datasets~\cite{Kasieczka:2021xcg,Aarrestad:2021oeb,Benato:2021olt,Govorkova:2021hqu,Chen:2021euv,Qu:2022mxj,Eller:2023myr,Rusack:2023pob,Zoch:2024eyp,Bhimji:2024bcd} \end{itemize} \item \textbf{Classification} \\\textit{Given a feature space $x\in\mathbb{R}^n$, a binary classifier is a function $f:\mathbb{R}^n\rightarrow [0,1]$, where $0$ corresponds to features that are more characteristic of the zeroth class (e.g. background) and $1$ correspond to features that are more characteristic of the one class (e.g. signal). Typically, $f$ will be a function specified by some parameters $w$ (e.g. 
weights and biases of a neural network) that are determined by minimizing a loss of the form $L[f]=\sum_{i}\ell(f(x_i),y_i)$, where $y_i\in\{0,1\}$ are labels. The function $\ell$ is smaller when $f(x_i)$ and $y_i$ are closer. Two common loss functions are the mean squared error $\ell(x,y)=(x-y)^2$ and the binary cross entropy $\ell(x,y)=-y\log(x)-(1-y)\log(1-x)$. Exactly what `more characteristic of' means depends on the loss function used to determine $f$. It is also possible to make a multi-class classifier. A common strategy for the multi-class case is to represent each class as a different basis vector in $\mathbb{R}^{n_\text{classes}}$ and then $f(x)\in[0,1]^{n_\text{classes}}$. In this case, $f(x)$ is usually restricted to have its $n_\text{classes}$ components sum to one and the loss function is typically the cross entropy $\ell(x,y)=-\sum_\text{classes $i$} y_i\log(x_i)$.}
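To make the notation above concrete, the following minimal NumPy sketch trains a binary classifier $f$ with parameters $w$ (here a simple logistic model) by minimizing the binary cross entropy; the toy dataset, array names, and hyperparameters are illustrative assumptions, not anything prescribed by the review or its references.

```python
import numpy as np

# Toy data: features x_i in R^2 with labels y_i in {0, 1} (purely illustrative).
rng = np.random.default_rng(0)
x = rng.normal(size=(1000, 2))
y = (x[:, 0] + x[:, 1] > 0).astype(float)

def f(x, w, b):
    """Binary classifier f: R^n -> [0, 1], parameterized by weights w and bias b."""
    return 1.0 / (1.0 + np.exp(-(x @ w + b)))

def bce(p, y):
    """Binary cross entropy ell(f(x), y) = -y log f(x) - (1 - y) log(1 - f(x))."""
    eps = 1e-12  # guard against log(0)
    return -(y * np.log(p + eps) + (1.0 - y) * np.log(1.0 - p + eps))

# Determine w and b by minimizing L[f] = sum_i ell(f(x_i), y_i) with gradient descent.
w, b, lr = np.zeros(2), 0.0, 0.1
for _ in range(200):
    p = f(x, w, b)
    grad = p - y  # derivative of the per-event cross-entropy loss w.r.t. the logit
    w -= lr * (x.T @ grad) / len(y)
    b -= lr * grad.mean()

print("mean binary cross entropy:", bce(f(x, w, b), y).mean())
```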
@@ -63,7 +63,7 @@
\\\textit{Data that have a variable with a particular order may be represented as a sequence. Recurrent neural networks are natural tools for processing sequence data. }
\item \textbf{Trees}~\cite{Louppe:2017ipp,Cheng:2017rdo,Jercic:2021bfc,Dutta:2023jbz,Belfkir:2023vpo,Finke:2023ltw,Matousek:2024vpa,Choudhury:2024crp}
\\\textit{Recursive neural networks are natural tools for processing data in a tree structure.}
- \item \textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,Shlomi:2020gdn,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Mokhtar:2022pwm,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm,Yu:2023juh,Neu:2023sfh,Wang:2023cac,McEneaney:2023vwp,Liu:2023siw,GarciaPardinas:2023pmx,Duperrin:2023elp,BelleII:2023egc,Holmberg:2023rfr,Bhattacherjee:2023evs,Murnane:2023ksa,Konar:2023ptv,Chatterjee:2024pbp,Heinrich:2024tdf,Mo:2024dru,Lu:2024qrc,Birch-Sykes:2024gij,Belle-II:2024lwr,Pfeffer:2024tjl,Aurisano:2024uvd,Kobylianskii:2024sup,Aamir:2024lpz,Soybelman:2024mbv,Correia:2024ogc,Calafiura:2024qhv,CMS:2024xzb}
+ \item \textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,Shlomi:2020gdn,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Mokhtar:2022pwm,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm,Yu:2023juh,Neu:2023sfh,Wang:2023cac,McEneaney:2023vwp,Liu:2023siw,GarciaPardinas:2023pmx,Duperrin:2023elp,BelleII:2023egc,Holmberg:2023rfr,Bhattacherjee:2023evs,Murnane:2023ksa,Konar:2023ptv,Chatterjee:2024pbp,Heinrich:2024tdf,Mo:2024dru,Lu:2024qrc,Birch-Sykes:2024gij,Belle-II:2024lwr,Pfeffer:2024tjl,Aurisano:2024uvd,Kobylianskii:2024sup,Aamir:2024lpz,Soybelman:2024mbv,Correia:2024ogc,Calafiura:2024qhv,CMS:2024xzb,Ma:2024qoa,BESIII:2024mgg}
\\\textit{A graph is a collection of nodes and edges. Graph neural networks are natural tools for processing data in a graph structure.}
\item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Kach:2022uzq,Onyisi:2022hdh,Athanasakos:2023fhq,Kach:2023rqw,Badea:2023jdb,Buhmann:2023zgc,Acosta:2023nuw,Mondal:2023law,Hammad:2023sbd,Odagiu:2024bkp,Gambhir:2024dtf,Leigh:2024ked}
\\\textit{A point cloud is a (potentially variable-size) set of points in space. Sets are distinguished from sequences in that there is no particular order (i.e. permutation invariance). 
Sets can also be viewed as graphs without edges and so graph methods that can parse variable-length inputs may also be appropriate for set learning, although there are other methods as well.}
@@ -72,25 +72,25 @@
\end{itemize}
\item \textbf{Targets}
\begin{itemize}
- \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq,Grossi:2023fqq,Baron:2023yhw,Bogatskiy:2023nnw,Bose:2024pwc}
+ \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq,Grossi:2023fqq,Baron:2023yhw,Bogatskiy:2023nnw,Bose:2024pwc,Ma:2024qoa}
\\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.}
- \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk}
+ \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk,Ma:2024qoa,Tagami:2024gtc}
\\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) have unique challenges and opportunities compared with $W/Z$ tagging.}
- \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq,He:2023cfc,Shen:2023ofd,Dolan:2023abg,Blekman:2024wyf,Sandoval:2024ldp,Wu:2024thh}
+ \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq,He:2023cfc,Shen:2023ofd,Dolan:2023abg,Blekman:2024wyf,Sandoval:2024ldp,Wu:2024thh,Tagami:2024gtc}
\\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. 
This classification task has been a benchmark for many new machine learning models.}
\item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq,Keicher:2023mer,He:2023cfc,Bogatskiy:2023nnw,Shen:2023ofd,Isildak:2023dnf,Sahu:2023uwb,Baron:2023yhw,Bogatskiy:2023fug,Liu:2023dio,Batson:2023ohn,Furuichi:2023vdx,Ngairangbam:2023cps,Cai:2024xnt,Dong:2024xsg,Sahu:2024fzi}
\\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).}
- \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm,Kats:2024eaq}
+ \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm,Kats:2024eaq,Tagami:2024gtc}
\\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.}
\item \textbf{$b$-tagging}~\cite{Sirunyan:2017ezt,Guest:2016iqz,Keck:2018lcd,bielkov2020identifying,Bols:2020bkb,ATL-PHYS-PUB-2017-003,ATL-PHYS-PUB-2020-014,Liao:2022ufk,Stein:2023cnt,ATLAS:2023gog,Tamir:2023aiz,VanStroud:2023ggs,Song:2024aka}
\\\textit{Due to their long (but not too long) lifetime, $B$ hadrons travel a macroscopic distance before decaying, and $b$-jet tagging has been one of the earliest adopters of modern machine learning tools.}
\item \textbf{Flavor physics}~\cite{1811097,Bahtiyar:2022une,Zhang:2023czx,Nishimura:2023wdu,Smith:2023ssh,Tian:2024yfz,Chang:2024ksq,Co:2024bfl,Malekhosseini:2024eot,Chen:2024epd,Mansouri:2024uwc,Nishimura:2024apb}
\\\textit{This category is for studies related to exclusive particle decays, especially with bottom and charm hadrons.}
- \item \textbf{BSM particles and 
models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,ATLAS:2022ihe,CMS:2022idi,Ballabene:2022fms,ATLAS:2023mcc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia,Aguilar-Saavedra:2023pde,Cremer:2023gne,Esmail:2023axd,Choudhury:2023eje,Bhattacherjee:2023evs,Grefsrud:2023dad,Wang:2023pqx,Zhang:2023ykh,Hammad:2023wme,Hammad:2023sbd,Zhang:2024bld,Ma:2024deu,Jurciukonis:2024hlg,Chiang:2024pho,Birch-Sykes:2024gij,Ahmed:2024iqx,Esmail:2024gdc,Bickendorf:2024ovi,Wojcik:2024lfy,Grosso:2024wjt,Verma:2024kdx,Arganda:2024tqo} \\\textit{There are many proposals to train classifiers to enhance the presence of particular new physics models.} \item \textbf{Particle identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Keck:2018lcd,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum,Ryzhikov:2022lbu,Kushawaha:2023dms,Wu:2023pzn,Prasad:2023zdd,Lange:2023gbe,Novosel:2023cki,Charan:2023ldg,NA62:2023wzm,Karwowska:2023dhl,Song:2023ceh,Kasak:2023hhr,Ai:2024mkl} \\\textit{This is a generic category for direct particle identification and categorization using various detector technologies. 
Direct means that the particle directly interacts with the detector (in contrast with $b$-tagging).} - \item \textbf{Neutrino Detectors}~\cite{Aurisano:2016jvx,Acciarri:2016ryt,Hertel:DLPS2017,Adams:2018bvi,Domine:2019zhm,Aiello:2020orq,Adams:2020vlj,Domine:2020tlx,DUNE:2020gpm,DeepLearnPhysics:2020hut,Koh:2020snv,Yu:2020wxu,Psihas:2020pby,alonsomonsalve2020graph,Abratenko:2020pbp,Clerbaux:2020ttg,Liu:2020pzv,Abratenko:2020ocq,Chen:2020zkj,Qian:2021vnh,abbasi2021convolutional,Drielsma:2021jdv,Rossi:2021tjf,Hewes:2021heg,Acciarri:2021oav,Belavin:2021bxb,Maksimovic:2021dmz,Gavrikov:2021ktt,Garcia-Mendez:2021vts,Carloni:2021zbc,MicroBooNE:2021nss,MicroBooNE:2021ojx,Elkarghli:2020owr,DUNE:2022fiy,Lutkus:2022eou,Chappell:2022yxd,Bachlechner:2022cvf,Sogaard:2022qgg,IceCube:2022njh,Bai:2022lbv,Biassoni:2023lih,Yu:2023ehc,Mo:2024dru,Bat:2024gln,Aurisano:2024uvd,IceCube:2024xjj,Cai:2024bpv,Kopp:2024lch,Yu:2024ldv} + \item \textbf{Neutrino Detectors}~\cite{Aurisano:2016jvx,Acciarri:2016ryt,Hertel:DLPS2017,Adams:2018bvi,Domine:2019zhm,Aiello:2020orq,Adams:2020vlj,Domine:2020tlx,DUNE:2020gpm,DeepLearnPhysics:2020hut,Koh:2020snv,Yu:2020wxu,Psihas:2020pby,alonsomonsalve2020graph,Abratenko:2020pbp,Clerbaux:2020ttg,Liu:2020pzv,Abratenko:2020ocq,Chen:2020zkj,Qian:2021vnh,abbasi2021convolutional,Drielsma:2021jdv,Rossi:2021tjf,Hewes:2021heg,Acciarri:2021oav,Belavin:2021bxb,Maksimovic:2021dmz,Gavrikov:2021ktt,Garcia-Mendez:2021vts,Carloni:2021zbc,MicroBooNE:2021nss,MicroBooNE:2021ojx,Elkarghli:2020owr,DUNE:2022fiy,Lutkus:2022eou,Chappell:2022yxd,Bachlechner:2022cvf,Sogaard:2022qgg,IceCube:2022njh,Bai:2022lbv,Biassoni:2023lih,Yu:2023ehc,Mo:2024dru,Bat:2024gln,Aurisano:2024uvd,IceCube:2024xjj,Cai:2024bpv,Kopp:2024lch,Yu:2024ldv,Migala:2024ael,Yu:2024eog} \\\textit{Neutrino detectors are very large in order to have a sizable rate of neutrino detection. 
The entire neutrino interaction can be characterized to distinguish different neutrino flavors.} \item \textbf{Direct Dark Matter Detectors}~\cite{Ilyasov_2020,Akerib:2020aws,Khosa:2019qgp,Golovatiuk:2021lqn,McDonald:2021hus,Coarasa:2021fpv,Herrero-Garcia:2021goa,Liang:2021nsz,Li:2022tvg,Biassoni:2023lih,XENONCollaboration:2023dar,Ghrear:2024rku,Cerdeno:2024uqt} \\\textit{Dark matter detectors are similar to neutrino detectors, but aim to achieve `zero' background.} @@ -126,9 +126,9 @@ \item \textbf{Fast inference / deployment} \\\textit{There are many practical issues that can be critical for the actual application of machine learning models.} \begin{itemize} - \item \textbf{Software}~\cite{Strong:2020mge,Gligorov:2012qt,Weitekamp:DLPS2017,Nguyen:2018ugw,Bourgeois:2018nvk,1792136,Balazs:2021uhg,Rehm:2021zow,Mahesh:2021iph,Amrouche:2021tio,Pol:2021iqw,Goncharov:2021wvd,Saito:2021vpp,Jiang:2022zho,Garg:2022tal,Duarte:2022job,Guo:2023nfu,Tyson:2023zkx,DPHEP:2023blx,DiBello:2023kzc,Bal:2023bvt,Kauffman:2024bov,Held:2024gwj,CALICE:2024imr,Ivanov:2024whr,Bierlich:2024vqo,Pratiush:2024ltm} + \item \textbf{Software}~\cite{Strong:2020mge,Gligorov:2012qt,Weitekamp:DLPS2017,Nguyen:2018ugw,Bourgeois:2018nvk,1792136,Balazs:2021uhg,Rehm:2021zow,Mahesh:2021iph,Amrouche:2021tio,Pol:2021iqw,Goncharov:2021wvd,Saito:2021vpp,Jiang:2022zho,Garg:2022tal,Duarte:2022job,Guo:2023nfu,Tyson:2023zkx,DPHEP:2023blx,DiBello:2023kzc,Bal:2023bvt,Kauffman:2024bov,Held:2024gwj,CALICE:2024imr,Ivanov:2024whr,Bierlich:2024vqo,Pratiush:2024ltm,Ragoni:2024jhg,Yu:2024eog} \\\textit{Strategies for efficient inference for a given hardware architecture.} - \item \textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb,Cai:2023ldc,Herbst:2023lug,Coccaro:2023nol,Neu:2023sfh,Okabe:2023efz,Yaary:2023dvw,Schulte:2023gtt,Yoo:2023lxy,Grosso:2023owo,Jin:2023xts,Lin:2023xrw,Zipper:2023ybp,Delaney:2023swp,Dickinson:2023yes,CMS:2024twn,Bahr:2024dzg,Tiras:2024yzr,Parpillon:2024maz,Los:2024xzl,Zhu:2024ubz,Borella:2024mgs,Serhiayenka:2024han} + \item \textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb,Cai:2023ldc,Herbst:2023lug,Coccaro:2023nol,Neu:2023sfh,Okabe:2023efz,Yaary:2023dvw,Schulte:2023gtt,Yoo:2023lxy,Grosso:2023owo,Jin:2023xts,Lin:2023xrw,Zipper:2023ybp,Delaney:2023swp,Dickinson:2023yes,CMS:2024twn,Bahr:2024dzg,Tiras:2024yzr,Parpillon:2024maz,Los:2024xzl,Zhu:2024ubz,Borella:2024mgs,Serhiayenka:2024han,Badea:2024zoq,Migala:2024ael} \\\textit{Various accelerators have been studied for fast inference that is very important for latency-limited applications like the trigger at collider experiments.} \item \textbf{Deployment}~\cite{Kuznetsov:2020mcj,SunnebornGudnadottir:2021nhk,Holmberg:2023rfr,Savard:2023wwi,Bieringer:2024pzt,Li:2024uju} \\\textit{This category is for the deployment of machine learning interfaces, 
such as in the cloud.} @@ -149,7 +149,7 @@ \\\textit{The target features could be parameters of a model, which can be learned directly through a regression setup. Other forms of inference are described in later sections (which could also be viewed as regression).} \item \textbf{Parton Distribution Functions (and related)}~\cite{DelDebbio:2020rgv,Grigsby:2020auv,Rossi:2020sbh,Carrazza:2021hny,Ball:2021leu,Ball:2021xlu,Khalek:2021gon,Iranipour:2022iak,Gao:2022uhg,Gao:2022srd,Candido:2023utz,Wang:2023nab,Kassabov:2023hbm,Wang:2023poi,Fernando:2023obn,Rabemananjara:2023xfq,Kriesten:2023uoi,NNPDF:2024djq,NNPDF:2024dpb,DallOlio:2024vjv,Gombas:2024rvw,Costantini:2024xae,Bertone:2024taw,Soleymaninia:2024jam,Ochoa-Oregon:2024zgm,Barontini:2024dyb,Yan:2024yir,Liuti:2024umy,Kriesten:2024are,Chowdhury:2024ymm} \\\textit{Various machine learning models can provide flexible function approximators, which can be useful for modeling functions that cannot be determined easily from first principles such as parton distribution functions.} - \item \textbf{Lattice Gauge Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf,Singha:2023xxq,Riberdy:2023awf,Buzzicotti:2023qdv,Caselle:2023mvh,Detmold:2023kjm,Kashiwa:2023dfx,Ermann:2023unw,Albandea:2023ais,Alvestad:2023jgl,Tomiya:2023jdy,Wang:2023sry,Gao:2023uel,Soloveva:2023tvj,Holland:2023lfx,Gao:2023quv,Foreman:2023ymy,Lawrence:2023cft,Kanwar:2024ujc,Goswami:2024jlc,Holland:2024muu,Catumba:2024wxc,Chen:2024ckb,Boyle:2024nlh,Chu:2024swv,Bonanno:2024udh,Lin:2024eiz,Kim:2024rpd,Finkenrath:2024tdp,Abbott:2024knk,Bai:2024pii,Chen:2024mmd,Xu:2024tjp,Apte:2024vwn,Bachtis:2024vks,Cai:2024eqa,Jiang:2024vsr,Bachtis:2024dss,Gao:2024nzg,Luo:2024iwf,Gao:2024zdz,Rovira:2024aqd} + \item \textbf{Lattice Gauge Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf,Singha:2023xxq,Riberdy:2023awf,Buzzicotti:2023qdv,Caselle:2023mvh,Detmold:2023kjm,Kashiwa:2023dfx,Ermann:2023unw,Albandea:2023ais,Alvestad:2023jgl,Tomiya:2023jdy,Wang:2023sry,Gao:2023uel,Soloveva:2023tvj,Holland:2023lfx,Gao:2023quv,Foreman:2023ymy,Lawrence:2023cft,Kanwar:2024ujc,Goswami:2024jlc,Holland:2024muu,Catumba:2024wxc,Chen:2024ckb,Boyle:2024nlh,Chu:2024swv,Bonanno:2024udh,Lin:2024eiz,Kim:2024rpd,Finkenrath:2024tdp,Abbott:2024knk,Bai:2024pii,Chen:2024mmd,Xu:2024tjp,Apte:2024vwn,Bachtis:2024vks,Cai:2024eqa,Jiang:2024vsr,Bachtis:2024dss,Gao:2024nzg,Luo:2024iwf,Gao:2024zdz,Rovira:2024aqd,Wang:2024ykk,Gerdes:2024rjk} \\\textit{Lattice methods offer a complementary approach to 
perturbation theory. A key challenge is to create approaches that respect the local gauge symmetry (equivariant networks).} \item \textbf{Function Approximation}~\cite{1853982,Coccaro:2019lgs,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab,Fernando:2023obn,Reyes-Gonzalez:2023oei,Hirst:2024abn,Rovira:2024aqd} \\\textit{Approximating functions that obey certain (physical) constraints.} @@ -220,13 +220,13 @@ \\\textit{ML can also be utilized in formal theory.} \begin{itemize} \item Theory and physics for ML~\cite{Erbin:2022lls,Zuniga-Galindo:2023hty,Banta:2023kqe,Zuniga-Galindo:2023uwp,Kumar:2023hlu,Demirtas:2023fir,Halverson:2023ndu,Zhang:2024mcu} - \item ML for theory~\cite{Berglund:2022gvm,Erbin:2022rgx,Gerdes:2022nzr,Escalante-Notario:2022fik,Chen:2022jwd,Cheung:2022itk,He:2023csq,Lal:2023dkj,Dorrill:2023vox,Forestano:2023ijh,Dersy:2023job,Cotler:2023lem,Mizera:2023bsw,Gnech:2023prs,Seong:2023njx,Wojcik:2023usm,Alawadhi:2023gxa,Choi:2023rqg,Halverson:2023ndu,Matchev:2023mii,Lanza:2023vee,Erbin:2023ncy,Hirst:2023kdl,Ishiguro:2023hcv,Constantin:2024yxh,Berman:2024pax,Gukov:2024buj,Lanza:2024mqp,Hashimoto:2024aga,Orman:2024mpw,Bea:2024xgv,Balduf:2024gvv,Hou:2024vtx,Keita:2024skh,LopesCardoso:2024tol,Gukov:2024opc,Dao:2024zab,Cheung:2024svk,Bodendorfer:2024egw,Halverson:2024axc,Capuozzo:2024vdw,Bhat:2024agd} + \item ML for theory~\cite{Berglund:2022gvm,Erbin:2022rgx,Gerdes:2022nzr,Escalante-Notario:2022fik,Chen:2022jwd,Cheung:2022itk,He:2023csq,Lal:2023dkj,Dorrill:2023vox,Forestano:2023ijh,Dersy:2023job,Cotler:2023lem,Mizera:2023bsw,Gnech:2023prs,Seong:2023njx,Wojcik:2023usm,Alawadhi:2023gxa,Choi:2023rqg,Halverson:2023ndu,Matchev:2023mii,Lanza:2023vee,Erbin:2023ncy,Hirst:2023kdl,Ishiguro:2023hcv,Constantin:2024yxh,Berman:2024pax,Gukov:2024buj,Lanza:2024mqp,Hashimoto:2024aga,Orman:2024mpw,Bea:2024xgv,Balduf:2024gvv,Hou:2024vtx,Keita:2024skh,LopesCardoso:2024tol,Gukov:2024opc,Dao:2024zab,Cheung:2024svk,Bodendorfer:2024egw,Halverson:2024axc,Capuozzo:2024vdw,Bhat:2024agd,Ek:2024fgd} \end{itemize} \item \textbf{Experimental results} \\\textit{This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. 
We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.} \begin{itemize} \item Performance studies~\cite{CMS:2022prd,Yang:2022dwu,NEOS-II:2022mov,Jiang:2022zho,Gronroos:2023qff,ATLAS:2023zca,Palo:2023xnr,Karwowska:2024xqy,Kara:2024xkk} - \item Searches and measurements where ML reconstruction is a core component~\cite{Keck:2018lcd,CMS:2019dqq,MicroBooNE:2021nxr,MicroBooNE:2021jwr,ATLAS:2022ihe,CMS:2022idi,CMS:2022fxs,Li:2022gpb,Tran:2022ago,Manganelli:2022whv,CMS:2022wjc,ATLAS:2023mcc,ATLAS:2023hbp,ATLAS:2023vxg,ATLAS:2023qdu,ATLAS:2023bzb,ATLAS:2023sbu,ATLAS:2023dnm,NOvA:2023uxq,Gravili:2023hbp,Dutta:2023jbz,Belfkir:2023vpo,Tung:2023lkv,Akar:2023puf,BOREXINO:2023pcv,Vourliotis:2024bem,ATLAS:2024rcx,CMS:2024trg,ATLAS:2024ett,ATLAS:2024fdw,CMS:2024fkb,ATLAS:2024auw,CMS:2024vjn,ATLAS:2024itc,Belle-II:2024vvr,CMS:2024zqs,ATLAS:2024xxl,MicroBooNE:2024zhz,CALICE:2024jke,CMS:2024ddc,ATLAS:2024rua,CMS:2024xzb} + \item Searches and measurements where ML reconstruction is a core component~\cite{Keck:2018lcd,CMS:2019dqq,MicroBooNE:2021nxr,MicroBooNE:2021jwr,ATLAS:2022ihe,CMS:2022idi,CMS:2022fxs,Li:2022gpb,Tran:2022ago,Manganelli:2022whv,CMS:2022wjc,ATLAS:2023mcc,ATLAS:2023hbp,ATLAS:2023vxg,ATLAS:2023qdu,ATLAS:2023bzb,ATLAS:2023sbu,ATLAS:2023dnm,NOvA:2023uxq,Gravili:2023hbp,Dutta:2023jbz,Belfkir:2023vpo,Tung:2023lkv,Akar:2023puf,BOREXINO:2023pcv,Vourliotis:2024bem,ATLAS:2024rcx,CMS:2024trg,ATLAS:2024ett,ATLAS:2024fdw,CMS:2024fkb,ATLAS:2024auw,CMS:2024vjn,ATLAS:2024itc,Belle-II:2024vvr,CMS:2024zqs,ATLAS:2024xxl,MicroBooNE:2024zhz,CALICE:2024jke,CMS:2024ddc,ATLAS:2024rua,CMS:2024xzb,BESIII:2024mgg} \item Final analysis discriminate for searches~\cite{Aad:2019yxi,Aad:2020hzm,collaboration2020dijet,Sirunyan:2020hwz,Manganelli:2022whv}. 
\item Measurements using deep learning directly (not through object reconstruction)~\cite{H1:2021wkz,H1:2023fzk} \end{itemize} diff --git a/README.md b/README.md index aff0d52..2a97275 100644 --- a/README.md +++ b/README.md @@ -93,6 +93,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Public Kaggle Competition ''IceCube -- Neutrinos in Deep Ice''](https://arxiv.org/abs/2307.15289) (2023) * [Electron Energy Regression in the CMS High-Granularity Calorimeter Prototype](https://arxiv.org/abs/2309.06582) (2023) * [RODEM Jet Datasets](https://arxiv.org/abs/2408.11616) (2024) +* [FAIR Universe HiggsML Uncertainty Challenge Competition](https://arxiv.org/abs/2410.02867) (2024) ## Classification ### Parameterized classifiers @@ -239,6 +240,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Graph Neural Network-Based Track Finding in the LHCb Vertex Detector](https://arxiv.org/abs/2407.12119) (2024) * [EggNet: An Evolving Graph-based Graph Attention Network for Particle Track Reconstruction](https://arxiv.org/abs/2407.13925) (2024) * [Search for light long-lived particles decaying to displaced jets in proton-proton collisions at $\sqrt{s}$](https://arxiv.org/abs/2409.10806) (2024) +* [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) +* [Observation of a rare beta decay of the charmed baryon with a Graph Neural Network](https://arxiv.org/abs/2410.13515) (2024) #### Sets (point clouds) @@ -306,6 +309,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) (2023) * [Explainable Equivariant Neural Networks for Particle Physics: PELICAN](https://arxiv.org/abs/2307.16506) [[DOI](https://doi.org/10.1007/JHEP03(2024)113)] (2023) * [Interplay of Traditional Methods and Machine Learning Algorithms for Tagging Boosted Objects](https://arxiv.org/abs/2408.01138) [[DOI](https://doi.org/10.1140/epjs/s11734-024-01256-6)] (2024) +* [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) #### $H\rightarrow b\bar{b}$ @@ -320,6 +324,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Extracting Signals of Higgs Boson From Background Noise Using Deep Neural Networks](https://arxiv.org/abs/2010.08201) (2020) * [Learning to increase matching efficiency in identifying additional b-jets in the $\text{t}\bar{\text{t}}\text{b}\bar{\text{b}}$ process](https://arxiv.org/abs/2103.09129) [[DOI](https://doi.org/10.1140/epjp/s13360-022-03024-8)] (2021) * [Higgs tagging with the Lund jet plane](https://arxiv.org/abs/2105.03989) [[DOI](https://doi.org/10.1103/PhysRevD.104.055043)] (2021) +* [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) +* [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### quarks and gluons @@ -346,6 +352,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Jet Flavour Tagging at FCC-ee with a Transformer-based Neural Network: DeepJetTransformer](https://arxiv.org/abs/2406.08590) (2024) * [A multicategory jet image 
classification framework using deep neural network](https://arxiv.org/abs/2407.03524) (2024) * [Jet Tagging with More-Interaction Particle Transformer](https://arxiv.org/abs/2407.08682) [[DOI](https://doi.org/10.1088/1674-1137/ad7f3d)] (2024) +* [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### top quark tagging @@ -390,6 +397,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Maximum performance of strange-jet tagging at hadron colliders](https://arxiv.org/abs/2011.10736) [[DOI](https://doi.org/10.1088/1748-0221/16/08/P08039)] (2020) * [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams](https://arxiv.org/abs/2305.15106) [[DOI](https://doi.org/10.1140/epjc/s10052-023-12292-2)] (2023) * [From strange-quark tagging to fragmentation tagging with machine learning](https://arxiv.org/abs/2408.12377) (2024) +* [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### $b$-tagging @@ -500,6 +508,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Learning to see R-parity violating scalar top decays](https://arxiv.org/abs/2406.03096) [[DOI](https://doi.org/10.1103/PhysRevD.110.056006)] (2024) * [Graph Reinforcement Learning for Exploring BSM Model Spaces](https://arxiv.org/abs/2407.07203) (2024) * [Multiple testing for signal-agnostic searches of new physics with machine learning](https://arxiv.org/abs/2408.12296) (2024) +* [Machine learning tagged boosted dark photon: A signature of fermionic portal matter at the LHC](https://arxiv.org/abs/2410.06925) (2024) +* [Machine-Learning Analysis of Radiative Decays to Dark Matter at the LHC](https://arxiv.org/abs/2410.13799) (2024) #### Particle identification @@ -579,6 +589,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [RELICS: a REactor neutrino LIquid xenon Coherent elastic Scattering experiment](https://arxiv.org/abs/2405.05554) [[DOI](https://doi.org/10.1103/PhysRevD.110.072011)] (2024) * [Improving Neutrino Energy Reconstruction with Machine Learning](https://arxiv.org/abs/2405.15867) (2024) * [Enhancing Events in Neutrino Telescopes through Deep Learning-Driven Super-Resolution](https://arxiv.org/abs/2408.08474) (2024) +* [Real-time Position Reconstruction for the KamLAND-Zen Experiment using Hardware-AI Co-design](https://arxiv.org/abs/2410.02991) (2024) +* [Learning Efficient Representations of Neutrino Telescope Events](https://arxiv.org/abs/2410.13148) (2024) #### Direct Dark Matter Detectors @@ -940,6 +952,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [RootInteractive tool for multidimensional statistical analysis, machine learning and analytical model validation](https://arxiv.org/abs/2403.19330) [[DOI](https://doi.org/10.1051/epjconf/202429506019)] (2024) * [Robust Independent Validation of Experiment and Theory: Rivet version 4 release note](https://arxiv.org/abs/2404.15984) (2024) * [Implementing dynamic high-performance computing supported workflows on Scanning Transmission Electron Microscope](https://arxiv.org/abs/2406.11018) (2024) +* [Machine learning opportunities for online and offline tagging of photo-induced and diffractive events in continuous readout experiments](https://arxiv.org/abs/2410.06983) (2024) +* [Learning Efficient Representations of 
Neutrino Telescope Events](https://arxiv.org/abs/2410.13148) (2024) #### Hardware/firmware @@ -990,6 +1004,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Comparison of Geometrical Layouts for Next-Generation Large-volume Cherenkov Neutrino Telescopes](https://arxiv.org/abs/2407.19010) (2024) * [Ultra-low latency quantum-inspired machine learning predictors implemented on FPGA](https://arxiv.org/abs/2409.16075) (2024) * [Nanosecond hardware regression trees in FPGA at the LHC](https://arxiv.org/abs/2409.20506) (2024) +* [Intelligent Pixel Detectors: Towards a Radiation Hard ASIC with On-Chip Machine Learning in 28 nm CMOS](https://arxiv.org/abs/2410.02945) (2024) +* [Real-time Position Reconstruction for the KamLAND-Zen Experiment using Hardware-AI Co-design](https://arxiv.org/abs/2410.02991) (2024) #### Deployment @@ -1233,6 +1249,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Neural Network Modeling of Heavy-Quark Potential from Holography](https://arxiv.org/abs/2408.03784) (2024) * [Estimation of the pseudoscalar glueball mass based on a modified Transformer](https://arxiv.org/abs/2408.13280) (2024) * [A Variational Approach to Quantum Field Theory](https://arxiv.org/abs/2409.17887) (2024) +* [Building Hadron Potentials from Lattice QCD with Deep Neural Networks](https://arxiv.org/abs/2410.03082) (2024) +* [Continuous normalizing flows for lattice gauge theories](https://arxiv.org/abs/2410.13161) (2024) ### Function Approximation @@ -2004,6 +2022,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Conformal Fields from Neural Networks](https://arxiv.org/abs/2409.12222) (2024) * [Machine Learning Toric Duality in Brane Tilings](https://arxiv.org/abs/2409.15251) (2024) * [Bootstrapping string models with entanglement minimization and Machine-Learning](https://arxiv.org/abs/2409.18259) (2024) +* [Calabi-Yau metrics through Grassmannian learning and Donaldson's algorithm](https://arxiv.org/abs/2410.11284) (2024) ## Experimental results. *This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. 
We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.* @@ -2064,6 +2083,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Measurement of boosted Higgs bosons produced via vector boson fusion or gluon fusion in the H $\to$$\mathrm{b\bar{b}}$ decay mode using LHC proton-proton collision data at $\sqrt{s}$](https://arxiv.org/abs/2407.08012) (2024) * [Accuracy versus precision in boosted top tagging with the ATLAS detector](https://arxiv.org/abs/2407.20127) [[DOI](https://doi.org/10.1088/1748-0221/19/08/P08018)] (2024) * [Search for light long-lived particles decaying to displaced jets in proton-proton collisions at $\sqrt{s}$](https://arxiv.org/abs/2409.10806) (2024) +* [Observation of a rare beta decay of the charmed baryon with a Graph Neural Network](https://arxiv.org/abs/2410.13515) (2024) ### Final analysis discriminate for searches diff --git a/docs/assets/dark_per_year.png b/docs/assets/dark_per_year.png index 2cea77e..2703176 100644 Binary files a/docs/assets/dark_per_year.png and b/docs/assets/dark_per_year.png differ diff --git a/docs/assets/per_year.png b/docs/assets/per_year.png index f0e6849..9e06a60 100644 Binary files a/docs/assets/per_year.png and b/docs/assets/per_year.png differ diff --git a/docs/index.md b/docs/index.md index 8265718..0501e25 100644 --- a/docs/index.md +++ b/docs/index.md @@ -135,6 +135,7 @@ const expandElements = shouldExpand => { * [Public Kaggle Competition ''IceCube -- Neutrinos in Deep Ice''](https://arxiv.org/abs/2307.15289) (2023) * [Electron Energy Regression in the CMS High-Granularity Calorimeter Prototype](https://arxiv.org/abs/2309.06582) (2023) * [RODEM Jet Datasets](https://arxiv.org/abs/2408.11616) (2024) + * [FAIR Universe HiggsML Uncertainty Challenge Competition](https://arxiv.org/abs/2410.02867) (2024) ## Classification @@ -291,6 +292,8 @@ const expandElements = shouldExpand => { * [Graph Neural Network-Based Track Finding in the LHCb Vertex Detector](https://arxiv.org/abs/2407.12119) (2024) * [EggNet: An Evolving Graph-based Graph Attention Network for Particle Track Reconstruction](https://arxiv.org/abs/2407.13925) (2024) * [Search for light long-lived particles decaying to displaced jets in proton-proton collisions at $\sqrt{s}$](https://arxiv.org/abs/2409.10806) (2024) + * [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) + * [Observation of a rare beta decay of the charmed baryon with a Graph Neural Network](https://arxiv.org/abs/2410.13515) (2024) #### Sets (point clouds) @@ -363,6 +366,7 @@ const expandElements = shouldExpand => { * [Application of Machine Learning Based Top Quark and W Jet Tagging to Hadronic Four-Top Final States Induced by SM as well as BSM Processes](https://arxiv.org/abs/2310.13009) (2023) * [Explainable Equivariant Neural Networks for Particle Physics: PELICAN](https://arxiv.org/abs/2307.16506) [[DOI](https://doi.org/10.1007/JHEP03(2024)113)] (2023) * [Interplay of Traditional Methods and Machine Learning Algorithms for Tagging Boosted Objects](https://arxiv.org/abs/2408.01138) [[DOI](https://doi.org/10.1140/epjs/s11734-024-01256-6)] (2024) + * [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) #### $H\rightarrow b\bar{b}$ @@ -377,6 +381,8 @@ const expandElements = shouldExpand => { * [Extracting Signals of Higgs Boson From 
Background Noise Using Deep Neural Networks](https://arxiv.org/abs/2010.08201) (2020) * [Learning to increase matching efficiency in identifying additional b-jets in the $\text{t}\bar{\text{t}}\text{b}\bar{\text{b}}$ process](https://arxiv.org/abs/2103.09129) [[DOI](https://doi.org/10.1140/epjp/s13360-022-03024-8)] (2021) * [Higgs tagging with the Lund jet plane](https://arxiv.org/abs/2105.03989) [[DOI](https://doi.org/10.1103/PhysRevD.104.055043)] (2021) + * [Measurements of decay branching fractions of the Higgs boson to hadronic final states at the CEPC](https://arxiv.org/abs/2410.04465) (2024) + * [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### quarks and gluons @@ -403,6 +409,7 @@ const expandElements = shouldExpand => { * [Jet Flavour Tagging at FCC-ee with a Transformer-based Neural Network: DeepJetTransformer](https://arxiv.org/abs/2406.08590) (2024) * [A multicategory jet image classification framework using deep neural network](https://arxiv.org/abs/2407.03524) (2024) * [Jet Tagging with More-Interaction Particle Transformer](https://arxiv.org/abs/2407.08682) [[DOI](https://doi.org/10.1088/1674-1137/ad7f3d)] (2024) + * [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### top quark tagging @@ -447,6 +454,7 @@ const expandElements = shouldExpand => { * [Maximum performance of strange-jet tagging at hadron colliders](https://arxiv.org/abs/2011.10736) [[DOI](https://doi.org/10.1088/1748-0221/16/08/P08039)] (2020) * [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams](https://arxiv.org/abs/2305.15106) [[DOI](https://doi.org/10.1140/epjc/s10052-023-12292-2)] (2023) * [From strange-quark tagging to fragmentation tagging with machine learning](https://arxiv.org/abs/2408.12377) (2024) + * [Application of Particle Transformer to quark flavor tagging in the ILC project](https://arxiv.org/abs/2410.11322) (2024) #### $b$-tagging @@ -557,6 +565,8 @@ const expandElements = shouldExpand => { * [Learning to see R-parity violating scalar top decays](https://arxiv.org/abs/2406.03096) [[DOI](https://doi.org/10.1103/PhysRevD.110.056006)] (2024) * [Graph Reinforcement Learning for Exploring BSM Model Spaces](https://arxiv.org/abs/2407.07203) (2024) * [Multiple testing for signal-agnostic searches of new physics with machine learning](https://arxiv.org/abs/2408.12296) (2024) + * [Machine learning tagged boosted dark photon: A signature of fermionic portal matter at the LHC](https://arxiv.org/abs/2410.06925) (2024) + * [Machine-Learning Analysis of Radiative Decays to Dark Matter at the LHC](https://arxiv.org/abs/2410.13799) (2024) #### Particle identification @@ -636,6 +646,8 @@ const expandElements = shouldExpand => { * [RELICS: a REactor neutrino LIquid xenon Coherent elastic Scattering experiment](https://arxiv.org/abs/2405.05554) [[DOI](https://doi.org/10.1103/PhysRevD.110.072011)] (2024) * [Improving Neutrino Energy Reconstruction with Machine Learning](https://arxiv.org/abs/2405.15867) (2024) * [Enhancing Events in Neutrino Telescopes through Deep Learning-Driven Super-Resolution](https://arxiv.org/abs/2408.08474) (2024) + * [Real-time Position Reconstruction for the KamLAND-Zen Experiment using Hardware-AI Co-design](https://arxiv.org/abs/2410.02991) (2024) + * [Learning Efficient Representations of Neutrino Telescope Events](https://arxiv.org/abs/2410.13148) (2024) 
#### Direct Dark Matter Detectors @@ -1007,6 +1019,8 @@ const expandElements = shouldExpand => { * [RootInteractive tool for multidimensional statistical analysis, machine learning and analytical model validation](https://arxiv.org/abs/2403.19330) [[DOI](https://doi.org/10.1051/epjconf/202429506019)] (2024) * [Robust Independent Validation of Experiment and Theory: Rivet version 4 release note](https://arxiv.org/abs/2404.15984) (2024) * [Implementing dynamic high-performance computing supported workflows on Scanning Transmission Electron Microscope](https://arxiv.org/abs/2406.11018) (2024) + * [Machine learning opportunities for online and offline tagging of photo-induced and diffractive events in continuous readout experiments](https://arxiv.org/abs/2410.06983) (2024) + * [Learning Efficient Representations of Neutrino Telescope Events](https://arxiv.org/abs/2410.13148) (2024) #### Hardware/firmware @@ -1057,6 +1071,8 @@ const expandElements = shouldExpand => { * [Comparison of Geometrical Layouts for Next-Generation Large-volume Cherenkov Neutrino Telescopes](https://arxiv.org/abs/2407.19010) (2024) * [Ultra-low latency quantum-inspired machine learning predictors implemented on FPGA](https://arxiv.org/abs/2409.16075) (2024) * [Nanosecond hardware regression trees in FPGA at the LHC](https://arxiv.org/abs/2409.20506) (2024) + * [Intelligent Pixel Detectors: Towards a Radiation Hard ASIC with On-Chip Machine Learning in 28 nm CMOS](https://arxiv.org/abs/2410.02945) (2024) + * [Real-time Position Reconstruction for the KamLAND-Zen Experiment using Hardware-AI Co-design](https://arxiv.org/abs/2410.02991) (2024) #### Deployment @@ -1335,6 +1351,8 @@ const expandElements = shouldExpand => { * [Neural Network Modeling of Heavy-Quark Potential from Holography](https://arxiv.org/abs/2408.03784) (2024) * [Estimation of the pseudoscalar glueball mass based on a modified Transformer](https://arxiv.org/abs/2408.13280) (2024) * [A Variational Approach to Quantum Field Theory](https://arxiv.org/abs/2409.17887) (2024) + * [Building Hadron Potentials from Lattice QCD with Deep Neural Networks](https://arxiv.org/abs/2410.03082) (2024) + * [Continuous normalizing flows for lattice gauge theories](https://arxiv.org/abs/2410.13161) (2024) ??? example "Function Approximation" @@ -2238,6 +2256,7 @@ const expandElements = shouldExpand => { * [Conformal Fields from Neural Networks](https://arxiv.org/abs/2409.12222) (2024) * [Machine Learning Toric Duality in Brane Tilings](https://arxiv.org/abs/2409.15251) (2024) * [Bootstrapping string models with entanglement minimization and Machine-Learning](https://arxiv.org/abs/2409.18259) (2024) + * [Calabi-Yau metrics through Grassmannian learning and Donaldson's algorithm](https://arxiv.org/abs/2410.11284) (2024) ## Experimental results. *This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. 
We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.* @@ -2308,6 +2327,7 @@ const expandElements = shouldExpand => { * [Measurement of boosted Higgs bosons produced via vector boson fusion or gluon fusion in the H $\to$$\mathrm{b\bar{b}}$ decay mode using LHC proton-proton collision data at $\sqrt{s}$](https://arxiv.org/abs/2407.08012) (2024) * [Accuracy versus precision in boosted top tagging with the ATLAS detector](https://arxiv.org/abs/2407.20127) [[DOI](https://doi.org/10.1088/1748-0221/19/08/P08018)] (2024) * [Search for light long-lived particles decaying to displaced jets in proton-proton collisions at $\sqrt{s}$](https://arxiv.org/abs/2409.10806) (2024) + * [Observation of a rare beta decay of the charmed baryon with a Graph Neural Network](https://arxiv.org/abs/2410.13515) (2024) ??? example "Final analysis discriminate for searches"