
Commit

Update until Nov 08
ramonpeter committed Nov 11, 2024
1 parent c2fb5ae commit 3338ad3
Showing 7 changed files with 177 additions and 8 deletions.
101 changes: 101 additions & 0 deletions HEPML.bib
@@ -1,5 +1,95 @@
# HEPML Papers

% November 08, 2024
@article{Cornell:2024dki,
author = "Cornell, Alan S. and Fuks, Benjamin and Goodsell, Mark D. and Ncube, Anele M.",
title = "{Improving smuon searches with Neural Networks}",
eprint = "2411.04526",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}

% November 06, 2024
@article{Heller:2024onk,
author = "Heller, Nick and Ilten, Phil and Menzo, Tony and Mrenna, Stephen and Nachman, Benjamin and Siodmok, Andrzej and Szewc, Manuel and Youssef, Ahmed",
title = "{Rejection Sampling with Autodifferentiation -- Case study: Fitting a Hadronization Model}",
eprint = "2411.02194",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
reportNumber = "FERMILAB-PUB-24-0784-CSAID, MCNET-24-18",
month = "11",
year = "2024"
}

% November 06, 2024
@article{Geuskens:2024tfo,
author = {Geuskens, Joep and Gite, Nishank and Kr\"amer, Michael and Mikuni, Vinicius and M\"uck, Alexander and Nachman, Benjamin and Reyes-Gonz\'alez, Humberto},
title = "{The Fundamental Limit of Jet Tagging}",
eprint = "2411.02628",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}

% November 06, 2024
@article{Butter:2024vbx,
author = "Butter, Anja and Diefenbacher, Sascha and Huetsch, Nathan and Mikuni, Vinicius and Nachman, Benjamin and Palacios Schweitzer, Sofia and Plehn, Tilman",
title = "{Generative Unfolding with Distribution Mapping}",
eprint = "2411.02495",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}

% November 06, 2024
@article{Terin:2024iyy,
author = "Terin, Rodrigo Carmo",
title = "{Physics-informed neural networks viewpoint for solving the Dyson-Schwinger equations of quantum electrodynamics}",
eprint = "2411.02177",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}

% November 06, 2024
@article{Greljo:2024ytg,
author = "Greljo, Admir and Tiblom, Hector and Valenti, Alessandro",
title = "{New Physics Through Flavor Tagging at FCC-ee}",
eprint = "2411.02485",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}


% November 05, 2024
@article{Li:2024akn,
author = "Li, Peng-Cheng and Bi, Xiao-Xue and Zhang, Zhen and Deng, Xiao-Bao and Li, Chun and Wang, Li-Wen and Liu, Gong-Fa and Zhang, Yi and Zhou, Ai-Yu and Liu, Yu",
title = "{A versatile framework for attitude tuning of beamlines at advanced light sources}",
eprint = "2411.01278",
archivePrefix = "arXiv",
primaryClass = "physics.ins-det",
month = "11",
year = "2024"
}

% November 05, 2024
@article{Heimel:2024drk,
author = "Heimel, Theo and Plehn, Tilman and Schmal, Nikita",
title = "{Profile Likelihoods on ML-Steroids}",
eprint = "2411.00942",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "11",
year = "2024"
}

% November 04, 2024
@article{Brehmer:2024yqw,
author = "Brehmer, Johann and Bres\'o, V\'\i{}ctor and de Haan, Pim and Plehn, Tilman and Qu, Huilin and Spinner, Jonas and Thaler, Jesse",
@@ -328,6 +418,17 @@ @article{Ragoni:2024jhg
year = "2024"
}

% October 09, 2024
@article{Bahl:2024meb,
author = "Bahl, Henning and Bres\'o, Victor and De Crescenzo, Giovanni and Plehn, Tilman",
title = "{Advancing Tools for Simulation-Based Inference}",
eprint = "2410.07315",
archivePrefix = "arXiv",
primaryClass = "hep-ph",
month = "10",
year = "2024"
}

% October 09, 2024
@article{Ma:2024qoa,
author = "Ma, Xiaotian and Wu, Zuofei and Wu, Jinfei and Huang, Yanping and Li, Gang and Ruan, Manqi and Alves, F\'abio L. and Jin, Shan",
18 changes: 10 additions & 8 deletions HEPML.tex
@@ -76,17 +76,17 @@
\\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.}
\item \textbf{$H\rightarrow b\bar{b}$}~\cite{Tagami:2024gtc,Ma:2024qoa,Khosa:2021cyk,Jang:2021eph,Abbas:2020khd,guo2020boosted,Tannenwald:2020mhq,Chung:2020ysf,Sirunyan:2020lcu,Chakraborty:2019imr,Moreno:2019neq,Lin:2018cin,Datta:2019ndh}
\\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) have unique challenges and opportunities compared with $W/Z$ tagging.}
\item \textbf{quarks and gluons}~\cite{Brehmer:2024yqw,Tagami:2024gtc,Wu:2024thh,Sandoval:2024ldp,Blekman:2024wyf,Dolan:2023abg,Shen:2023ofd,He:2023cfc,Athanasakos:2023fhq,CrispimRomao:2023ssj,Bright-Thonney:2022xkx,Dreyer:2021hhr,Filipek:2021qbe,Romero:2021qlf,Dreyer:2020brq,Lee:2019cad,Lee:2019ssx,1806025,Kasieczka:2018lwf,Moreno:2019bmu,Chien:2018dfn,Stoye:DLPS2017,Cheng:2017rdo,Komiske:2016rsd,ATL-PHYS-PUB-2017-017}
\item \textbf{quarks and gluons}~\cite{Geuskens:2024tfo,Brehmer:2024yqw,Tagami:2024gtc,Wu:2024thh,Sandoval:2024ldp,Blekman:2024wyf,Dolan:2023abg,Shen:2023ofd,He:2023cfc,Athanasakos:2023fhq,CrispimRomao:2023ssj,Bright-Thonney:2022xkx,Dreyer:2021hhr,Filipek:2021qbe,Romero:2021qlf,Dreyer:2020brq,Lee:2019cad,Lee:2019ssx,1806025,Kasieczka:2018lwf,Moreno:2019bmu,Chien:2018dfn,Stoye:DLPS2017,Cheng:2017rdo,Komiske:2016rsd,ATL-PHYS-PUB-2017-017}
\\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. This classification task has been a benchmark for many new machine learning models.}
\item \textbf{top quark} tagging~\cite{Brehmer:2024yqw,Larkoski:2024hfe,Kvita:2024ooa,Sahu:2024fzi,Dong:2024xsg,Cai:2024xnt,Ngairangbam:2023cps,Furuichi:2023vdx,Batson:2023ohn,Liu:2023dio,Bogatskiy:2023fug,Baron:2023yhw,Sahu:2023uwb,Isildak:2023dnf,Shen:2023ofd,Bogatskiy:2023nnw,He:2023cfc,Keicher:2023mer,Choi:2023slq,Bhattacherjee:2022gjq,Munoz:2022gjq,Ahmed:2022hct,Dreyer:2022yom,Andrews:2021ejw,Aguilar-Saavedra:2021rjk,Dreyer:2020brq,Lim:2020igi,Bhattacharya:2020vzu,Macaluso:2018tck,Kasieczka:2017nvn,Butter:2017cot,Diefenbacher:2019ezd,Chakraborty:2020yfc,Kasieczka:2019dbj,Stoye:DLPS2017,Almeida:2015jua}
\\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).}
\item \textbf{strange jets}~\cite{Tagami:2024gtc,Kats:2024eaq,Subba:2023rpm,Erdmann:2020ovh,Erdmann:2019blf,Nakai:2020kuu}
\item \textbf{strange jets}~\cite{Greljo:2024ytg,Tagami:2024gtc,Kats:2024eaq,Subba:2023rpm,Erdmann:2020ovh,Erdmann:2019blf,Nakai:2020kuu}
\\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.}
\item \textbf{$b$-tagging}~\cite{Malara:2024zsj,Song:2024aka,VanStroud:2023ggs,Tamir:2023aiz,ATLAS:2023gog,Stein:2023cnt,Liao:2022ufk,ATL-PHYS-PUB-2020-014,ATL-PHYS-PUB-2017-003,Bols:2020bkb,bielkov2020identifying,Keck:2018lcd,Guest:2016iqz,Sirunyan:2017ezt}
\\\textit{Due to their long (but not too long) lifetime, $B$-hadrons travel a macroscopic distance before decaying, and $b$-jet tagging has been one of the earliest adopters of modern machine learning tools.}
\item \textbf{Flavor physics}~\cite{Nishimura:2024apb,Mansouri:2024uwc,Chen:2024epd,Malekhosseini:2024eot,Co:2024bfl,Chang:2024ksq,Tian:2024yfz,Smith:2023ssh,Nishimura:2023wdu,Zhang:2023czx,Bahtiyar:2022une,1811097}
\\\textit{This category is for studies related to exclusive particle decays, especially with bottom and charm hadrons.}
\item \textbf{BSM particles and models}~\cite{Arganda:2024tqo,Verma:2024kdx,Grosso:2024wjt,Wojcik:2024lfy,Bickendorf:2024ovi,Esmail:2024gdc,Ahmed:2024iqx,Birch-Sykes:2024gij,Chiang:2024pho,Jurciukonis:2024hlg,Ma:2024deu,Zhang:2024bld,Hammad:2023sbd,Hammad:2023wme,Zhang:2023ykh,Wang:2023pqx,Grefsrud:2023dad,Bhattacherjee:2023evs,Choudhury:2023eje,Esmail:2023axd,Cremer:2023gne,Aguilar-Saavedra:2023pde,Bardhan:2023mia,Flacke:2023eil,Lu:2023gjk,Guo:2023jkz,Dong:2023nir,MB:2023edk,Pedro:2023sdp,Liu:2023gpt,Palit:2023dvs,ATLAS:2023mcc,Ballabene:2022fms,CMS:2022idi,ATLAS:2022ihe,Bhattacharyya:2022umc,Bardhan:2022sif,Bhattacharya:2022kje,Faucett:2022zie,Hall:2022bme,Chiang:2022lsn,Barbosa:2022mmw,Alasfar:2022vqw,Yang:2022fhw,Ai:2022qvs,Lv:2022pme,Goodsell:2022beo,Freitas:2022cno,Badea:2022dzb,Konar:2022bgc,Feng:2021eke,Beauchesne:2021qrw,Vidal:2021oed,Cornell:2021gut,Drees:2021oew,Jung:2021tym,Morais:2021ead,Alvestad:2021sje,Yang:2021gge,Barron:2021btf,Ren:2021prq,Jorge:2021vpo,Arganda:2021azw,Stakia:2021pvp,Freitas:2019hbk,Khosa:2019kxd,Freitas:2020ttd,Englert:2020ntw,Ngairangbam:2020ksz,Grossi:2020orx,Cogollo:2020afo,Chang:2020rtc,1801423,1792136,10.1088/2632-2153/ab9023,Chakraborty:2019imr,Baldi:2014kfa,Datta:2019ndh}
\item \textbf{BSM particles and models}~\cite{Cornell:2024dki,Arganda:2024tqo,Verma:2024kdx,Grosso:2024wjt,Wojcik:2024lfy,Bickendorf:2024ovi,Esmail:2024gdc,Ahmed:2024iqx,Birch-Sykes:2024gij,Chiang:2024pho,Jurciukonis:2024hlg,Ma:2024deu,Zhang:2024bld,Hammad:2023sbd,Hammad:2023wme,Zhang:2023ykh,Wang:2023pqx,Grefsrud:2023dad,Bhattacherjee:2023evs,Choudhury:2023eje,Esmail:2023axd,Cremer:2023gne,Aguilar-Saavedra:2023pde,Bardhan:2023mia,Flacke:2023eil,Lu:2023gjk,Guo:2023jkz,Dong:2023nir,MB:2023edk,Pedro:2023sdp,Liu:2023gpt,Palit:2023dvs,ATLAS:2023mcc,Ballabene:2022fms,CMS:2022idi,ATLAS:2022ihe,Bhattacharyya:2022umc,Bardhan:2022sif,Bhattacharya:2022kje,Faucett:2022zie,Hall:2022bme,Chiang:2022lsn,Barbosa:2022mmw,Alasfar:2022vqw,Yang:2022fhw,Ai:2022qvs,Lv:2022pme,Goodsell:2022beo,Freitas:2022cno,Badea:2022dzb,Konar:2022bgc,Feng:2021eke,Beauchesne:2021qrw,Vidal:2021oed,Cornell:2021gut,Drees:2021oew,Jung:2021tym,Morais:2021ead,Alvestad:2021sje,Yang:2021gge,Barron:2021btf,Ren:2021prq,Jorge:2021vpo,Arganda:2021azw,Stakia:2021pvp,Freitas:2019hbk,Khosa:2019kxd,Freitas:2020ttd,Englert:2020ntw,Ngairangbam:2020ksz,Grossi:2020orx,Cogollo:2020afo,Chang:2020rtc,1801423,1792136,10.1088/2632-2153/ab9023,Chakraborty:2019imr,Baldi:2014kfa,Datta:2019ndh}
\\\textit{There are many proposals to train classifiers to enhance the presence of particular new physics models.}
\item \textbf{Particle identification}~\cite{Ai:2024mkl,Kasak:2023hhr,Song:2023ceh,Karwowska:2023dhl,NA62:2023wzm,Charan:2023ldg,Novosel:2023cki,Lange:2023gbe,Prasad:2023zdd,Wu:2023pzn,Kushawaha:2023dms,Ryzhikov:2022lbu,Dimitrova:2022uum,Fanelli:2022ifa,Graczykowski:2022zae,Graziani:2021vai,Verma:2021ixg,Collado:2020fwm,Qasim:2019otl,Belayneh:2019vyx,Keck:2018lcd,Hooberman:DLPS2017,Paganini:DLPS2017,deOliveira:2018lqd}
\\\textit{This is a generic category for direct particle identification and categorization using various detector technologies. Direct means that the particle directly interacts with the detector (in contrast with $b$-tagging).}
@@ -155,11 +155,13 @@
\\\textit{Approximating functions that obey certain (physical) constraints.}
\item \textbf{Symbolic Regression}~\cite{Cushman:2024jgi,Wang:2023poi,Lu:2022joy,Zhang:2022uqk,Butter:2021rvz}
\\\textit{Regression where the result is a (relatively) simple formula.}
\item \textbf{Monitoring}~\cite{Cushman:2024jgi,Shutt:2024che,CMSECAL:2023fvz,Das:2023ktd,Harilal:2023smf,Chen:2023cim,Joshi:2023btt,CMSMuon:2023czf,Matha:2023tmf,Mukund:2023oyy}
\item \textbf{Monitoring}~\cite{Li:2024akn,Cushman:2024jgi,Shutt:2024che,CMSECAL:2023fvz,Das:2023ktd,Harilal:2023smf,Chen:2023cim,Joshi:2023btt,CMSMuon:2023czf,Matha:2023tmf,Mukund:2023oyy}
\\\textit{Regression models can be used to monitor experimental setups and sensors.}
\end{itemize}
\item \textbf{Equivariant networks}~\cite{Brehmer:2024yqw,Maitre:2024hzp,Hendi:2024yin,Cruz:2024grk,Spinner:2024hjm,Bhardwaj:2024wrf,Sahu:2024sts,Bhardwaj:2024djv,Chatterjee:2024pbp,Bressler:2024wzc,Gu:2024lrz,Bright-Thonney:2023gdl,Bogatskiy:2023nnw,Murnane:2023kfm,Lehner:2023prf,Forestano:2023qcy,Buhmann:2023pmh,Aronsson:2023rli,Forestano:2023fpj,Lehner:2023bba,Hao:2022zns,Bogatskiy:2022czk,Favoni:2022mcg,Bogatskiy:2022hub,Shi:2022yqw,Gong:2022lye,Bulusu:2021njs,Favoni:2020reg,Dolan:2020qkr,Kanwar:2003.06413}
\\\textit{It is often the case that implementing equivariance or learning symmetries in a model better describes the physics and improves performance.}
\item \textbf{Physics-informed neural networks (PINNs)}~\cite{Terin:2024iyy,Vatellis:2024vjl,Panahi:2024sfb}
\\\textit{Physics-informed neural networks are universal function approximators that embed the physical laws governing a given data set, typically expressed as partial differential equations (PDEs), into the learning process (a minimal form of such a training objective is sketched below).}
\item \textbf{Decorrelation methods}~\cite{Algren:2023spv,Rabusov:2022woa,Das:2022cjl,Klein:2022hdv,Mikuni:2021nwn,Dolan:2021pml,Ghosh:2021hrh,Kitouni:2020xgb,Kasieczka:2020pil,clavijo2020adversarial,10.1088/2632-2153/ab9023,Rogozhnikov:2014zea,Wunsch:2019qbo,Englert:2018cfo,Xia:2018kgd,DiscoFever,ATL-PHYS-PUB-2018-014,Bradshaw:2019ipy,Shimmin:2017mfk,Stevens:2013dya,Moult:2017okx,Dolen:2016kst,Louppe:2016ylz}
\\\textit{It is sometimes the case that a classification or regression model needs to be independent of a set of features (usually a mass-like variable) in order to estimate the background or otherwise reduce the uncertainty. These techniques are related to what the machine learning literature calls model `fairness'.}
\item \textbf{Generative models / density estimation}
@@ -193,15 +195,15 @@
\item \textbf{Simulation-based (`likelihood-free') Inference}
\\\textit{Likelihood-based inference is the case where $p(x|\theta)$ is known and $\theta$ can be determined by maximizing the probability of the data. In high energy physics, $p(x|\theta)$ is often not known analytically, but it is often possible to sample from the density implicitly using simulations.}
\begin{itemize}
\item \textbf{Parameter estimation}~\cite{Maitre:2024hzp,JETSCAPE:2024cqe,Mastandrea:2024irf,Diaz:2024yfu,Alvarez:2024owq,Chatterjee:2024pbp,Chai:2024zyl,Heimel:2023mvw,Espejo:2023wzf,Barrue:2023ysk,Morandini:2023pwj,Erdogan:2023uws,Breitenmoser:2023tmi,Heinrich:2023bmt,Rizvi:2023mws,Neubauer:2022gbu,Butter:2022vkj,Arganda:2022zbs,Kong:2022rnd,Arganda:2022qzy,Bahl:2021dnc,Barman:2021yfh,Mishra-Sharma:2021oxe,NEURIPS2020_a878dbeb,Chatterjee:2021nms,Nachman:2021yvi,Bieringer:2020tnw,Flesher:2020kuy,Coogan:2020yux,Andreassen:2020gtw,Cranmer:2015bka,Brehmer:2018hga,Brehmer:2019xox,Brehmer:2018eca,Brehmer:2018kdj,Hollingsworth:2020kjg,Stoye:2018ovl,Andreassen:2019nnm}
\item \textbf{Parameter estimation}~\cite{Heimel:2024drk,Maitre:2024hzp,Bahl:2024meb,JETSCAPE:2024cqe,Mastandrea:2024irf,Diaz:2024yfu,Alvarez:2024owq,Chatterjee:2024pbp,Chai:2024zyl,Heimel:2023mvw,Espejo:2023wzf,Barrue:2023ysk,Morandini:2023pwj,Erdogan:2023uws,Breitenmoser:2023tmi,Heinrich:2023bmt,Rizvi:2023mws,Neubauer:2022gbu,Butter:2022vkj,Arganda:2022zbs,Kong:2022rnd,Arganda:2022qzy,Bahl:2021dnc,Barman:2021yfh,Mishra-Sharma:2021oxe,NEURIPS2020_a878dbeb,Chatterjee:2021nms,Nachman:2021yvi,Bieringer:2020tnw,Flesher:2020kuy,Coogan:2020yux,Andreassen:2020gtw,Cranmer:2015bka,Brehmer:2018hga,Brehmer:2019xox,Brehmer:2018eca,Brehmer:2018kdj,Hollingsworth:2020kjg,Stoye:2018ovl,Andreassen:2019nnm}
\\\textit{This can also be viewed as a regression problem, but there the goal is typically to do maximum likelihood estimation in contrast to directly minimizing the mean squared error between a function and the target.}
\item \textbf{Unfolding}~\cite{Duarte:2024lsg,Zhu:2024drd,Desai:2024kpd,Huetsch:2024quz,Shmakov:2024gkd,Shmakov:2023kjj,Chan:2023tbf,Backes:2022vmn,Arratia:2022wny,Wong:2021zvv,Arratia:2021otl,H1:2021wkz,Komiske:2021vym,Andreassen:2021zzk,Baron:2021vvl,Howard:2021pos,Vandegar:2020yvw,1800956,Zech2003BinningFreeUB,Lindemann:1995ut,Martschei:2012pr,Glazov:2017vni,Gagunashvili:2010zw,Bellagente:2019uyp,Datta:2018mwd,Andreassen:2019cjw,Mieskolainen:2018fhf}
\item \textbf{Unfolding}~\cite{Butter:2024vbx,Duarte:2024lsg,Zhu:2024drd,Desai:2024kpd,Huetsch:2024quz,Shmakov:2024gkd,Shmakov:2023kjj,Chan:2023tbf,Backes:2022vmn,Arratia:2022wny,Wong:2021zvv,Arratia:2021otl,H1:2021wkz,Komiske:2021vym,Andreassen:2021zzk,Baron:2021vvl,Howard:2021pos,Vandegar:2020yvw,1800956,Zech2003BinningFreeUB,Lindemann:1995ut,Martschei:2012pr,Glazov:2017vni,Gagunashvili:2010zw,Bellagente:2019uyp,Datta:2018mwd,Andreassen:2019cjw,Mieskolainen:2018fhf}
\\\textit{This is the task of removing detector distortions. In contrast to parameter estimation, the goal is not to infer model parameters, but rather to recover the undistorted phase-space probability density. This is often also called deconvolution (the folding relation being inverted is sketched below).}
\item \textbf{Domain adaptation}~\cite{Glazier:2024ogg,Kelleher:2024jsh,Kelleher:2024rmb,Zhao:2024ely,Algren:2023qnb,Schreck:2023pzs,Camaiani:2022kul,Nachman:2021opi,Diefenbacher:2020rna,Cranmer:2015bka,Andreassen:2019nnm,Rogozhnikov:2016bdp}
\\\textit{Morphing simulations to look like data is a form of domain adaptation.}
\item \textbf{BSM}~\cite{Maselek:2024qyp,Yang:2024bqw,Florez:2024lrr,Saito:2024fmr,Schofbeck:2024zjo,Hammad:2024hhm,Ahmed:2024uaz,Choudhury:2024mox,Baruah:2024gwy,Ahmed:2024oxg,Catena:2024fjn,Bhattacharya:2024sxl,vanBeekveld:2024cby,Barman:2024xlc,Arganda:2023qni,Franz:2023gic,Mandal:2023mck,Chhibra:2023tyf,vanBeekveld:2023ney,Dennis:2023kfe,Anisha:2023xmh,Castro:2022zpq,GomezAmbrosio:2022mpm,deSouza:2022uhk,Romao:2020ojy,Brehmer:2019xox,Brehmer:2018hga,Brehmer:2018eca,Brehmer:2018kdj,Hollingsworth:2020kjg,Andreassen:2020nkr}
\item \textbf{BSM}~\cite{Heimel:2024drk,Maselek:2024qyp,Yang:2024bqw,Florez:2024lrr,Saito:2024fmr,Schofbeck:2024zjo,Hammad:2024hhm,Ahmed:2024uaz,Choudhury:2024mox,Baruah:2024gwy,Ahmed:2024oxg,Catena:2024fjn,Bhattacharya:2024sxl,vanBeekveld:2024cby,Barman:2024xlc,Arganda:2023qni,Franz:2023gic,Mandal:2023mck,Chhibra:2023tyf,vanBeekveld:2023ney,Dennis:2023kfe,Anisha:2023xmh,Castro:2022zpq,GomezAmbrosio:2022mpm,deSouza:2022uhk,Romao:2020ojy,Brehmer:2019xox,Brehmer:2018hga,Brehmer:2018eca,Brehmer:2018kdj,Hollingsworth:2020kjg,Andreassen:2020nkr}
\\\textit{This category is for parameter estimation when the parameter is the signal strength of new physics.}
\item \textbf{Differentiable Simulation}~\cite{Chung:2024vfg,Heimel:2024wph,BarhamAlzas:2024ggt,Smith:2023ssh,Aehle:2023wwi,Kagan:2023gxz,Shenoy:2023ros,Napolitano:2023jhg,Lei:2022dvn,Nachman:2022jbj,MODE:2022znx,Heinrich:2022xfa}
\item \textbf{Differentiable Simulation}~\cite{Heller:2024onk,Chung:2024vfg,Heimel:2024wph,BarhamAlzas:2024ggt,Smith:2023ssh,Aehle:2023wwi,Kagan:2023gxz,Shenoy:2023ros,Napolitano:2023jhg,Lei:2022dvn,Nachman:2022jbj,MODE:2022znx,Heinrich:2022xfa}
\\\textit{Coding up a simulation using a differentiable programming framework such as TensorFlow, PyTorch, or JAX (a toy example of propagating gradients through such a simulation is sketched below).}
\end{itemize}
\item \textbf{Uncertainty Quantification}
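
A note on the newly added PINN category above: a minimal, generic form of the physics-informed training objective (a textbook-style sketch, not taken from Terin:2024iyy or any other cited paper) combines a data-fidelity term with the residual of the governing PDE at collocation points,
\[
\mathcal{L}(\theta) \,=\, \frac{1}{N}\sum_{i=1}^{N}\bigl|u_\theta(x_i)-u_i\bigr|^{2} \,+\, \frac{\lambda}{M}\sum_{j=1}^{M}\bigl|\mathcal{D}[u_\theta](x_j)\bigr|^{2},
\]
where $u_\theta$ is the network, $\{(x_i,u_i)\}$ are data points, $\mathcal{D}[u]=0$ is the governing PDE, the $x_j$ are collocation points, and $\lambda$ balances the two terms. The PDE residual is evaluated with automatic differentiation of $u_\theta$ with respect to its inputs.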
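
For the unfolding category above, the relation being inverted can be written schematically (standard folding notation, shown only for orientation):
\[
p_{\mathrm{reco}}(y) \,=\, \int \mathrm{d}x\, R(y\mid x)\, p_{\mathrm{true}}(x),
\]
where $R(y\mid x)$ is the detector response. Unfolding (deconvolution) estimates $p_{\mathrm{true}}$ from samples of $p_{\mathrm{reco}}$ together with a simulation of $R$.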
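
For the differentiable-simulation category above, the short PyTorch sketch below (one of the frameworks named in that item) shows the basic idea. The Gaussian-smearing "detector", the pseudo-data targets, and the loss are all invented for illustration and are not taken from any cited work; the point is only that gradients can be propagated through a simulation to tune its parameters.

import torch

torch.manual_seed(0)

# Toy truth-level spectrum and pseudo-data summary statistics (all invented for illustration).
truth = torch.randn(10_000) * 5.0 + 20.0   # particle-level "energies"
target_mean, target_std = 20.0, 5.5        # pretend these were measured after detector effects

# One tunable detector parameter: the Gaussian smearing width.
sigma = torch.tensor(0.5, requires_grad=True)

def simulate(truth, sigma):
    """Differentiable toy detector simulation: reparameterized Gaussian smearing."""
    noise = torch.randn_like(truth)
    return truth + sigma * noise

opt = torch.optim.Adam([sigma], lr=0.05)
for step in range(200):
    reco = simulate(truth, sigma)
    # Compare summary statistics of the simulated sample to the pseudo-data.
    loss = (reco.mean() - target_mean) ** 2 + (reco.std() - target_std) ** 2
    opt.zero_grad()
    loss.backward()   # gradients flow through the simulation with respect to sigma
    opt.step()

print(f"fitted smearing width: {sigma.item():.3f}")

The same pattern applies to realistic simulators once every step is written with differentiable operations (or differentiable surrogates for the pieces that are not).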
