From e8ee8945776e3c50c53d71767e640d9c0d45ec95 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 31 Mar 2023 12:55:54 +0200 Subject: [PATCH 001/154] Split managers creation out of main function --- src/eko/couplings.py | 4 +- src/eko/evolution_operator/grid.py | 3 +- src/eko/io/runcards.py | 2 +- src/eko/runner/commons.py | 49 +++++++++++++++++++++++ src/eko/runner/legacy.py | 46 +++++---------------- src/eko/thresholds.py | 14 ++++--- tests/eko/evolution_operator/test_grid.py | 5 ++- 7 files changed, 75 insertions(+), 48 deletions(-) diff --git a/src/eko/couplings.py b/src/eko/couplings.py index 4f8df06be..2022ae7c4 100644 --- a/src/eko/couplings.py +++ b/src/eko/couplings.py @@ -9,7 +9,7 @@ """ import logging import warnings -from typing import List +from typing import Iterable, List import numba as nb import numpy as np @@ -426,7 +426,7 @@ def __init__( method: CouplingEvolutionMethod, masses: List[float], hqm_scheme: QuarkMassScheme, - thresholds_ratios: List[float], + thresholds_ratios: Iterable[float], ): # Sanity checks def assert_positive(name, var): diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index 6f5520a03..ff5b8830f 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -7,6 +7,7 @@ import logging import numbers +from typing import List import numpy as np import numpy.typing as npt @@ -41,7 +42,7 @@ def __init__( self, mu2grid: npt.NDArray, order: tuple, - masses: tuple, + masses: List[float], mass_scheme, intrinsic_flavors: list, xif: float, diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 6c4bb726d..ff9ede82b 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -58,7 +58,7 @@ def masses(theory: TheoryCard, evmeth: EvolutionMethod): theory.couplings, theory.order, couplings_mod_ev(evmeth), - np.power(theory.heavy.matching_ratios, 2.0), + np.power(theory.heavy.matching_ratios, 2.0).tolist(), xif2=theory.xif**2, ).tolist() if 
theory.heavy.masses_scheme is QuarkMassScheme.POLE: diff --git a/src/eko/runner/commons.py b/src/eko/runner/commons.py index 24eb37da2..7b49fab28 100644 --- a/src/eko/runner/commons.py +++ b/src/eko/runner/commons.py @@ -1,4 +1,12 @@ """Runners common utilities.""" +import numpy as np + +from ..couplings import Couplings, couplings_mod_ev +from ..interpolation import InterpolatorDispatcher +from ..io import runcards +from ..io.runcards import OperatorCard, TheoryCard +from ..io.types import ScaleVariationsMethod +from ..thresholds import ThresholdsAtlas BANNER = r""" oooooooooooo oooo oooo \\ .oooooo. @@ -9,3 +17,44 @@ 888 o 888 `88b. `88b // d88' o888ooooood8 o888o o888o `Y8bood8P' """ + + +def interpolator(operator: OperatorCard) -> InterpolatorDispatcher: + """Create interpolator from runcards.""" + return InterpolatorDispatcher( + xgrid=operator.xgrid, + polynomial_degree=operator.configs.interpolation_polynomial_degree, + ) + + +def threshold_atlas(theory: TheoryCard, operator: OperatorCard) -> ThresholdsAtlas: + """Create thresholds atlas from runcards.""" + thresholds_ratios = np.power(theory.heavy.matching_ratios, 2.0) + # TODO: cache result + masses = runcards.masses(theory, operator.configs.evolution_method) + return ThresholdsAtlas( + masses=masses, + q2_ref=operator.mu20, + nf_ref=theory.heavy.num_flavs_init, + thresholds_ratios=thresholds_ratios, + max_nf=theory.heavy.num_flavs_max_pdf, + ) + + +def couplings(theory: TheoryCard, operator: OperatorCard) -> Couplings: + """Create couplings from runcards.""" + thresholds_ratios = np.power(theory.heavy.matching_ratios, 2.0) + masses = runcards.masses(theory, operator.configs.evolution_method) + return Couplings( + couplings=theory.couplings, + order=theory.order, + method=couplings_mod_ev(operator.configs.evolution_method), + masses=masses, + hqm_scheme=theory.heavy.masses_scheme, + thresholds_ratios=thresholds_ratios + * ( + theory.xif**2 + if operator.configs.scvar_method == 
ScaleVariationsMethod.EXPONENTIATED + else 1.0 + ), + ) diff --git a/src/eko/runner/legacy.py b/src/eko/runner/legacy.py index a4817c06c..de6988e7d 100644 --- a/src/eko/runner/legacy.py +++ b/src/eko/runner/legacy.py @@ -3,14 +3,9 @@ import os from typing import Union -import numpy as np - -from .. import interpolation -from ..couplings import Couplings, couplings_mod_ev from ..evolution_operator.grid import OperatorGrid from ..io import EKO, Operator, runcards -from ..io.types import RawCard, ScaleVariationsMethod -from ..thresholds import ThresholdsAtlas +from ..io.types import RawCard from . import commons logger = logging.getLogger(__name__) @@ -55,41 +50,18 @@ def __init__( self._theory = new_theory # setup basis grid - bfd = interpolation.InterpolatorDispatcher( - xgrid=new_operator.xgrid, - polynomial_degree=new_operator.configs.interpolation_polynomial_degree, - ) - - # setup the Threshold path, compute masses if necessary - masses = runcards.masses(new_theory, new_operator.configs.evolution_method) + bfd = commons.interpolator(new_operator) # call explicitly iter to explain the static analyzer that is an # iterable - thresholds_ratios = np.power(list(iter(new_theory.heavy.matching_ratios)), 2.0) - tc = ThresholdsAtlas( - masses=masses, - q2_ref=new_operator.mu20, - nf_ref=new_theory.heavy.num_flavs_init, - thresholds_ratios=thresholds_ratios, - max_nf=new_theory.heavy.num_flavs_max_pdf, - ) + tc = commons.threshold_atlas(new_theory, new_operator) # strong coupling - sc = Couplings( - couplings=new_theory.couplings, - order=new_theory.order, - method=couplings_mod_ev(new_operator.configs.evolution_method), - masses=masses, - hqm_scheme=new_theory.heavy.masses_scheme, - thresholds_ratios=thresholds_ratios - * ( - new_theory.xif**2 - if new_operator.configs.scvar_method - == ScaleVariationsMethod.EXPONENTIATED - else 1.0 - ), - ) - # setup operator grid + cs = commons.couplings(new_theory, new_operator) # setup operator grid + + # compute masses if required + 
masses = runcards.masses(new_theory, new_operator.configs.evolution_method) + self.op_grid = OperatorGrid( mu2grid=new_operator.mu2grid, order=new_theory.order, @@ -100,7 +72,7 @@ def __init__( configs=new_operator.configs, debug=new_operator.debug, thresholds_config=tc, - couplings=sc, + couplings=cs, interpol_dispatcher=bfd, n3lo_ad_variation=new_theory.n3lo_ad_variation, ) diff --git a/src/eko/thresholds.py b/src/eko/thresholds.py index eb0bd7fa2..763509352 100644 --- a/src/eko/thresholds.py +++ b/src/eko/thresholds.py @@ -1,7 +1,7 @@ r"""Holds the classes that define the |FNS|.""" import logging from dataclasses import astuple, dataclass -from typing import List, Optional +from typing import Iterable, List, Optional import numpy as np @@ -47,7 +47,7 @@ def __init__( masses: List[float], q2_ref: Optional[float] = None, nf_ref: Optional[int] = None, - thresholds_ratios: Optional[List[float]] = None, + thresholds_ratios: Optional[Iterable[float]] = None, max_nf: Optional[int] = None, ): """Create basic atlas. @@ -69,6 +69,12 @@ def __init__( sorted_masses = sorted(masses) if not np.allclose(masses, sorted_masses): raise ValueError("masses need to be sorted") + + if thresholds_ratios is None: + thresholds_ratios = [1.0, 1.0, 1.0] + else: + thresholds_ratios = list(thresholds_ratios) + # combine them thresholds = self.build_area_walls(sorted_masses, thresholds_ratios, max_nf) self.area_walls = [0] + thresholds + [np.inf] @@ -126,7 +132,7 @@ def ffns(cls, nf: int, q2_ref: Optional[float] = None): @staticmethod def build_area_walls( masses: List[float], - thresholds_ratios: Optional[List[float]] = None, + thresholds_ratios: List[float], max_nf: Optional[int] = None, ): r"""Create the object from the informations on the run card. 
@@ -150,8 +156,6 @@ def build_area_walls( """ if len(masses) != 3: raise ValueError("There have to be 3 quark masses") - if thresholds_ratios is None: - thresholds_ratios = [1.0, 1.0, 1.0] if len(thresholds_ratios) != 3: raise ValueError("There have to be 3 quark threshold ratios") if max_nf is None: diff --git a/tests/eko/evolution_operator/test_grid.py b/tests/eko/evolution_operator/test_grid.py index a65bc21e4..88a1eea2a 100644 --- a/tests/eko/evolution_operator/test_grid.py +++ b/tests/eko/evolution_operator/test_grid.py @@ -12,6 +12,7 @@ import pytest import eko.io.types +from eko import couplings from eko.quantities.couplings import CouplingEvolutionMethod from eko.runner import legacy @@ -22,12 +23,12 @@ class FakeEM(enum.Enum): BLUB = "blub" monkeypatch.setattr( - legacy, + couplings, "couplings_mod_ev", lambda *args: CouplingEvolutionMethod.EXACT, ) operator_card.configs.evolution_method = FakeEM.BLUB - with pytest.raises(ValueError, match="blub"): + with pytest.raises(ValueError, match="BLUB"): legacy.Runner(theory_ffns(3), operator_card, path=tmp_path / "eko.tar") # check LO From 1898c9d5b38ce3efa943db1175a669e6f2702f03 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 31 Mar 2023 21:37:43 +0200 Subject: [PATCH 002/154] Replace the old runner in solve With the not yet implemented one --- src/eko/runner/__init__.py | 23 ++++++++++++----------- src/eko/runner/managed.py | 29 +++++++++++++++++++++++++++++ src/eko/runner/recipes.py | 17 ++--------------- tests/eko/runner/test_init.py | 6 +++++- 4 files changed, 48 insertions(+), 27 deletions(-) create mode 100644 src/eko/runner/managed.py diff --git a/src/eko/runner/__init__.py b/src/eko/runner/__init__.py index c985fb1fd..0d05183f5 100644 --- a/src/eko/runner/__init__.py +++ b/src/eko/runner/__init__.py @@ -1,12 +1,18 @@ """Manage steps to DGLAP solution, and operator creation.""" import os +from pathlib import Path from typing import Union +from ..io import runcards from ..io.runcards import 
OperatorCard, TheoryCard from ..io.types import RawCard -from . import legacy +from . import managed +# TODO: drop this altogether, replacing just with managed.solve +# it is currently kept not to break the interface, but the runcards upgrade and +# path conversion should be done by the caller, here we just clearly declare +# which types we expect def solve( theory_card: Union[RawCard, TheoryCard], operators_card: Union[RawCard, OperatorCard], @@ -26,18 +32,13 @@ def solve( to the solution of the |DGLAP| equation itself, and determine the resulting operator features. - Parameters - ---------- - theory_card : - theory parameters and related settings - operator_card : - solution configurations, and further EKO options - path : - path where to store the computed operator - Note ---- For further information about EKO inputs and output see :doc:`/code/IO` """ - legacy.Runner(theory_card, operators_card, path).compute() + # TODO: drop this + # legacy.Runner(theory_card, operators_card, path).compute() + + new_theory, new_operator = runcards.update(theory_card, operators_card) + managed.solve(new_theory, new_operator, Path(path)) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py new file mode 100644 index 000000000..3aaef21ef --- /dev/null +++ b/src/eko/runner/managed.py @@ -0,0 +1,29 @@ +"""Fully managed runner. + +This is an automated runner, mainly suggested for small EKOs computations. + +The primitives used here to compute the various pieces are part of the public +interface, and should be directly used to manage a more complex run for a +considebaly large operator. + +Thus, parallelization and multi-node execution is possible using EKO primitives, +but not automatically performed. + +""" + +from pathlib import Path + +from ..io.runcards import OperatorCard, TheoryCard +from ..io.struct import EKO +from . 
import recipes + + +def solve(theory: TheoryCard, operator: OperatorCard, path: Path): + """Solve DGLAP equations in terms of evolution kernel operators (EKO).""" + with EKO.create(path) as builder: + eko = builder.load_cards(theory, operator).build() + + recipes.create(eko) + + # for recipe in eko.recipes: + # pass diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index 0e6163b66..659667080 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -4,10 +4,9 @@ from .. import EKO from .. import scale_variations as sv -from ..io import runcards from ..io.dictlike import DictLike from ..io.types import SquaredScale -from ..thresholds import ThresholdsAtlas +from . import commons @dataclass @@ -35,19 +34,7 @@ class MatchingRecipe(Recipe): def create(eko: EKO): """Create all associated recipes.""" - _ = eko.theory_card.matching - - masses = runcards.masses( - eko.theory_card, eko.operator_card.configs.evolution_method - ) - - tc = ThresholdsAtlas( - masses=masses, - q2_ref=eko.operator_card.mu20, - nf_ref=eko.theory_card.num_flavs_init, - thresholds_ratios=None, - max_nf=eko.theory_card.num_flavs_max_pdf, - ) + tc = commons.threshold_atlas(eko.theory_card, eko.operator_card) for mu2 in eko.mu2grid: expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded diff --git a/tests/eko/runner/test_init.py b/tests/eko/runner/test_init.py index 63037c2a7..ff5985834 100644 --- a/tests/eko/runner/test_init.py +++ b/tests/eko/runner/test_init.py @@ -11,10 +11,14 @@ def compute(self): self.path.write_text("output", encoding="utf-8") +def mock_solve(th, op, path): + return MockRunner(th, op, path).compute() + + def test_run(monkeypatch, tmp_path: pathlib.Path): # just test, that it is a shortcut to 'compute' path = tmp_path / "eko.tar" - monkeypatch.setattr(eko.runner.legacy, "Runner", MockRunner) + monkeypatch.setattr(eko, "solve", mock_solve) eko.solve({}, {}, path=path) out = path.read_text(encoding="utf-8") assert out == "output" 
From 75dbd90ece4976b997acfb68d3e149bc9a1cb85c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 17 Apr 2023 17:11:11 +0200 Subject: [PATCH 003/154] Extend parts documentation --- src/eko/runner/parts.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index aed1067aa..e1b5a77a4 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,6 +1,7 @@ """Operator components.""" from abc import ABC from dataclasses import dataclass +from typing import Optional import numpy.typing as npt @@ -20,6 +21,7 @@ class Part(DictLike, ABC): """ operator: npt.NDArray + error: Optional[npt.NDArray] @dataclass @@ -27,8 +29,18 @@ class Evolution(Part): """Evolution in a fixed number of flavors.""" final: bool + """Whether the operator is the terminal segment of evolution. + + If it is not final, then the operator is an intermediate one. + Intermediate ones always have final scales :attr:`mu2` corresponding to + matching scales, and initial scales :attr:`mu20` corresponding to either + matching scales or the global initial scale of the EKO. 
+ + """ mu20: SquaredScale + """Initial scale.""" mu2: SquaredScale + """Final scale.""" @dataclass @@ -36,3 +48,4 @@ class Matching(Part): """Matching conditions between two different flavor schemes, at a given scale.""" mu2: SquaredScale + """Matching scale.""" From 5d8bb7608b0c695692fa0260936d6b1bae3e2bac Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 17 Apr 2023 17:55:24 +0200 Subject: [PATCH 004/154] Move from mu2grid to targetgrid --- src/eko/io/runcards.py | 1 + src/eko/io/struct.py | 91 +++++++++++++----------------------------- 2 files changed, 29 insertions(+), 63 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index ff9ede82b..ff92e1138 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -34,6 +34,7 @@ ) +# TODO: add frozen @dataclass class TheoryCard(DictLike): """Represent theory card content.""" diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 956fd3fc5..26a8b834c 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -10,7 +10,7 @@ import tarfile import tempfile from dataclasses import dataclass -from typing import BinaryIO, Dict, Optional +from typing import BinaryIO, Dict, List, Optional import lz4.frame import numpy as np @@ -23,6 +23,7 @@ from . 
import exceptions, raw from .dictlike import DictLike from .runcards import OperatorCard, Rotations, TheoryCard +from .types import Target logger = logging.getLogger(__name__) @@ -206,26 +207,19 @@ def bootstrap(self, theory: dict, operator: dict, metadata: dict): self.operators.mkdir() @staticmethod - def opname(mu2: float) -> str: - r"""Operator file name from :math:`\mu^2` value.""" + def opname(target: Target) -> str: + r"""Operator file name from :math:`(\mu^2, n_f)` value.""" + mu2 = target[0] + nf = target[1] decoded = np.float64(mu2).tobytes() - return base64.urlsafe_b64encode(decoded).decode() + mu2bytes = base64.urlsafe_b64encode(decoded).decode() + return f"{mu2bytes}_{nf}" - def oppath(self, mu2: float) -> pathlib.Path: + def oppath(self, target: Target) -> pathlib.Path: r"""Retrieve operator file path from :math:`\mu^2` value. This method looks for an existing path matching. - Parameters - ---------- - mu2: float - :math:`\mu2` scale specified - - Returns - ------- - pathlib.Path - the path retrieved, guaranteed to exist - Raises ------ ValueError @@ -233,31 +227,21 @@ def oppath(self, mu2: float) -> pathlib.Path: specified value of :math:`\mu2` """ - name = self.opname(mu2) + name = self.opname(target) oppaths = list( filter(lambda path: path.name.startswith(name), self.operators.iterdir()) ) if len(oppaths) == 0: - raise ValueError(f"mu2 value '{mu2}' not available.") + raise ValueError(f"Target value '{target}' not available.") elif len(oppaths) > 1: - raise ValueError(f"Too many operators associated to '{mu2}':\n{oppaths}") + raise ValueError(f"Too many operators associated to '{target}':\n{oppaths}") return oppaths[0] def opcompressed(self, path: os.PathLike) -> bool: """Check if the operator at the path specified is compressed. 
- Parameters - ---------- - path: os.PathLike - the path to the operator to check - - Returns - ------- - bool - whether it is compressed - Raises ------ OperatorLocationError @@ -270,19 +254,9 @@ def opcompressed(self, path: os.PathLike) -> bool: return path.suffix == COMPRESSED_SUFFIX - def opmu2(self, path: os.PathLike) -> float: + def optarget(self, path: os.PathLike) -> Target: r"""Extract :math:`\mu2` value from operator path. - Parameters - ---------- - path: os.PathLike - the path to the operator - - Returns - ------- - bool - the :math:`\mu2` value associated to the operator - Raises ------ OperatorLocationError @@ -293,41 +267,32 @@ def opmu2(self, path: os.PathLike) -> float: if self.operators not in path.parents: raise exceptions.OperatorLocationError(path) - encoded = path.stem.split(".")[0] + name = path.stem.split(".")[0] + encoded, nfseg = name.split("_") decoded = base64.urlsafe_b64decode(encoded) - return float(np.frombuffer(decoded, dtype=np.float64)[0]) + mu2 = float(np.frombuffer(decoded, dtype=np.float64)[0]) + nf = int(nfseg) + return (mu2, nf) def opnewpath( - self, mu2: float, compress: bool = True, without_err: bool = True + self, target: Target, compress: bool = True, without_err: bool = True ) -> pathlib.Path: - r"""Compute the path associated to :math:`\mu^2` value. 
- - Parameters - ---------- - mu2: float - :math:`\mu2` scale specified - - Returns - ------- - pathlib.Path - the path computed, it might already exists - - """ + r"""Compute the path associated to :math:`(\mu^2, n_f)` value.""" suffix = ".npy" if without_err else ".npz" if compress: suffix += COMPRESSED_SUFFIX - return self.operators / (self.opname(mu2) + suffix) + return self.operators / (self.opname(target) + suffix) @property - def mu2grid(self) -> npt.NDArray[np.float64]: - r"""Provide the array of :math:`\mu2` values of existing operators.""" + def grid(self) -> List[Target]: + r"""Provide the array of :math:`(\mu^2, n_f)` values of existing operators.""" if self.root is None: raise RuntimeError() - return np.array( + return list( sorted( map( - lambda p: self.opmu2(p), + lambda p: self.optarget(p), InternalPaths(self.root).operators.iterdir(), ) ) @@ -511,7 +476,7 @@ class EKO: """ # operators cache, contains the Q2 grid information - _operators: Dict[float, Optional[Operator]] + _operators: Dict[Target, Optional[Operator]] # public containers # ----------------- @@ -549,9 +514,9 @@ def mu20(self) -> float: return self.metadata.mu20 @property - def mu2grid(self) -> npt.NDArray: + def mu2grid(self) -> List[Target]: """Provide the list of :math:`Q^2` as an array.""" - return np.array(list(self._operators)) + return list(self._operators) @property def theory_card(self): From 129004b609d64ba637d3837df85e0a07e519edbf Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 17 Apr 2023 22:52:53 +0200 Subject: [PATCH 005/154] Introduce evolution points --- src/eko/io/runcards.py | 6 +- src/eko/io/struct.py | 135 ++++++++++++-------------------- src/eko/io/types.py | 6 +- src/eko/runner/recipes.py | 2 + src/ekomark/benchmark/runner.py | 2 +- 5 files changed, 58 insertions(+), 93 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index ff92e1138..2dbd7db23 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -22,15 +22,15 @@ 
from ..quantities.couplings import CouplingsInfo from ..quantities.heavy_quarks import HeavyInfo, QuarkMassScheme from .dictlike import DictLike +from .types import EvolutionMethod +from .types import EvolutionPoint as EPoint from .types import ( - EvolutionMethod, InversionMethod, N3LOAdVariation, Order, RawCard, ScaleVariationsMethod, T, - Target, ) @@ -221,7 +221,7 @@ class OperatorCard(DictLike): mu0: float """Initial scale.""" - mugrid: List[Target] + mugrid: List[EPoint] xgrid: interpolation.XGrid """Momentum fraction internal grid.""" diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 26a8b834c..32e46592b 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -23,7 +23,8 @@ from . import exceptions, raw from .dictlike import DictLike from .runcards import OperatorCard, Rotations, TheoryCard -from .types import Target +from .types import EvolutionPoint as EPoint +from .types import SquaredScale logger = logging.getLogger(__name__) @@ -207,15 +208,15 @@ def bootstrap(self, theory: dict, operator: dict, metadata: dict): self.operators.mkdir() @staticmethod - def opname(target: Target) -> str: + def opname(ep: EPoint) -> str: r"""Operator file name from :math:`(\mu^2, n_f)` value.""" - mu2 = target[0] - nf = target[1] + mu2 = ep[0] + nf = ep[1] decoded = np.float64(mu2).tobytes() mu2bytes = base64.urlsafe_b64encode(decoded).decode() return f"{mu2bytes}_{nf}" - def oppath(self, target: Target) -> pathlib.Path: + def oppath(self, ep: EPoint) -> pathlib.Path: r"""Retrieve operator file path from :math:`\mu^2` value. This method looks for an existing path matching. 
@@ -227,15 +228,15 @@ def oppath(self, target: Target) -> pathlib.Path: specified value of :math:`\mu2` """ - name = self.opname(target) + name = self.opname(ep) oppaths = list( filter(lambda path: path.name.startswith(name), self.operators.iterdir()) ) if len(oppaths) == 0: - raise ValueError(f"Target value '{target}' not available.") + raise ValueError(f"Target value '{ep}' not available.") elif len(oppaths) > 1: - raise ValueError(f"Too many operators associated to '{target}':\n{oppaths}") + raise ValueError(f"Too many operators associated to '{ep}':\n{oppaths}") return oppaths[0] @@ -254,7 +255,7 @@ def opcompressed(self, path: os.PathLike) -> bool: return path.suffix == COMPRESSED_SUFFIX - def optarget(self, path: os.PathLike) -> Target: + def optarget(self, path: os.PathLike) -> EPoint: r"""Extract :math:`\mu2` value from operator path. Raises @@ -275,16 +276,16 @@ def optarget(self, path: os.PathLike) -> Target: return (mu2, nf) def opnewpath( - self, target: Target, compress: bool = True, without_err: bool = True + self, ep: EPoint, compress: bool = True, without_err: bool = True ) -> pathlib.Path: r"""Compute the path associated to :math:`(\mu^2, n_f)` value.""" suffix = ".npy" if without_err else ".npz" if compress: suffix += COMPRESSED_SUFFIX - return self.operators / (self.opname(target) + suffix) + return self.operators / (self.opname(ep) + suffix) @property - def grid(self) -> List[Target]: + def grid(self) -> List[EPoint]: r"""Provide the array of :math:`(\mu^2, n_f)` values of existing operators.""" if self.root is None: raise RuntimeError() @@ -371,7 +372,7 @@ class Metadata(DictLike): """ - mu20: float + mu20: EPoint """Inital scale.""" rotations: Rotations """Manipulation information, describing the current status of the EKO (e.g. 
@@ -476,7 +477,7 @@ class EKO: """ # operators cache, contains the Q2 grid information - _operators: Dict[Target, Optional[Operator]] + _operators: Dict[EPoint, Optional[Operator]] # public containers # ----------------- @@ -509,14 +510,14 @@ def xgrid(self, value: interpolation.XGrid): self.update() @property - def mu20(self) -> float: + def mu20(self) -> SquaredScale: """Provide squared initial scale.""" - return self.metadata.mu20 + return self.metadata.mu20[0] @property - def mu2grid(self) -> List[Target]: + def mu2grid(self) -> List[SquaredScale]: """Provide the list of :math:`Q^2` as an array.""" - return list(self._operators) + return [mu2 for mu2, _ in self._operators] @property def theory_card(self): @@ -554,7 +555,7 @@ def permissions(self): # operator management # ------------------- - def __getitem__(self, mu2: float) -> Operator: + def __getitem__(self, ep: EPoint) -> Operator: r"""Retrieve operator for given :math:`\mu^2`. If the operator is not already in memory, it will be automatically @@ -573,33 +574,26 @@ def __getitem__(self, mu2: float) -> Operator: """ self.access.assert_open() - if mu2 in self._operators: - op = self._operators[mu2] + if ep in self._operators: + op = self._operators[ep] if op is not None: return op - oppath = self.paths.oppath(mu2) + oppath = self.paths.oppath(ep) compressed = self.paths.opcompressed(oppath) with open(oppath, "rb") as fd: op = Operator.load(fd, compressed=compressed) - self._operators[mu2] = op + self._operators[ep] = op return op - def __setitem__(self, mu2: float, op: Operator, compress: bool = True): - """Set operator for given :math:`Q^2`. + def __setitem__(self, ep: EPoint, op: Operator, compress: bool = True): + """Set operator for given :math:`mu^2`. The operator is automatically dumped on disk. 
- Parameters - ---------- - q2 : float - :math:`Q^2` value labeling the operator to be set - op : Operator - the retrieved operator - compress : bool - whether to save the operator compressed or not (default: `True`) + If ``compress``, the output array is compressed. """ self.access.assert_writeable() @@ -608,15 +602,15 @@ def __setitem__(self, mu2: float, op: Operator, compress: bool = True): raise ValueError("Only an Operator can be added to an EKO") without_err = op.error is None - oppath = self.paths.opnewpath(mu2, compress=compress, without_err=without_err) + oppath = self.paths.opnewpath(ep, compress=compress, without_err=without_err) with open(oppath, "wb") as fd: without_err2 = op.save(fd, compress) assert without_err == without_err2 - self._operators[mu2] = op + self._operators[ep] = op - def __delitem__(self, q2: float): + def __delitem__(self, ep: EPoint): """Drop operator from memory. This method only drops the operator from memory, and it's not expected @@ -627,50 +621,31 @@ def __delitem__(self, q2: float): If a further explicit save is required, repeat explicit assignment:: - eko[q2] = eko[q2] + eko[ep] = eko[ep] This is only useful if the operator has been mutated in place, that in general should be avoided, since the operator should only be the result of a full computation or a library manipulation. - - Parameters - ---------- - q2 : float - the value of :math:`Q^2` for which the corresponding operator - should be dropped - """ - self._operators[q2] = None + self._operators[ep] = None @contextlib.contextmanager - def operator(self, q2: float): + def operator(self, ep: EPoint): """Retrieve an operator and discard it afterwards. To be used as a contextmanager: the operator is automatically loaded as usual, but on the closing of the context manager it is dropped from memory. 
- Parameters - ---------- - q2 : float - :math:`Q^2` value labeling the operator to be retrieved - """ try: - yield self[q2] + yield self[ep] finally: - del self[q2] + del self[ep] def __iter__(self): - """Iterate over keys (i.e. Q2 values). - - Yields - ------ - float - q2 values - - """ + """Iterate over keys (i.e. evolution points).""" yield from self._operators def items(self): @@ -690,9 +665,9 @@ def items(self): immediately after """ - for q2 in self.mu2grid: - yield q2, self[q2] - del self[q2] + for ep, op in self._operators.items(): + yield ep, op + del self[ep] def __contains__(self, q2: float) -> bool: """Check whether :math:`Q^2` operators are present. @@ -710,23 +685,9 @@ def __contains__(self, q2: float) -> bool: return q2 in self._operators def approx( - self, q2: float, rtol: float = 1e-6, atol: float = 1e-10 + self, ep: EPoint, rtol: float = 1e-6, atol: float = 1e-10 ) -> Optional[float]: - """Look for close enough :math:`Q^2` value in the :class:`EKO`. - - Parameters - ---------- - q2 : float - value of :math:`Q2` in which neighbourhood to look - rtol : float - relative tolerance - atol : float - absolute tolerance - - Returns - ------- - float or None - retrieved value of :math:`Q^2`, if a single one is found + """Look for close enough :math:`mu^2` value in the :class:`EKO`. 
Raises ------ @@ -734,19 +695,19 @@ def approx( if multiple values are find in the neighbourhood """ - q2s = self.mu2grid - close = q2s[np.isclose(q2, q2s, rtol=rtol, atol=atol)] + mu2s = np.array([mu2 for mu2, nf in self if nf == ep[1]]) + close = mu2s[np.isclose(ep[0], mu2s, rtol=rtol, atol=atol)] if close.size == 1: return close[0] if close.size == 0: return None - raise ValueError(f"Multiple values of Q2 have been found close to {q2}") + raise ValueError(f"Multiple values of Q2 have been found close to {ep[0]}") def unload(self): """Fully unload the operators in memory.""" - for q2 in self: - del self[q2] + for ep in self: + del self[ep] # operator management # ------------------- @@ -835,7 +796,9 @@ def open(cls, path: os.PathLike, mode="r"): cls.load(path, tmpdir) metadata = Metadata.load(tmpdir) opened = cls( - _operators={mu2: None for mu2 in InternalPaths(metadata.path).mu2grid}, + _operators={ + target: None for target in InternalPaths(metadata.path).grid + }, metadata=metadata, access=access, ) diff --git a/src/eko/io/types.py b/src/eko/io/types.py index 4d260e1c4..5573bafbb 100644 --- a/src/eko/io/types.py +++ b/src/eko/io/types.py @@ -25,10 +25,10 @@ IntrinsicFlavors = typing.List[FlavorIndex] N3LOAdVariation = typing.Tuple[int, int, int, int] -# Targets -# ------- +# Evolution coordinates +# --------------------- -Target = Tuple[LinearScale, FlavorsNumber] +EvolutionPoint = Tuple[Scale, FlavorsNumber] # Scale functions diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index 659667080..a8a1a4b1d 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -36,6 +36,8 @@ def create(eko: EKO): """Create all associated recipes.""" tc = commons.threshold_atlas(eko.theory_card, eko.operator_card) + # __import__("pdb").set_trace() + for mu2 in eko.mu2grid: expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 diff --git a/src/ekomark/benchmark/runner.py 
b/src/ekomark/benchmark/runner.py index 7d9f9340a..9be2d5261 100644 --- a/src/ekomark/benchmark/runner.py +++ b/src/ekomark/benchmark/runner.py @@ -237,7 +237,7 @@ def log(self, theory, _, pdf, me, ext): ): rotate_to_evolution[3, :] = [0, 0, 0, 0, 0, -1, -1, 0, 1, 1, 0, 0, 0, 0] - with EKO.open(me) as eko: + with EKO.read(me) as eko: pdf_grid = apply.apply_pdf_flavor( eko, pdf, From 1cf3780eeb4277692c12d2034017028894f7894c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 19 Apr 2023 16:39:36 +0200 Subject: [PATCH 006/154] Upgrade EKO scales to epoints --- src/eko/io/runcards.py | 5 +++++ src/eko/io/struct.py | 4 ++-- src/eko/quantities/heavy_quarks.py | 4 ++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 2dbd7db23..364e93285 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -246,6 +246,11 @@ def mu2grid(self) -> npt.NDArray: """Grid of squared final scales.""" return np.array([mu for mu, _ in self.mugrid]) ** 2 + @property + def evolgrid(self) -> List[EPoint]: + """Grid of squared final scales.""" + return [(mu**2, nf) for mu, nf in self.mugrid] + @property def pids(self): """Internal flavor basis, used for computation.""" diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 32e46592b..2580ff2a2 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -970,7 +970,7 @@ def build(self) -> EKO: self.access.open = True metadata = Metadata( _path=self.path, - mu20=self.operator.mu20, + mu20=(self.operator.mu20, self.theory.heavy.num_flavs_init), rotations=Rotations(xgrid=self.operator.xgrid), ) InternalPaths(self.path).bootstrap( @@ -980,7 +980,7 @@ def build(self) -> EKO: ) self.eko = EKO( - _operators={mu2: None for mu2 in self.operator.mu2grid}, + _operators={ep: None for ep in self.operator.evolgrid}, metadata=metadata, access=self.access, ) diff --git a/src/eko/quantities/heavy_quarks.py b/src/eko/quantities/heavy_quarks.py index 
3f061d977..4f782e067 100644 --- a/src/eko/quantities/heavy_quarks.py +++ b/src/eko/quantities/heavy_quarks.py @@ -1,7 +1,7 @@ """Heavy quarks related quantities.""" import enum from dataclasses import dataclass -from typing import Generic, Optional, Sequence, TypeVar +from typing import Generic, Sequence, TypeVar import numpy as np @@ -98,7 +98,7 @@ class HeavyInfo(DictLike): """ - num_flavs_init: Optional[FlavorsNumber] + num_flavs_init: FlavorsNumber r"""Number of active flavors at fitting scale. I.e. :math:`n_{f,\text{ref}}(\mu^2_0)`, formerly called ``nf0``. From d805a98a00643619ce25517823ee22817867fd23 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 20 Apr 2023 20:07:46 +0200 Subject: [PATCH 007/154] Start rearranging thresholds --- src/eko/runner/recipes.py | 10 ++++++---- src/eko/thresholds.py | 32 +++++++++++--------------------- 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index a8a1a4b1d..c26776688 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -36,8 +36,10 @@ def create(eko: EKO): """Create all associated recipes.""" tc = commons.threshold_atlas(eko.theory_card, eko.operator_card) - # __import__("pdb").set_trace() + terminal = [] + for ep in eko: + # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded + # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 - for mu2 in eko.mu2grid: - expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded - mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 + blocks = tc.path(*ep) + terminal.append(blocks.pop()) diff --git a/src/eko/thresholds.py b/src/eko/thresholds.py index 763509352..5208e948e 100644 --- a/src/eko/thresholds.py +++ b/src/eko/thresholds.py @@ -5,6 +5,8 @@ import numpy as np +from .io.types import EvolutionPoint as EPoint + logger = logging.getLogger(__name__) @@ -170,10 +172,7 @@ def build_area_walls( def path( self, - q2_to: float, - nf_to: 
Optional[int] = None, - q2_from: Optional[float] = None, - nf_from: Optional[int] = None, + target: EPoint, ): """Get path from ``q2_from`` to ``q2_to``. @@ -181,8 +180,6 @@ def path( ---------- q2_to: target value of q2 - q2_from: - starting value of q2 Returns ------- @@ -191,31 +188,24 @@ def path( to ``q2_to`` """ - # fallback to init config - if q2_from is None: - q2_from = self.q2_ref - if nf_from is None: - nf_from = self.nf_ref - # determine reference thresholds - if nf_from is None: - nf_from = 2 + np.digitize(q2_from, self.area_walls) - if nf_to is None: - nf_to = 2 + np.digitize(q2_to, self.area_walls) + origin: EPoint = (self.q2_ref, self.nf_ref) + # determine direction and python slice modifier - if nf_to < nf_from: + if target[1] < origin[1]: rc = -1 shift = -3 else: rc = 1 shift = -2 + # join all necessary points in one list boundaries = ( - [q2_from] - + self.area_walls[nf_from + shift : int(nf_to) + shift : rc] - + [q2_to] + [origin[0]] + + self.area_walls[origin[1] + shift : int(target[1]) + shift : rc] + + [target[0]] ) segs = [ - PathSegment(boundaries[i], q2, nf_from + i * rc) + PathSegment(boundaries[i], q2, origin[1] + i * rc) for i, q2 in enumerate(boundaries[1:]) ] return segs From 72c1cf618a2495dd67123bdc2caea8f97bef6334 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 13:56:25 +0200 Subject: [PATCH 008/154] Trim down matching scales management, improve docs for path determination --- src/eko/thresholds.py | 288 ++++++++++++++---------------------------- 1 file changed, 92 insertions(+), 196 deletions(-) diff --git a/src/eko/thresholds.py b/src/eko/thresholds.py index 5208e948e..8759b5624 100644 --- a/src/eko/thresholds.py +++ b/src/eko/thresholds.py @@ -1,42 +1,42 @@ r"""Holds the classes that define the |FNS|.""" import logging -from dataclasses import astuple, dataclass -from typing import Iterable, List, Optional +from dataclasses import dataclass +from typing import List import numpy as np from .io.types import 
EvolutionPoint as EPoint +from .io.types import FlavorsNumber, SquaredScale +from .quantities.heavy_quarks import MatchingScales logger = logging.getLogger(__name__) @dataclass -class PathSegment: +class Segment: """Oriented path in the threshold landscape.""" - q2_from: float + origin: SquaredScale """Starting point.""" - q2_to: float + target: SquaredScale """Final point.""" - nf: int + nf: FlavorsNumber """Number of active flavors.""" @property - def is_downward_q2(self) -> bool: - """Return True if ``q2_from`` is bigger than ``q2_to``.""" - return self.q2_from > self.q2_to - - @property - def tuple(self): - """Deprecated: use directly `dataclasses.astuple`.""" - return astuple(self) + def is_downward(self) -> bool: + """Return True if ``origin`` is bigger than ``target``.""" + return self.origin > self.target def __str__(self): """Textual representation, mainly for logging purpose.""" - return f"PathSegment({self.q2_from} -> {self.q2_to}, nf={self.nf})" + return f"PathSegment({self.origin} -> {self.target}, nf={self.nf})" + + +Path = List[Segment] -class ThresholdsAtlas: +class Atlas: r"""Holds information about the matching scales. These scales are the :math:`Q^2` has to pass in order to get there from a @@ -44,191 +44,109 @@ class ThresholdsAtlas: """ - def __init__( - self, - masses: List[float], - q2_ref: Optional[float] = None, - nf_ref: Optional[int] = None, - thresholds_ratios: Optional[Iterable[float]] = None, - max_nf: Optional[int] = None, - ): - """Create basic atlas. 
- - Parameters - ---------- - masses : - list of quark masses squared - q2_ref : - reference scale - nf_ref : - number of active flavors at the reference scale - thresholds_ratios : - list of ratios between matching scales and masses squared - max_nf : - maximum number of active flavors, if `None` no maximum is set + def __init__(self, matching_scales: MatchingScales, origin: EPoint): + """Create basic atlas.""" + self.walls = [0] + matching_scales + [np.inf] + self.origin = origin - """ - sorted_masses = sorted(masses) - if not np.allclose(masses, sorted_masses): - raise ValueError("masses need to be sorted") - - if thresholds_ratios is None: - thresholds_ratios = [1.0, 1.0, 1.0] - else: - thresholds_ratios = list(thresholds_ratios) - - # combine them - thresholds = self.build_area_walls(sorted_masses, thresholds_ratios, max_nf) - self.area_walls = [0] + thresholds + [np.inf] - - # check nf_ref - if nf_ref is not None: - if q2_ref is None: - raise ValueError( - "Without a reference Q2 value a reference number of flavors " - "does not make sense!" 
- ) - # else self.q2_ref is not None - nf_init = 2 + len(list(filter(lambda x: np.isclose(0, x), self.area_walls))) - if nf_ref < nf_init: - raise ValueError( - f"The reference number of flavors is set to {nf_ref}, " - f"but the atlas starts at {nf_init}" - ) - nf_final = 2 + len(list(filter(lambda x: x < np.inf, self.area_walls))) - if nf_ref > nf_final: - raise ValueError( - f"The reference number of flavors is set to {nf_ref}, " - f"but the atlas stops at {nf_final}" - ) - - # Init values - self.q2_ref = q2_ref - self.nf_ref = nf_ref - self.thresholds_ratios = thresholds_ratios logger.info(str(self)) def __str__(self): """Textual representation, mainly for logging purpose.""" - walls = " - ".join([f"{w:.2e}" for w in self.area_walls]) - return f"ThresholdsAtlas [{walls}], ref={self.q2_ref} @ {self.nf_ref}" + walls = " - ".join([f"{w:.2e}" for w in self.walls]) + return f"ThresholdsAtlas [{walls}], ref={self.origin[0]} @ {self.origin[1]}" @classmethod - def ffns(cls, nf: int, q2_ref: Optional[float] = None): + def ffns(cls, nf: int, mu2: SquaredScale): """Create a |FFNS| setup. The function creates simply sufficient thresholds at ``0`` (in the beginning), since the number of flavors is determined by counting from below. - Parameters - ---------- - nf : - number of light flavors - q2_ref : - reference scale + The origin is set with that number of flavors. """ - return cls([0] * (nf - 3) + [np.inf] * (6 - nf), q2_ref) - - @staticmethod - def build_area_walls( - masses: List[float], - thresholds_ratios: List[float], - max_nf: Optional[int] = None, - ): - r"""Create the object from the informations on the run card. - - The thresholds are computed by :math:`(m_q \cdot k_q^{Thr})`. 
- - Parameters - ---------- - masses : - heavy quark masses squared - thresholds_ratios : - list of ratios between matching scales and masses squared - max_nf : - maximum number of flavors - - Returns - ------- - list - threshold list + matching_scales = MatchingScales([0] * (nf - 3) + [np.inf] * (6 - nf)) + origin = (mu2, nf) + return cls(matching_scales, origin) - """ - if len(masses) != 3: - raise ValueError("There have to be 3 quark masses") - if len(thresholds_ratios) != 3: - raise ValueError("There have to be 3 quark threshold ratios") - if max_nf is None: - max_nf = 6 - - thresholds = [] - for m, k in zip(masses, thresholds_ratios): - thresholds.append(m * k) - # cut array = simply reduce some thresholds - thresholds = thresholds[: max_nf - 3] - return thresholds - - def path( - self, - target: EPoint, - ): - """Get path from ``q2_from`` to ``q2_to``. - - Parameters - ---------- - q2_to: - target value of q2 - - Returns - ------- - list(PathSegment) - List of :class:`PathSegment` to go through in order to get from ``q2_from`` - to ``q2_to`` + def path(self, target: EPoint) -> Path: + """Determine the path to the target evolution point. + + Essentially, the path is always monotonic in the number of flavors, + increasing or decreasing the active flavors by one unit every time a + matching happens at the suitable scale. + + Examples + -------- + Since this can result in a counter-intuitive behavior, let's walk through some examples. + + Starting with the intuitive one: + >>> Atlas([10, 20, 30], (5, 3)).path((25, 5)) + [Segment(5, 10, 3), Segment(10, 20, 4), Segment(20, 25, 5)] + + If the number of flavor has been reached, it will continue walking + without matchin again. + >>> Atlas([10, 20, 30], (5, 3)).path((25, 4)) + [Segment(5, 10, 3), Segment(10, 25, 4)] + + It is irrelevant the scale you start from, to step from 3 to 4 you have + to cross the charm matching scale, whether this means walking upward or + downward. 
+ >>> Atlas([10, 20, 30], (15, 3)).path((25, 5)) + [Segment(15, 10, 3), Segment(10, 20, 4), Segment(20, 25, 5)] + + An actual backward evolution is defined by lowering the number of + flavors going from origin to target. + >>> Atlas([10, 20, 30], (25, 5)).path((5, 3)) + [Segment(25, 20, 5), Segment(20, 10, 4), Segment(10, 5, 3)] + + But the only difference is in the matching between two segments, since + a single segment is always happening in a fixed number of flavors, and + it is completely analogue for upward or downward evolution. + + Note + ---- + + Since the only task required to determine a path is interleaving the + correct matching scales, this is done by slicing the walls. """ - origin: EPoint = (self.q2_ref, self.nf_ref) + mu20, nf0 = self.origin + mu2f, nff = target # determine direction and python slice modifier - if target[1] < origin[1]: - rc = -1 - shift = -3 - else: - rc = 1 - shift = -2 + rc, shift = (-1, -3) if nff < nf0 else (1, -2) # join all necessary points in one list - boundaries = ( - [origin[0]] - + self.area_walls[origin[1] + shift : int(target[1]) + shift : rc] - + [target[0]] - ) - segs = [ - PathSegment(boundaries[i], q2, origin[1] + i * rc) - for i, q2 in enumerate(boundaries[1:]) + boundaries = [mu20] + self.walls[nf0 + shift : nff + shift : rc] + [mu2f] + + return [ + Segment(boundaries[i], mu2, nf0 + i * rc) + for i, mu2 in enumerate(boundaries[1:]) ] - return segs - def nf(self, q2): - """Find the number of flavors active at the given scale. - Parameters - ---------- - q2 : float - reference scale +def nf_default(mu2: SquaredScale, atlas: Atlas) -> FlavorsNumber: + r"""Determine the number of active flavors in the *default flow*. - Returns - ------- - int - number of active flavors + Default flow is defined by the natural sorting of the matching scales: - """ - ref_idx = np.digitize(q2, self.area_walls) - return 2 + ref_idx + .. 
math:: + + \mu_c < \mu_b < \mu_t + + So, the flow is defined starting with 3 flavors below the charm matching, + and increasing by one every time a matching scale is passed while + increasing the scale. + + """ + ref_idx = np.digitize(mu2, atlas.walls) + return int(2 + ref_idx) -def is_downward_path(path: List[PathSegment]) -> bool: +def is_downward_path(path: Path) -> bool: """Determine if a path is downward. Criterias are: @@ -238,34 +156,12 @@ def is_downward_path(path: List[PathSegment]) -> bool: :attr:`PathSegment.is_downward_q2` - in :math:`Q^2` when just one single :class:`PathSegment` is given - Parameters - ---------- - path : - path - - Returns - ------- - bool - True for a downward path - """ if len(path) == 1: - return path[0].is_downward_q2 + return path[0].is_downward return path[1].nf < path[0].nf def flavor_shift(is_downward: bool) -> int: - """Determine the shift to number of light flavors. - - Parameters - ---------- - is_downward : bool - True for a downward path - - Returns - ------- - int - shift to number of light flavors which can be 3 or 4 - - """ + """Determine the shift to number of light flavors.""" return 4 if is_downward else 3 From 472b3bdedfac16f1d6af1a8f0f7c05658429445c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 19:37:48 +0200 Subject: [PATCH 009/154] Add matched path determination --- src/eko/thresholds.py | 40 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/src/eko/thresholds.py b/src/eko/thresholds.py index 8759b5624..8087a4858 100644 --- a/src/eko/thresholds.py +++ b/src/eko/thresholds.py @@ -1,18 +1,18 @@ r"""Holds the classes that define the |FNS|.""" import logging from dataclasses import dataclass -from typing import List +from typing import List, Union import numpy as np from .io.types import EvolutionPoint as EPoint -from .io.types import FlavorsNumber, SquaredScale +from .io.types import FlavorIndex, FlavorsNumber, SquaredScale from 
.quantities.heavy_quarks import MatchingScales logger = logging.getLogger(__name__) -@dataclass +@dataclass(frozen=True) class Segment: """Oriented path in the threshold landscape.""" @@ -36,6 +36,21 @@ def __str__(self): Path = List[Segment] +@dataclass(frozen=True) +class Matching: + """Matching between two different segments. + + The meaning of the flavor index is the PID of the corresponding heavy quark. + + """ + + scale: SquaredScale + hq: FlavorIndex + + +MatchedPath = List[Union[Segment, Matching]] + + class Atlas: r"""Holds information about the matching scales. @@ -127,6 +142,25 @@ def path(self, target: EPoint) -> Path: for i, mu2 in enumerate(boundaries[1:]) ] + def matched_path(self, target: EPoint) -> MatchedPath: + """Determine the path to the target, including matchings. + + In practice, just a wrapper around :meth:`path` adding the intermediate + matchings. + + """ + path = self.path(target) + + prev = path[0] + matched: MatchedPath = [prev] + for seq in path[1:]: + matching = Matching(prev.target, max(prev.nf, seq.nf)) + + matched.append(matching) + matched.append(seq) + + return matched + def nf_default(mu2: SquaredScale, atlas: Atlas) -> FlavorsNumber: r"""Determine the number of active flavors in the *default flow*. 
From f95cc1e931193a85a28c4e40129d842dc1fdf16d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 19:38:32 +0200 Subject: [PATCH 010/154] Rename thresholds file --- src/eko/{thresholds.py => matchings.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/eko/{thresholds.py => matchings.py} (100%) diff --git a/src/eko/thresholds.py b/src/eko/matchings.py similarity index 100% rename from src/eko/thresholds.py rename to src/eko/matchings.py From 78d9db9e501450932aabad662cb6946954047363 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 20:56:40 +0200 Subject: [PATCH 011/154] Fix runcards, rearrange some content --- src/eko/io/runcards.py | 155 ++++++----------------------------------- src/eko/io/struct.py | 112 ++++++++++++++++++++++++++++- 2 files changed, 133 insertions(+), 134 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 364e93285..33b0a76f7 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -5,19 +5,18 @@ Squares are consistenly taken inside. """ -from dataclasses import dataclass, fields +from dataclasses import dataclass from math import nan from typing import List, Optional, Union import numpy as np import numpy.typing as npt -from eko.thresholds import ThresholdsAtlas - from .. import basis_rotation as br from .. import interpolation, msbar_masses from .. 
import version as vmod from ..couplings import couplings_mod_ev +from ..matchings import Atlas, nf_default from ..quantities import heavy_quarks as hq from ..quantities.couplings import CouplingsInfo from ..quantities.heavy_quarks import HeavyInfo, QuarkMassScheme @@ -51,23 +50,6 @@ class TheoryCard(DictLike): """|N3LO| anomalous dimension variation: ``(gg_var, gq_var, qg_var, qq_var)``.""" -def masses(theory: TheoryCard, evmeth: EvolutionMethod): - """Compute masses in the chosen scheme.""" - if theory.heavy.masses_scheme is QuarkMassScheme.MSBAR: - return msbar_masses.compute( - theory.heavy.masses, - theory.couplings, - theory.order, - couplings_mod_ev(evmeth), - np.power(theory.heavy.matching_ratios, 2.0).tolist(), - xif2=theory.xif**2, - ).tolist() - if theory.heavy.masses_scheme is QuarkMassScheme.POLE: - return [mq.value**2 for mq in theory.heavy.masses] - - raise ValueError(f"Unknown mass scheme '{theory.heavy.masses_scheme}'") - - @dataclass class Debug(DictLike): """Debug configurations.""" @@ -108,113 +90,6 @@ class Configs(DictLike): """Number of cores used to parallelize integration.""" -@dataclass -class Rotations(DictLike): - """Rotations related configurations. - - Here "Rotation" is intended in a broad sense: it includes both rotations in - flavor space (labeled with suffix `pids`) and in :math:`x`-space (labeled - with suffix `grid`). - Rotations in :math:`x`-space correspond to reinterpolate the result on a - different basis of polynomials. 
- - """ - - xgrid: interpolation.XGrid - """Internal momentum fraction grid.""" - _targetgrid: Optional[interpolation.XGrid] = None - _inputgrid: Optional[interpolation.XGrid] = None - _targetpids: Optional[npt.NDArray] = None - _inputpids: Optional[npt.NDArray] = None - - def __post_init__(self): - """Adjust types when loaded from serialized object.""" - for attr in ("xgrid", "_inputgrid", "_targetgrid"): - value = getattr(self, attr) - if value is None: - continue - if isinstance(value, (np.ndarray, list)): - setattr(self, attr, interpolation.XGrid(value)) - elif not isinstance(value, interpolation.XGrid): - setattr(self, attr, interpolation.XGrid.load(value)) - - @property - def pids(self): - """Internal flavor basis, used for computation.""" - return np.array(br.flavor_basis_pids) - - @property - def inputpids(self) -> npt.NDArray: - """Provide pids expected on the input PDF.""" - if self._inputpids is None: - return self.pids - return self._inputpids - - @inputpids.setter - def inputpids(self, value): - self._inputpids = value - - @property - def targetpids(self) -> npt.NDArray: - """Provide pids corresponding to the output PDF.""" - if self._targetpids is None: - return self.pids - return self._targetpids - - @targetpids.setter - def targetpids(self, value): - self._targetpids = value - - @property - def inputgrid(self) -> interpolation.XGrid: - """Provide :math:`x`-grid expected on the input PDF.""" - if self._inputgrid is None: - return self.xgrid - return self._inputgrid - - @inputgrid.setter - def inputgrid(self, value: interpolation.XGrid): - self._inputgrid = value - - @property - def targetgrid(self) -> interpolation.XGrid: - """Provide :math:`x`-grid corresponding to the output PDF.""" - if self._targetgrid is None: - return self.xgrid - return self._targetgrid - - @targetgrid.setter - def targetgrid(self, value: interpolation.XGrid): - self._targetgrid = value - - @classmethod - def from_dict(cls, dictionary: dict): - """Deserialize rotation. 
- - Load from full state, but with public names. - - """ - d = dictionary.copy() - for f in fields(cls): - if f.name.startswith("_"): - d[f.name] = d.pop(f.name[1:]) - return cls._from_dict(d) - - @property - def raw(self): - """Serialize rotation. - - Pass through interfaces, access internal values but with a public name. - - """ - d = self._raw() - for key in d.copy(): - if key.startswith("_"): - d[key[1:]] = d.pop(key) - - return d - - @dataclass class OperatorCard(DictLike): """Operator Card info.""" @@ -414,8 +289,24 @@ def flavored_mugrid(mugrid: list, masses: list, matching_ratios: list): since it is possible to have PDFs in different |FNS| at the same scales. """ - tc = ThresholdsAtlas( - masses=(np.array(masses) ** 2).tolist(), - thresholds_ratios=(np.array(matching_ratios) ** 2).tolist(), - ) - return [(mu, int(tc.nf(mu**2))) for mu in mugrid] + matchings = (np.array(masses) * np.array(matching_ratios)) ** 2 + atlas = Atlas(matchings.tolist(), (0.0, 0)) + return [(mu, nf_default(mu**2, atlas)) for mu in mugrid] + + +# TODO: move to a more suitable place +def masses(theory: TheoryCard, evmeth: EvolutionMethod): + """Compute masses in the chosen scheme.""" + if theory.heavy.masses_scheme is QuarkMassScheme.MSBAR: + return msbar_masses.compute( + theory.heavy.masses, + theory.couplings, + theory.order, + couplings_mod_ev(evmeth), + np.power(theory.heavy.matching_ratios, 2.0).tolist(), + xif2=theory.xif**2, + ).tolist() + if theory.heavy.masses_scheme is QuarkMassScheme.POLE: + return [mq.value**2 for mq in theory.heavy.masses] + + raise ValueError(f"Unknown mass scheme '{theory.heavy.masses_scheme}'") diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 2580ff2a2..9dbaff131 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -9,7 +9,7 @@ import shutil import tarfile import tempfile -from dataclasses import dataclass +from dataclasses import dataclass, fields from typing import BinaryIO, Dict, List, Optional import lz4.frame @@ -18,11 
+18,12 @@ import numpy.typing as npt import yaml +from .. import basis_rotation as br from .. import interpolation from .. import version as vmod from . import exceptions, raw from .dictlike import DictLike -from .runcards import OperatorCard, Rotations, TheoryCard +from .runcards import OperatorCard, TheoryCard from .types import EvolutionPoint as EPoint from .types import SquaredScale @@ -356,6 +357,113 @@ def write(self): return self.open and not self.readonly +@dataclass +class Rotations(DictLike): + """Rotations related configurations. + + Here "Rotation" is intended in a broad sense: it includes both rotations in + flavor space (labeled with suffix `pids`) and in :math:`x`-space (labeled + with suffix `grid`). + Rotations in :math:`x`-space correspond to reinterpolate the result on a + different basis of polynomials. + + """ + + xgrid: interpolation.XGrid + """Internal momentum fraction grid.""" + _targetgrid: Optional[interpolation.XGrid] = None + _inputgrid: Optional[interpolation.XGrid] = None + _targetpids: Optional[npt.NDArray] = None + _inputpids: Optional[npt.NDArray] = None + + def __post_init__(self): + """Adjust types when loaded from serialized object.""" + for attr in ("xgrid", "_inputgrid", "_targetgrid"): + value = getattr(self, attr) + if value is None: + continue + if isinstance(value, (np.ndarray, list)): + setattr(self, attr, interpolation.XGrid(value)) + elif not isinstance(value, interpolation.XGrid): + setattr(self, attr, interpolation.XGrid.load(value)) + + @property + def pids(self): + """Internal flavor basis, used for computation.""" + return np.array(br.flavor_basis_pids) + + @property + def inputpids(self) -> npt.NDArray: + """Provide pids expected on the input PDF.""" + if self._inputpids is None: + return self.pids + return self._inputpids + + @inputpids.setter + def inputpids(self, value): + self._inputpids = value + + @property + def targetpids(self) -> npt.NDArray: + """Provide pids corresponding to the output PDF.""" + if 
self._targetpids is None: + return self.pids + return self._targetpids + + @targetpids.setter + def targetpids(self, value): + self._targetpids = value + + @property + def inputgrid(self) -> interpolation.XGrid: + """Provide :math:`x`-grid expected on the input PDF.""" + if self._inputgrid is None: + return self.xgrid + return self._inputgrid + + @inputgrid.setter + def inputgrid(self, value: interpolation.XGrid): + self._inputgrid = value + + @property + def targetgrid(self) -> interpolation.XGrid: + """Provide :math:`x`-grid corresponding to the output PDF.""" + if self._targetgrid is None: + return self.xgrid + return self._targetgrid + + @targetgrid.setter + def targetgrid(self, value: interpolation.XGrid): + self._targetgrid = value + + @classmethod + def from_dict(cls, dictionary: dict): + """Deserialize rotation. + + Load from full state, but with public names. + + """ + d = dictionary.copy() + for f in fields(cls): + if f.name.startswith("_"): + d[f.name] = d.pop(f.name[1:]) + return cls._from_dict(d) + + @property + def raw(self): + """Serialize rotation. + + Pass through interfaces, access internal values but with a public name. + + """ + d = self._raw() + for key in d.copy(): + if key.startswith("_"): + d[key[1:]] = d.pop(key) + + return d + + @dataclass class Metadata(DictLike): """Manage metadata, and keep them synced on disk. From 48e7741bb321f4f5b94ca90bfa6bfe843032d075 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 21:22:24 +0200 Subject: [PATCH 012/154] Fix couplings --- src/eko/couplings.py | 38 ++++++++++++++++---------------------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/src/eko/couplings.py b/src/eko/couplings.py index 2022ae7c4..dc8279b90 100644 --- a/src/eko/couplings.py +++ b/src/eko/couplings.py @@ -15,9 +15,9 @@ import numpy as np import scipy -from . import constants, thresholds +from . 
import constants, matchings from .beta import b_qcd, b_qed, beta_qcd, beta_qed -from .io.types import EvolutionMethod, Order +from .io.types import EvolutionMethod, Order, SquaredScale from .quantities.couplings import CouplingEvolutionMethod, CouplingsInfo from .quantities.heavy_quarks import QuarkMassScheme @@ -424,7 +424,7 @@ def __init__( couplings: CouplingsInfo, order: Order, method: CouplingEvolutionMethod, - masses: List[float], + masses: List[SquaredScale], hqm_scheme: QuarkMassScheme, thresholds_ratios: Iterable[float], ): @@ -446,24 +446,18 @@ def assert_positive(name, var): self.method = method.value nf_ref = couplings.num_flavs_ref - max_nf = couplings.max_num_flavs scheme_name = hqm_scheme.name self.alphaem_running = couplings.em_running self.decoupled_running = False # create new threshold object self.a_ref = np.array(couplings.values) / 4.0 / np.pi # convert to a_s and a_em - self.thresholds = thresholds.ThresholdsAtlas( - masses, - couplings.scale**2, - nf_ref, - thresholds_ratios=thresholds_ratios, - max_nf=max_nf, - ) + matching_scales = (np.array(masses) * np.array(thresholds_ratios)).tolist() + self.atlas = matchings.Atlas(matching_scales, (couplings.scale**2, nf_ref)) self.hqm_scheme = scheme_name logger.info( "Strong Coupling: a_s(Β΅_R^2=%f)%s=%f=%f/(4Ο€)", - self.q2_ref, + self.mu2_ref, f"^(nf={nf_ref})" if nf_ref else "", self.a_ref[0], self.a_ref[0] * 4 * np.pi, @@ -472,7 +466,7 @@ def assert_positive(name, var): logger.info( "Electromagnetic Coupling: a_em(Β΅_R^2=%f)%s=%f=%f/(4Ο€)\nalphaem" " running: %r\ndecoupled running: %r", - self.q2_ref, + self.mu2_ref, f"^(nf={nf_ref})" if nf_ref else "", self.a_ref[1], self.a_ref[1] * 4 * np.pi, @@ -483,9 +477,9 @@ def assert_positive(name, var): self.cache = {} @property - def q2_ref(self): + def mu2_ref(self): """Return reference scale.""" - return self.thresholds.q2_ref + return self.atlas.origin[0] def unidimensional_exact(self, beta0, b_vec, u, a_ref, method, rtol): """Compute single coupling 
via decoupled |RGE|. @@ -734,23 +728,23 @@ def a( numpy.ndarray couplings :math:`a_i(\mu_R^2) = \frac{\alpha_i(\mu_R^2)}{4\pi}` """ - # Set up the path to follow in order to go from q2_0 to q2_ref + # Set up the path to follow in order to go from mu2_0 to mu2_ref final_a = self.a_ref.copy() - path = self.thresholds.path(scale_to, nf_to) - is_downward = thresholds.is_downward_path(path) - shift = thresholds.flavor_shift(is_downward) + path = self.atlas.path((scale_to, nf_to)) + is_downward = matchings.is_downward_path(path) + shift = matchings.flavor_shift(is_downward) for k, seg in enumerate(path): # skip a very short segment, but keep the matching - if not np.isclose(seg.q2_from, seg.q2_to): - new_a = self.compute(final_a, seg.nf, seg.q2_from, seg.q2_to) + if not np.isclose(seg.origin, seg.target): + new_a = self.compute(final_a, seg.nf, seg.origin, seg.target) else: new_a = final_a # apply matching conditions: see hep-ph/9706430 # - if there is yet a step to go if k < len(path) - 1: # q2_to is the threshold value - L = np.log(self.thresholds.thresholds_ratios[seg.nf - shift]) + L = np.log(self.atlas.walls[seg.nf - shift + 1]) m_coeffs = ( compute_matching_coeffs_down(self.hqm_scheme, seg.nf - 1) if is_downward From d3f0f5d9e9df6d550fab91636ee4d9ea546d26b1 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 21:42:21 +0200 Subject: [PATCH 013/154] Fix msbar masses and runner commons --- src/eko/io/runcards.py | 3 ++- src/eko/msbar_masses.py | 20 +++++++++----------- src/eko/runner/commons.py | 14 ++++---------- 3 files changed, 15 insertions(+), 22 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 33b0a76f7..53861a037 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -29,6 +29,7 @@ Order, RawCard, ScaleVariationsMethod, + SquaredScale, T, ) @@ -295,7 +296,7 @@ def flavored_mugrid(mugrid: list, masses: list, matching_ratios: list): # TODO: move to a more suitable place -def masses(theory: 
TheoryCard, evmeth: EvolutionMethod): +def masses(theory: TheoryCard, evmeth: EvolutionMethod) -> List[SquaredScale]: """Compute masses in the chosen scheme.""" if theory.heavy.masses_scheme is QuarkMassScheme.MSBAR: return msbar_masses.compute( diff --git a/src/eko/msbar_masses.py b/src/eko/msbar_masses.py index 36f966801..1793a7620 100644 --- a/src/eko/msbar_masses.py +++ b/src/eko/msbar_masses.py @@ -10,9 +10,9 @@ from .couplings import Couplings, invert_matching_coeffs from .gamma import gamma from .io.types import FlavorsNumber, Order +from .matchings import Atlas, flavor_shift, is_downward_path from .quantities.couplings import CouplingEvolutionMethod, CouplingsInfo from .quantities.heavy_quarks import HeavyQuarkMasses, QuarkMassRef, QuarkMassScheme -from .thresholds import ThresholdsAtlas, flavor_shift, is_downward_path def ker_exact(a0, a1, order, nf): @@ -297,13 +297,11 @@ def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=Non :math:`m_{\overline{MS}}(\mu_2)^2` """ - thr_atlas = ThresholdsAtlas( - np.array(strong_coupling.thresholds.area_walls)[1:-1], - q2m_ref, - nf_ref, - strong_coupling.thresholds.thresholds_ratios, + matching_scales = np.array(strong_coupling.atlas.walls)[1:-1] * np.array( + strong_coupling.thresholds.thresholds_ratios ) - path = thr_atlas.path(q2_to, nf_to) + atlas = Atlas(matching_scales.tolist(), (q2m_ref, nf_ref)) + path = atlas.path((q2_to, nf_to)) is_downward = is_downward_path(path) shift = flavor_shift(is_downward) @@ -311,14 +309,14 @@ def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=Non for k, seg in enumerate(path): # skip a very short segment, but keep the matching ker_evol = 1.0 - if not np.isclose(seg.q2_from, seg.q2_to): + if not np.isclose(seg.origin, seg.target): ker_evol = ( - ker_dispatcher(seg.q2_to, seg.q2_from, strong_coupling, xif2, seg.nf) + ker_dispatcher(seg.target, seg.origin, strong_coupling, xif2, seg.nf) ** 2 ) # apply matching condition if k < 
len(path) - 1: - L = np.log(thr_atlas.thresholds_ratios[seg.nf - shift]) + L = np.log(atlas.walls[seg.nf - shift + 1]) m_coeffs = ( compute_matching_coeffs_down(seg.nf - 1) if is_downward @@ -328,7 +326,7 @@ def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=Non for pto in range(1, strong_coupling.order[0]): # 0**0=1, from NNLO there is a matching also in this case for logpow in range(pto + 1): - as_thr = strong_coupling.a(seg.q2_to * xif2, seg.nf - shift + 4)[0] + as_thr = strong_coupling.a(seg.target * xif2, seg.nf - shift + 4)[0] matching += as_thr**pto * L**logpow * m_coeffs[pto, logpow] ker_evol *= matching ev_mass *= ker_evol diff --git a/src/eko/runner/commons.py b/src/eko/runner/commons.py index 7b49fab28..9635fd19e 100644 --- a/src/eko/runner/commons.py +++ b/src/eko/runner/commons.py @@ -6,7 +6,7 @@ from ..io import runcards from ..io.runcards import OperatorCard, TheoryCard from ..io.types import ScaleVariationsMethod -from ..thresholds import ThresholdsAtlas +from ..matchings import Atlas BANNER = r""" oooooooooooo oooo oooo \\ .oooooo. 
@@ -27,18 +27,12 @@ def interpolator(operator: OperatorCard) -> InterpolatorDispatcher: ) -def threshold_atlas(theory: TheoryCard, operator: OperatorCard) -> ThresholdsAtlas: +def threshold_atlas(theory: TheoryCard, operator: OperatorCard) -> Atlas: """Create thresholds atlas from runcards.""" - thresholds_ratios = np.power(theory.heavy.matching_ratios, 2.0) # TODO: cache result masses = runcards.masses(theory, operator.configs.evolution_method) - return ThresholdsAtlas( - masses=masses, - q2_ref=operator.mu20, - nf_ref=theory.heavy.num_flavs_init, - thresholds_ratios=thresholds_ratios, - max_nf=theory.heavy.num_flavs_max_pdf, - ) + matching_scales = np.power(theory.heavy.matching_ratios, 2.0) * np.array(masses) + return Atlas(matching_scales.tolist(), (operator.mu20, theory.heavy.num_flavs_init)) def couplings(theory: TheoryCard, operator: OperatorCard) -> Couplings: From 8324ab9c0e1f18ba14fdbe3c4bb5efd2dff85e4e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 21 Apr 2023 22:26:29 +0200 Subject: [PATCH 014/154] Fix matching tests --- tests/eko/test_matchings.py | 141 +++++++++++++++++++++++++++ tests/eko/test_thresholds.py | 183 ----------------------------------- 2 files changed, 141 insertions(+), 183 deletions(-) create mode 100644 tests/eko/test_matchings.py delete mode 100644 tests/eko/test_thresholds.py diff --git a/tests/eko/test_matchings.py b/tests/eko/test_matchings.py new file mode 100644 index 000000000..cbca16cae --- /dev/null +++ b/tests/eko/test_matchings.py @@ -0,0 +1,141 @@ +""" + Tests for the threshold class +""" +from dataclasses import astuple + +import numpy as np + +from eko.matchings import Atlas, Segment, flavor_shift, is_downward_path, nf_default +from eko.quantities.heavy_quarks import MatchingScales + + +class TestPathSegment: + def test_tuple(self): + p = Segment(0, 1, 3) + assert astuple(p) == (0, 1, 3) + # is hashable? 
+ d = {} + d[p] = 1 + assert d[p] == 1 + + def test_str(self): + p = Segment(0, 1, 3) + s = str(p) + assert s.index("0") > 0 + assert s.index("3") > 0 + assert s.index("3") > 0 + + +ZERO = (0.0, 0) + + +class TestAtlas: + def test_init(self): + # 3 thr + tc3 = Atlas(MatchingScales([1, 2, 3]), ZERO) + assert tc3.walls == [0, 1, 2, 3, np.inf] + # 2 thr + tc2 = Atlas(MatchingScales([0, 2, 3]), ZERO) + assert tc2.walls == [0, 0, 2, 3, np.inf] + + def test_nfref(self): + # weird but fine + tc = Atlas(MatchingScales([1, 2, 3]), (1.5, 3)) + p = tc.path((1.5, 4)) + assert len(p) == 2 + assert astuple(p[0]) == (1.5, 1.0, 3) + assert astuple(p[1]) == (1.0, 1.5, 4) + + def test_str(self): + walls = MatchingScales([1.23, 9.87, 14.54]) + stc3 = str(Atlas(walls, ZERO)) + + for w in walls: + assert f"{w:.2e}" in stc3 + + def test_ffns(self): + tc3 = Atlas.ffns(3, 0.0) + assert tc3.walls == [0] + [np.inf] * 4 + tc4 = Atlas.ffns(4, 3.0) + assert tc4.walls == [0] * 2 + [np.inf] * 3 + assert len(tc4.path((2.0, 4))) == 1 + + def test_path_3thr(self): + tc = Atlas(MatchingScales([1, 2, 3]), (0.5, 3)) + p1 = tc.path((0.7, 3)) + assert len(p1) == 1 + assert p1[0] == (0.5, 0.7, 3) + + def test_path_3thr_backward(self): + tc = Atlas(MatchingScales([1, 2, 3]), (2.5, 5)) + p1 = tc.path((0.7, 3)) + assert len(p1) == 3 + assert astuple(p1[0]) == (2.5, 2.0, 5) + assert astuple(p1[1]) == (2.0, 1.0, 4) + assert astuple(p1[2]) == (1.0, 0.7, 3) + + def test_path_3thr_on_threshold(self): + tc = Atlas(MatchingScales([1, 2, 3]), (0.5, 3)) + # on the right of mc + p3 = tc.path((1.0, 4)) + assert len(p3) == 2 + assert p3[0].nf == 3 + assert astuple(p3[1]) == (1.0, 1.0, 4) + # on the left of mc + p4 = tc.path((1.0, 3)) + assert len(p4) == 1 + assert p4[0].nf == 3 + + def test_path_3thr_weird(self): + tc = Atlas(MatchingScales([1, 2, 3]), (0.5, 3)) + # the whole distance underground + p6 = tc.path((3.5, 3)) + assert len(p6) == 1 + assert astuple(p6[0]) == (0.5, 3.5, 3) + mu2_from = 3.5 + mu2_to = 0.7 + # 
0 + # 1 <----------- + # ---> 2 + # 3 < ----- + # | | | + origin = (mu2_from, 3) + target = (mu2_to, 5) + p7 = Atlas(MatchingScales([1, 2, 3]), origin).path(target) + assert [s.nf for s in p7] == list(range(3, 5 + 1)) + assert p7[0].origin == mu2_from + assert p7[2].target == mu2_to + # 0 + # 1 <----------- + # ---> 2 -> 3 + # 4 < --------- + # | | | + target = (mu2_to, 6) + p8 = Atlas(MatchingScales([1, 2, 3]), origin).path(target) + assert [s.nf for s in p8] == list(range(3, 6 + 1)) + assert p8[0].origin == mu2_from + assert p8[3].target == mu2_to + + +def test_nf(): + nf4 = Atlas.ffns(4, 0.0) + for q2 in [1.0, 1e1, 1e2, 1e3, 1e4]: + assert nf_default(q2, nf4) == 4 + at = Atlas(MatchingScales([1, 2, 3]), (0.5, 3)) + assert nf_default(0.9, at) == 3 + assert nf_default(1.1, at) == 4 + + +def test_downward_path(): + thr_atlas = Atlas(np.power([2, 3, 4], 2).tolist(), (91**2, 3)) + mu2_to = 5**2 + path_3 = thr_atlas.path((mu2_to, 3)) + # path_3 is downward in q2 + is_downward = is_downward_path(path_3) + assert is_downward is True + assert flavor_shift(is_downward) == 4 + # path_6 is downward in q2, but forward in nf + path_6 = thr_atlas.path((mu2_to, 6)) + is_downward = is_downward_path(path_6) + assert is_downward is False + assert flavor_shift(is_downward) == 3 diff --git a/tests/eko/test_thresholds.py b/tests/eko/test_thresholds.py deleted file mode 100644 index 0e3d9f699..000000000 --- a/tests/eko/test_thresholds.py +++ /dev/null @@ -1,183 +0,0 @@ -""" - Tests for the threshold class -""" -import numpy as np -import pytest - -from eko.thresholds import PathSegment, ThresholdsAtlas, flavor_shift, is_downward_path - - -class TestPathSegment: - def test_tuple(self): - p = PathSegment(0, 1, 3) - assert p.tuple == (0, 1, 3) - # is hashable? 
- d = {} - d[p.tuple] = 1 - assert d[p.tuple] == 1 - - def test_str(self): - p = PathSegment(0, 1, 3) - s = str(p) - assert s.index("0") > 0 - assert s.index("3") > 0 - assert s.index("3") > 0 - - -class TestThresholdsAtlas: - def test_init(self): - # 3 thr - tc3 = ThresholdsAtlas([1, 2, 3]) - assert tc3.area_walls == [0, 1, 2, 3, np.inf] - # 2 thr - tc2 = ThresholdsAtlas([0, 2, 3]) - assert tc2.area_walls == [0, 0, 2, 3, np.inf] - - # errors - with pytest.raises(ValueError): - ThresholdsAtlas([1.0, 0.0]) - - def test_nfref(self): - # errors - with pytest.raises(ValueError, match="Without"): - ThresholdsAtlas([0, 2, 3], nf_ref=3) - with pytest.raises(ValueError, match="starts"): - ThresholdsAtlas([0, 2, 3], 1, 3) - with pytest.raises(ValueError, match="stops"): - ThresholdsAtlas([0, np.inf, np.inf], 1, 5) - # weird but fine - tc = ThresholdsAtlas([1, 2, 3], 1.5, 3) - p = tc.path(1.5) - assert len(p) == 2 - assert p[0].q2_from == 1.5 - assert p[0].q2_to == 1.0 - assert p[0].nf == 3 - assert p[1].q2_from == 1.0 - assert p[1].q2_to == 1.5 - assert p[1].nf == 4 - - def test_str(self): - walls = [1.23, 9.87, 14.54] - stc3 = str(ThresholdsAtlas(walls)) - - for w in walls: - assert f"{w:.2e}" in stc3 - - def test_build_area_walls(self): - for k in range(3, 6 + 1): - walls = ThresholdsAtlas.build_area_walls([1, 2, 3], [1, 2, 3], k) - assert len(walls) == k - 3 - - with pytest.raises(ValueError): - ThresholdsAtlas.build_area_walls([1, 2], [1, 2, 3], 4) - with pytest.raises(ValueError): - ThresholdsAtlas.build_area_walls([1, 2, 3], [1, 2], 4) - with pytest.raises(ValueError): - ThresholdsAtlas.build_area_walls([1, 2], [1, 2], 4) - - def test_ffns(self): - tc3 = ThresholdsAtlas.ffns(3) - assert tc3.area_walls == [0] + [np.inf] * 4 - tc4 = ThresholdsAtlas.ffns(4) - assert tc4.area_walls == [0] * 2 + [np.inf] * 3 - assert len(tc4.path(q2_to=2.0, q2_from=3.0)) == 1 - - def test_path_3thr(self): - tc = ThresholdsAtlas([1, 2, 3], 0.5) - p1 = tc.path(0.7) - assert len(p1) == 1 - 
assert p1[0].q2_from == 0.5 - assert p1[0].q2_to == 0.7 - assert p1[0].nf == 3 - - p2 = tc.path(1.5, q2_from=2.5) - assert len(p2) == 2 - assert p2[0].nf == 5 - assert p2[1].nf == 4 - - def test_path_3thr_backward(self): - tc = ThresholdsAtlas([1, 2, 3], 2.5) - p1 = tc.path(0.7) - assert len(p1) == 3 - assert p1[0].tuple == (2.5, 2.0, 5) - assert p1[1].tuple == (2.0, 1.0, 4) - assert p1[2].tuple == (1.0, 0.7, 3) - - def test_path_3thr_on_threshold(self): - tc = ThresholdsAtlas([1, 2, 3], 0.5) - # on the right of mc - p3 = tc.path(1.0, nf_to=4) - assert len(p3) == 2 - assert p3[0].nf == 3 - assert p3[1].tuple == (1.0, 1.0, 4) - # on the left of mc - p4 = tc.path(1.0, nf_to=3) - assert len(p4) == 1 - assert p4[0].nf == 3 - # on the left of mt, across mb - p5 = tc.path(1.5, q2_from=3.0, nf_from=5) - assert len(p5) == 2 - assert p5[0].nf == 5 - assert p5[1].nf == 4 - - def test_path_3thr_weird(self): - tc = ThresholdsAtlas([1, 2, 3], 0.5) - # the whole distance underground - p6 = tc.path(3.5, nf_to=3) - assert len(p6) == 1 - assert p6[0].tuple == (0.5, 3.5, 3) - q2_from = 3.5 - q2_to = 0.7 - # 0 - # 1 <----------- - # ---> 2 - # 3 < ----- - # | | | - p7 = tc.path(q2_to=q2_to, nf_to=5, q2_from=q2_from, nf_from=3) - assert len(p7) == 3 - assert p7[0].nf == 3 - assert p7[1].nf == 4 - assert p7[2].nf == 5 - assert p7[0].q2_from == q2_from - assert p7[2].q2_to == q2_to - # 0 - # 1 <----------- - # ---> 2 -> 3 - # 4 < --------- - # | | | - p8 = tc.path(q2_to=q2_to, nf_to=6, q2_from=q2_from, nf_from=3) - assert len(p8) == 4 - assert p8[0].nf == 3 - assert p8[1].nf == 4 - assert p8[2].nf == 5 - assert p8[3].nf == 6 - assert p8[0].q2_from == q2_from - assert p8[3].q2_to == q2_to - - def test_nf(self): - nf4 = ThresholdsAtlas.ffns(4) - for q2 in [1.0, 1e1, 1e2, 1e3, 1e4]: - assert nf4.nf(q2) == 4 - ta = ThresholdsAtlas([1, 2, 3], 0.5) - assert ta.nf(0.9) == 3 - assert ta.nf(1.1) == 4 - - -def test_downward_path(): - thr_atlas = ThresholdsAtlas( - masses=np.power([2, 3, 4], 2), - 
q2_ref=91**2, - nf_ref=3, - thresholds_ratios=[1, 1, 1], - ) - q2_to = 5**2 - path_3 = thr_atlas.path(q2_to, nf_to=3) - # path_3 is downward in q2 - is_downward = is_downward_path(path_3) - assert is_downward is True - assert flavor_shift(is_downward) == 4 - # path_6 is downward in q2, but forward in nf - path_6 = thr_atlas.path(q2_to, nf_to=6) - is_downward = is_downward_path(path_6) - assert is_downward is False - assert flavor_shift(is_downward) == 3 From 99afc738383318b35bc86ac5a583747aea1395e1 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 17:26:16 +0200 Subject: [PATCH 015/154] Fix import in grid.py --- src/eko/evolution_operator/grid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index ff5b8830f..9532593df 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -15,7 +15,7 @@ from .. import member from .. import scale_variations as sv from ..io.runcards import Configs, Debug -from ..thresholds import flavor_shift, is_downward_path +from ..matchings import flavor_shift, is_downward_path from . 
import Operator, flavors, matching_condition, physical from .operator_matrix_element import OperatorMatrixElement From 8bd132fbae8265b023580c351fbb1d508c225400 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 17:42:59 +0200 Subject: [PATCH 016/154] Add Atlas.normalize and more couplings test fixes --- src/eko/matchings.py | 10 ++++++++-- tests/eko/test_couplings.py | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index 8087a4858..114a60fbb 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -62,10 +62,16 @@ class Atlas: def __init__(self, matching_scales: MatchingScales, origin: EPoint): """Create basic atlas.""" self.walls = [0] + matching_scales + [np.inf] - self.origin = origin + self.origin = self.normalize(origin) logger.info(str(self)) + def normalize(self, target: EPoint) -> EPoint: + """Fill number of flavors if needed.""" + if target[1] is not None: + return target + return (target[0], nf_default(target[0], self)) + def __str__(self): """Textual representation, mainly for logging purpose.""" walls = " - ".join([f"{w:.2e}" for w in self.walls]) @@ -129,7 +135,7 @@ def path(self, target: EPoint) -> Path: """ mu20, nf0 = self.origin - mu2f, nff = target + mu2f, nff = self.normalize(target) # determine direction and python slice modifier rc, shift = (-1, -3) if nff < nf0 else (1, -2) diff --git a/tests/eko/test_couplings.py b/tests/eko/test_couplings.py index 14f543dfe..c78a51914 100644 --- a/tests/eko/test_couplings.py +++ b/tests/eko/test_couplings.py @@ -70,7 +70,7 @@ def test_init(self): hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) - assert sc.q2_ref == muref**2 + assert sc.mu2_ref == muref**2 assert sc.a_ref[0] == alpharef[0] / 4.0 / np.pi assert sc.a(muref**2)[0] == alpharef[0] / (4.0 * np.pi) assert sc.a_ref[1] == alpharef[1] / 4.0 / np.pi From 014a7948ed41db5bc78d690df9ce52ee7e5ad8a4 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: 
Mon, 24 Apr 2023 17:46:34 +0200 Subject: [PATCH 017/154] Fix test_couplings --- tests/eko/test_matchings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/eko/test_matchings.py b/tests/eko/test_matchings.py index cbca16cae..0e6a77174 100644 --- a/tests/eko/test_matchings.py +++ b/tests/eko/test_matchings.py @@ -64,7 +64,7 @@ def test_path_3thr(self): tc = Atlas(MatchingScales([1, 2, 3]), (0.5, 3)) p1 = tc.path((0.7, 3)) assert len(p1) == 1 - assert p1[0] == (0.5, 0.7, 3) + assert astuple(p1[0]) == (0.5, 0.7, 3) def test_path_3thr_backward(self): tc = Atlas(MatchingScales([1, 2, 3]), (2.5, 5)) From d18fb003619cda38efc6b8a075a5e74577d47d81 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 17:51:32 +0200 Subject: [PATCH 018/154] Pin nf in example cards --- src/ekobox/cards.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ekobox/cards.py b/src/ekobox/cards.py index ef7ada109..474e7d931 100644 --- a/src/ekobox/cards.py +++ b/src/ekobox/cards.py @@ -14,11 +14,11 @@ alphas=0.118, alphaem=0.007496252, scale=91.2, - num_flavs_ref=None, + num_flavs_ref=5, max_num_flavs=6, ), heavy=dict( - num_flavs_init=None, + num_flavs_init=4, num_flavs_max_pdf=6, intrinsic_flavors=[4], masses=[ReferenceRunning([mq, nan]) for mq in (2.0, 4.5, 173.07)], From dd15ec8bd87803821c3a3f0e8123066961a3fd9b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 18:14:44 +0200 Subject: [PATCH 019/154] Minimal fix to recipes.create --- src/eko/runner/recipes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index c26776688..55bc0daea 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -41,5 +41,5 @@ def create(eko: EKO): # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 - blocks = tc.path(*ep) + blocks = tc.path(ep) terminal.append(blocks.pop()) 
From 7b0aac06810fcd38e5f1af11bbbeb7ba8b74fee7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 18:26:02 +0200 Subject: [PATCH 020/154] Rename more q2->mu2 --- src/eko/evolution_operator/__init__.py | 2 +- tests/eko/evolution_operator/test_init.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/eko/evolution_operator/__init__.py b/src/eko/evolution_operator/__init__.py index df79e8c4f..222d827d5 100644 --- a/src/eko/evolution_operator/__init__.py +++ b/src/eko/evolution_operator/__init__.py @@ -703,7 +703,7 @@ def compute_aem_list(self): as1 = self.a_s[1] aem0 = self.a_em[0] aem1 = self.a_em[1] - q2ref = self.managers["couplings"].q2_ref + q2ref = self.managers["couplings"].mu2_ref delta_from = abs(self.q2_from - q2ref) delta_to = abs(self.q2_to - q2ref) # I compute the values in aem_list starting from the mu2 diff --git a/tests/eko/evolution_operator/test_init.py b/tests/eko/evolution_operator/test_init.py index e0a65b33a..0aab4f9fc 100644 --- a/tests/eko/evolution_operator/test_init.py +++ b/tests/eko/evolution_operator/test_init.py @@ -190,7 +190,7 @@ def test_quad_ker(monkeypatch): class FakeCoupling: def __init__(self): self.alphaem_running = None - self.q2_ref = 0.0 + self.mu2_ref = 10.0 def a(self, scale_to=None, nf_to=None): return (0.1, 0.01) From 717516ba7ee2afab420f6e4122e2939eee87640b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Mon, 24 Apr 2023 18:49:32 +0200 Subject: [PATCH 021/154] Fix test/eko/io/test_runcards --- src/eko/io/runcards.py | 26 +++++++++++++++++++++----- tests/eko/io/test_runcards.py | 13 +++++++------ 2 files changed, 28 insertions(+), 11 deletions(-) diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 53861a037..27b6c54ce 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -173,6 +173,8 @@ def new_theory(self): new["order"] = [old["PTO"] + 1, old["QED"]] alphaem = self.fallback(old.get("alphaqed"), old.get("alphaem"), default=0.0) + ms = 
self.heavies("m%s", old) + mus = self.heavies("Qm%s", old) new["couplings"] = dict( alphas=old["alphas"], alphaem=alphaem, @@ -181,7 +183,9 @@ max_num_flavs=old["MaxNfAs"], ) new["heavy"] = { - "num_flavs_init": old["nf0"], + "num_flavs_init": nf_default(old["Qref"] ** 2.0, default_atlas(ms, mus)) + if old["nf0"] is None + else old["nf0"], "num_flavs_max_pdf": old["MaxNfPdf"], "matching_ratios": self.heavies("k%sThr", old), "masses_scheme": old["HQ"], @@ -191,8 +195,6 @@ if old.get(f"i{q}".upper()) == 1: intrinsic.append(idx + 4) new["heavy"]["intrinsic_flavors"] = intrinsic - ms = self.heavies("m%s", old) - mus = self.heavies("Qm%s", old) if old["HQ"] == "POLE": new["heavy"]["masses"] = [[m, nan] for m in ms] elif old["HQ"] == "MSBAR": @@ -276,6 +278,21 @@ def update(theory: Union[RawCard, TheoryCard], operator: Union[RawCard, Operator return cards.new_theory, cards.new_operator +def default_atlas(masses: list, matching_ratios: list): + r"""Create default landscape. + + This method should not be used to write new runcards, but rather to have a + consistent default for comparison with other software and existing PDF + sets. + There is no one-to-one relation between number of running flavors and final + scales, unless matchings are all applied. But this is a custom choice, + since it is possible to have PDFs in different |FNS| at the same scales. + + """ + matchings = (np.array(masses) * np.array(matching_ratios)) ** 2 + return Atlas(matchings.tolist(), (0.0, 0)) + + def flavored_mugrid(mugrid: list, masses: list, matching_ratios: list): r"""Upgrade :math:`\mu^2` grid to contain also target number flavors. @@ -290,8 +307,7 @@ def flavored_mugrid(mugrid: list, masses: list, matching_ratios: list): since it is possible to have PDFs in different |FNS| at the same scales. 
""" - matchings = (np.array(masses) * np.array(matching_ratios)) ** 2 - atlas = Atlas(matchings.tolist(), (0.0, 0)) + atlas = default_atlas(masses, matching_ratios) return [(mu, nf_default(mu**2, atlas)) for mu in mugrid] diff --git a/tests/eko/io/test_runcards.py b/tests/eko/io/test_runcards.py index 237f814fb..a96722614 100644 --- a/tests/eko/io/test_runcards.py +++ b/tests/eko/io/test_runcards.py @@ -9,6 +9,7 @@ from eko import interpolation from eko.io import runcards as rc +from eko.io.struct import Rotations from ekomark.data.operators import default_card as operator_card @@ -16,19 +17,19 @@ class TestRotations: XGRID_TEST = [1e-3, 1e-2, 1e-1, 1.0] def test_serialization(self): - rot = rc.Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) d = rot.raw rot1 = rot.from_dict(d) - for f in fields(rc.Rotations): + for f in fields(Rotations): assert getattr(rot, f.name) == getattr(rot1, f.name) assert d["targetgrid"] is None assert "_targetgrid" not in d def test_pids(self): - rot = rc.Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) # no check on correctness of value set rot.inputpids = [0, 1] @@ -38,7 +39,7 @@ def test_pids(self): assert np.all(rot.targetpids == rot.pids) def test_grids(self): - rot = rc.Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) # no check on correctness of value set rot.inputgrid = interpolation.XGrid([0.1, 1]) @@ -49,12 +50,12 @@ def test_grids(self): def test_flavored_mu2grid(): - mugrid = list(range(0, 40, 5)) + mugrid = list(range(5, 40, 5)) masses = [10, 20, 30] ratios = [1, 1, 1] flavored = rc.flavored_mugrid(mugrid, masses, ratios) - assert pytest.approx([flav for _, flav in flavored]) == [3, 3, 4, 4, 5, 5, 6, 6] + assert pytest.approx([flav for _, flav in flavored]) == [3, 4, 4, 5, 5, 6, 6] # check we can dump stream = StringIO() From 
c22e82dc991754fc5e94d19f0790e151bba1449a Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 25 Apr 2023 18:33:41 +0200 Subject: [PATCH 022/154] Fix legacy loading --- src/eko/io/legacy.py | 38 +++++++++++++++++++++--------- src/eko/quantities/heavy_quarks.py | 4 ++-- tests/eko/io/test_init.py | 13 +++++----- 3 files changed, 36 insertions(+), 19 deletions(-) diff --git a/src/eko/io/legacy.py b/src/eko/io/legacy.py index 58899eae4..1df9941b1 100644 --- a/src/eko/io/legacy.py +++ b/src/eko/io/legacy.py @@ -6,17 +6,20 @@ import tarfile import tempfile import warnings +from typing import Dict, List import lz4.frame import numpy as np -import numpy.typing as npt import yaml from eko.interpolation import XGrid +from eko.io.runcards import flavored_mugrid +from eko.quantities.heavy_quarks import HeavyInfo, HeavyQuarkMasses, MatchingRatios from . import raw from .dictlike import DictLike from .struct import EKO, Operator +from .types import EvolutionPoint as EPoint from .types import RawCard @@ -55,7 +58,9 @@ def load_tar(source: os.PathLike, dest: os.PathLike, errors: bool = False): op5 = metaold.get("Q2grid") if op5 is None: op5 = metaold["mu2grid"] - grid = op5to4(op5, arrays) + grid = op5to4( + flavored_mugrid(op5, theory.heavy.masses, theory.heavy.matching_ratios), arrays + ) with EKO.create(dest) as builder: # here I'm plainly ignoring the static analyzer, the types are faking @@ -63,8 +68,8 @@ def load_tar(source: os.PathLike, dest: os.PathLike, errors: bool = False): # accept also these eko = builder.load_cards(theory, operator).build() # pylint: disable=E1101 - for mu2, op in grid.items(): - eko[mu2] = op + for ep, op in grid.items(): + eko[ep] = op eko.metadata.version = metaold.get("eko_version", "") eko.metadata.data_version = 0 @@ -80,12 +85,20 @@ class PseudoTheory(DictLike): """ - _void: str + heavy: HeavyInfo @classmethod def from_old(cls, old: RawCard): """Load from old metadata.""" - return cls(_void="") + heavy = HeavyInfo( + num_flavs_init=4, 
+ num_flavs_max_pdf=None, + intrinsic_flavors=None, + masses=HeavyQuarkMasses([1.51, 4.92, 172.5]), + masses_scheme=None, + matching_ratios=MatchingRatios([1.0, 1.0, 1.0]), + ) + return cls(heavy=heavy) @dataclasses.dataclass @@ -98,7 +111,7 @@ class PseudoOperator(DictLike): """ mu20: float - mu2grid: npt.NDArray + evolgrid: List[EPoint] xgrid: XGrid configs: dict @@ -110,6 +123,9 @@ def from_old(cls, old: RawCard): if mu2list is None: mu2list = old["mu2grid"] mu2grid = np.array(mu2list) + evolgrid = flavored_mugrid( + np.sqrt(mu2grid).tolist(), [1.51, 4.92, 172.5], [1, 1, 1] + ) xgrid = XGrid(old["interpolation_xgrid"]) @@ -118,7 +134,7 @@ def from_old(cls, old: RawCard): interpolation_is_log=old.get("interpolation_is_log"), ) - return cls(mu20=mu20, mu2grid=mu2grid, xgrid=xgrid, configs=configs) + return cls(mu20=mu20, evolgrid=evolgrid, xgrid=xgrid, configs=configs) ARRAY_SUFFIX = ".npy.lz4" @@ -147,7 +163,7 @@ def load_arrays(dir: pathlib.Path) -> dict: """File name stem for operator errrors.""" -def op5to4(mu2grid: list, arrays: dict) -> dict: +def op5to4(evolgrid: List[EPoint], arrays: dict) -> Dict[EPoint, Operator]: """Load dictionary of 4-dim operators, from a single 5-dim one.""" def plural(name: str) -> list: @@ -169,7 +185,7 @@ def plural(name: str) -> list: err5 = [None] * len(op5) grid = {} - for mu2, op4, err4 in zip(mu2grid, op5, err5): - grid[mu2] = Operator(operator=op4, error=err4) + for ep, op4, err4 in zip(evolgrid, op5, err5): + grid[ep] = Operator(operator=op4, error=err4) return grid diff --git a/src/eko/quantities/heavy_quarks.py b/src/eko/quantities/heavy_quarks.py index 4f782e067..3d87b30aa 100644 --- a/src/eko/quantities/heavy_quarks.py +++ b/src/eko/quantities/heavy_quarks.py @@ -1,7 +1,7 @@ """Heavy quarks related quantities.""" import enum from dataclasses import dataclass -from typing import Generic, Sequence, TypeVar +from typing import Generic, Sequence, TypeVar, Union import numpy as np @@ -58,7 +58,7 @@ def t(self, value: T): 
QuarkMass = LinearScale QuarkMassRef = ReferenceRunning[QuarkMass] -HeavyQuarkMasses = HeavyQuarks[QuarkMassRef] +HeavyQuarkMasses = HeavyQuarks[Union[QuarkMass, QuarkMassRef]] MatchingRatio = float MatchingRatios = HeavyQuarks[MatchingRatio] MatchingScale = SquaredScale diff --git a/tests/eko/io/test_init.py b/tests/eko/io/test_init.py index d73d7d97e..55ec7ec28 100644 --- a/tests/eko/io/test_init.py +++ b/tests/eko/io/test_init.py @@ -34,12 +34,13 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): # create object muout = 10.0 mu2out = muout**2 + epout = (mu2out, 5) xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) eko_factory.operator.mugrid = [(muout, 5)] eko_factory.operator.xgrid = xg o1 = eko_factory.get() lpids = 2 - o1[mu2out] = eko.io.Operator( + o1[epout] = eko.io.Operator( operator=eko_identity([1, lpids, len(xg), lpids, len(xg)])[0] ) xgp = interpolation.XGrid(np.geomspace(1e-5, 1.0, 11)) @@ -49,21 +50,21 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): with EKO.edit(otpath) as ot: manipulate.xgrid_reshape(ot, xgp) chk_keys(o1.raw, ot.raw) - assert ot[mu2out].operator.shape == (lpids, len(xgp), lpids, len(xg)) + assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) ottpath = tmp_path / "ott.tar" o1.deepcopy(ottpath) with EKO.edit(ottpath) as ott: with pytest.warns(Warning): manipulate.xgrid_reshape(ott, xg) chk_keys(o1.raw, ott.raw) - np.testing.assert_allclose(ott[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) # only input oipath = tmp_path / "oi.tar" o1.deepcopy(oipath) with EKO.edit(oipath) as oi: manipulate.xgrid_reshape(oi, inputgrid=xgp) - assert oi[mu2out].operator.shape == (lpids, len(xg), lpids, len(xgp)) + assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xgp)) chk_keys(o1.raw, oi.raw) oiipath = tmp_path / "oii.tar" o1.deepcopy(oiipath) @@ -71,7 +72,7 @@ def test_xgrid_reshape(self, 
eko_factory: EKOFactory, tmp_path: pathlib.Path): with pytest.warns(Warning): manipulate.xgrid_reshape(oii, inputgrid=xg) chk_keys(o1.raw, oii.raw) - np.testing.assert_allclose(oii[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) # both oitpath = tmp_path / "oit.tar" @@ -80,7 +81,7 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): manipulate.xgrid_reshape(oit, xgp, xgp) chk_keys(o1.raw, oit.raw) op = eko_identity([1, 2, len(xgp), 2, len(xgp)]) - np.testing.assert_allclose(oit[mu2out].operator, op[0], atol=1e-10) + np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) # error with pytest.raises(ValueError): From c1ca7e6bc6e5900950bea9b100a3389db36ad30d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 25 Apr 2023 19:35:46 +0200 Subject: [PATCH 023/154] Improve xgrid rotations structure, fix EKO raw representation --- src/eko/io/manipulate.py | 176 ++++++++++++++++++++++----------------- src/eko/io/struct.py | 6 +- tests/conftest.py | 5 +- 3 files changed, 104 insertions(+), 83 deletions(-) diff --git a/src/eko/io/manipulate.py b/src/eko/io/manipulate.py index 15173e800..9b32da337 100644 --- a/src/eko/io/manipulate.py +++ b/src/eko/io/manipulate.py @@ -1,12 +1,14 @@ """Manipulate output generate by EKO.""" import logging import warnings -from typing import Optional +from typing import Callable, Optional, Union import numpy as np +import numpy.typing as npt from .. import basis_rotation as br from .. import interpolation +from ..interpolation import XGrid from .struct import EKO logger = logging.getLogger(__name__) @@ -16,24 +18,58 @@ SIMGRID_ROTATION = "ij,ajbk,kl->aibl" """Simultaneous grid rotation contraction indices.""" +Basis = Union[XGrid, npt.NDArray] + + +def rotation(new: Optional[Basis], old: Basis, check: Callable, compute: Callable): + """Define grid rotation. 
+ + This function returns the new grid to be assigned and the rotation computed, + if the checks for a non-trivial new grid are passed. + + However, the check and the computation are delegated respectively to the + callables `check` and `compute`. + + """ + if new is None: + return old, None + + if check(new, old): + warnings.warn("The new grid is close to the current one") + return old, None + + return new, compute(new, old) + + +def xgrid_check(new: Optional[XGrid], old: XGrid): + """Check validity of new xgrid.""" + return new is not None and len(new) == len(old) and np.allclose(new.raw, old.raw) + + +def xgrid_compute_rotation(new: XGrid, old: XGrid, interpdeg: int, swap: bool = False): + """Compute rotation from old to new xgrid. + + By default, the rotation is computed for a target xgrid. If the function + should be used for an input xgrid, the `swap` argument should be set to + `True`, in order to compute it in the other direction (i.e. the transposed). + + """ + if swap: + new, old = old, new + b = interpolation.InterpolatorDispatcher(old, interpdeg, False) + return b.get_interpolation(new.raw) + def xgrid_reshape( eko: EKO, - targetgrid: Optional[interpolation.XGrid] = None, - inputgrid: Optional[interpolation.XGrid] = None, + targetgrid: Optional[XGrid] = None, + inputgrid: Optional[XGrid] = None, ): """Reinterpolate operators on output and/or input grids. - The operation is in-place. + Target corresponds to the output PDF. - Parameters - ---------- - eko : - the operator to be rotated - targetgrid : - xgrid for the target (output PDF) - inputgrid : - xgrid for the input (input PDF) + The operation is in-place. 
""" eko.assert_permissions(write=True) @@ -41,77 +77,61 @@ def xgrid_reshape( # calling with no arguments is an error if targetgrid is None and inputgrid is None: raise ValueError("Nor inputgrid nor targetgrid was given") - # now check to the current status - if ( - targetgrid is not None - and len(targetgrid) == len(eko.rotations.targetgrid) - and np.allclose(targetgrid.raw, eko.rotations.targetgrid.raw) - ): - targetgrid = None - warnings.warn("The new targetgrid is close to the current targetgrid") - if ( - inputgrid is not None - and len(inputgrid) == len(eko.rotations.inputgrid) - and np.allclose(inputgrid.raw, eko.rotations.inputgrid.raw) - ): - inputgrid = None - warnings.warn("The new inputgrid is close to the current inputgrid") + + interpdeg = eko.operator_card.configs.interpolation_polynomial_degree + check = xgrid_check + crot = xgrid_compute_rotation + + # construct matrices + newtarget, targetrot = rotation( + targetgrid, + eko.rotations.targetgrid, + check, + lambda new, old: crot(new, old, interpdeg), + ) + newinput, inputrot = rotation( + inputgrid, + eko.rotations.inputgrid, + check, + lambda new, old: crot(new, old, interpdeg, swap=True), + ) + # after the checks: if there is still nothing to do, skip - if targetgrid is None and inputgrid is None: + if targetrot is None and inputrot is None: logger.debug("Nothing done.") return - - # construct matrices - if targetgrid is not None: - b = interpolation.InterpolatorDispatcher( - eko.rotations.targetgrid, - eko.operator_card.configs.interpolation_polynomial_degree, - False, - ) - target_rot = b.get_interpolation(targetgrid.raw) - eko.rotations.targetgrid = targetgrid - if inputgrid is not None: - b = interpolation.InterpolatorDispatcher( - inputgrid, - eko.operator_card.configs.interpolation_polynomial_degree, - False, - ) - input_rot = b.get_interpolation(eko.rotations.inputgrid.raw) - eko.rotations.inputgrid = inputgrid + # if no rotation is done, the grids are not modified + if targetrot is not 
None: + eko.rotations.targetgrid = newtarget + if targetrot is not None: + eko.rotations.targetgrid = newinput # build new grid - for q2, elem in eko.items(): - ops = elem.operator - errs = elem.error - if targetgrid is not None and inputgrid is None: - ops = np.einsum(TARGETGRID_ROTATION, target_rot, ops, optimize="optimal") - errs = ( - np.einsum(TARGETGRID_ROTATION, target_rot, errs, optimize="optimal") - if errs is not None - else None - ) - elif inputgrid is not None and targetgrid is None: - ops = np.einsum(INPUTGRID_ROTATION, ops, input_rot, optimize="optimal") - errs = ( - np.einsum(INPUTGRID_ROTATION, errs, input_rot, optimize="optimal") - if errs is not None - else None - ) + for ep, elem in eko.items(): + assert elem is not None + + operands = [elem.operator] + operands_errs = [elem.error] + + if targetrot is not None and inputrot is None: + contraction = TARGETGRID_ROTATION + elif inputrot is not None and targetrot is None: + contraction = INPUTGRID_ROTATION else: - ops = np.einsum( - SIMGRID_ROTATION, target_rot, ops, input_rot, optimize="optimal" - ) - errs = ( - np.einsum( - SIMGRID_ROTATION, target_rot, errs, input_rot, optimize="optimal" - ) - if errs is not None - else None - ) - elem.operator = ops - elem.error = errs + contraction = SIMGRID_ROTATION - eko[q2] = elem + if targetrot is not None: + operands.insert(0, targetrot) + operands_errs.insert(0, targetrot) + if inputrot is not None: + operands.append(inputrot) + operands_errs.append(inputrot) + + elem.operator = np.einsum(contraction, *operands, optimize="optimal") + if elem.error is not None: + elem.error = np.einsum(contraction, *operands_errs, optimize="optimal") + + eko[ep] = elem eko.update() @@ -124,8 +144,8 @@ def xgrid_reshape( def flavor_reshape( eko: EKO, - targetpids: Optional[np.ndarray] = None, - inputpids: Optional[np.ndarray] = None, + targetpids: Optional[npt.NDArray] = None, + inputpids: Optional[npt.NDArray] = None, update: bool = True, ): """Change the operators to have 
in the output targetpids and/or in the input inputpids. diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 9dbaff131..ca364e22d 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -773,8 +773,8 @@ def items(self): immediately after """ - for ep, op in self._operators.items(): - yield ep, op + for ep in self._operators: + yield ep, self[ep] del self[ep] def __contains__(self, q2: float) -> bool: @@ -1015,7 +1015,7 @@ def raw(self) -> dict: operators themselves """ - return dict(mu2grid=self.mu2grid.tolist(), metadata=self.metadata.raw) + return dict(mu2grid=self.mu2grid, metadata=self.metadata.raw) @dataclass diff --git a/tests/conftest.py b/tests/conftest.py index ce91ee8cb..2e54dc171 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ from eko import interpolation from eko.io.runcards import OperatorCard, TheoryCard from eko.io.struct import EKO, Operator +from eko.io.types import EvolutionPoint from ekobox import cards @@ -80,7 +81,7 @@ def __init__(self, theory: TheoryCard, operator: OperatorCard, path: os.PathLike self.cache: Optional[EKO] = None @staticmethod - def _operators(mugrid: Iterable[float], shape: Tuple[int, int]): + def _operators(mugrid: Iterable[EvolutionPoint], shape: Tuple[int, int]): ops = {} for mu in mugrid: ops[mu] = Operator(np.random.rand(*shape, *shape)) @@ -94,7 +95,7 @@ def _create(self): lx = len(self.operator.xgrid) lpids = len(self.operator.pids) for mu2, op in self._operators( - mugrid=self.operator.mu2grid, shape=(lpids, lx) + mugrid=self.operator.evolgrid, shape=(lpids, lx) ).items(): self.cache[mu2] = op From 213b4dba6bf9bfeccfaad6d6201fab3d2026a8d2 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 25 Apr 2023 19:49:19 +0200 Subject: [PATCH 024/154] Fix newly introduced copy-paste bug --- src/eko/io/manipulate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/eko/io/manipulate.py b/src/eko/io/manipulate.py index 9b32da337..f06e90e32 100644 --- 
a/src/eko/io/manipulate.py +++ b/src/eko/io/manipulate.py @@ -103,8 +103,8 @@ def xgrid_reshape( # if no rotation is done, the grids are not modified if targetrot is not None: eko.rotations.targetgrid = newtarget - if targetrot is not None: - eko.rotations.targetgrid = newinput + if inputrot is not None: + eko.rotations.inputgrid = newinput # build new grid for ep, elem in eko.items(): From ea986e6ba528fb9a46545fb9d3991466e818874b Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 25 Apr 2023 19:53:14 +0200 Subject: [PATCH 025/154] Complete manipulation tests fix --- tests/eko/io/test_init.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/tests/eko/io/test_init.py b/tests/eko/io/test_init.py index 55ec7ec28..a4b9cfa74 100644 --- a/tests/eko/io/test_init.py +++ b/tests/eko/io/test_init.py @@ -120,12 +120,13 @@ def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) muout = 10.0 mu2out = muout**2 + epout = (mu2out, 5) eko_factory.operator.xgrid = xg eko_factory.operator.mugrid = [(muout, 5)] o1 = eko_factory.get() lpids = len(o1.rotations.pids) lx = len(xg) - o1[mu2out] = eko.io.Operator( + o1[epout] = eko.io.Operator( operator=eko_identity([1, lpids, lx, lpids, lx])[0], error=None, ) @@ -138,15 +139,15 @@ def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): with EKO.edit(tpath) as ot: manipulate.flavor_reshape(ot, target_r) chk_keys(o1.raw, ot.raw) - assert ot[mu2out].operator.shape == (lpids, len(xg), lpids, len(xg)) + assert ot[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) ot.deepcopy(ttpath) with EKO.edit(ttpath) as ott: manipulate.flavor_reshape(ott, np.linalg.inv(target_r)) - np.testing.assert_allclose(ott[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) with pytest.warns(Warning): manipulate.flavor_reshape(ott, np.eye(lpids)) 
chk_keys(o1.raw, ott.raw) - np.testing.assert_allclose(ott[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) # only input input_r = np.eye(lpids) @@ -157,15 +158,15 @@ def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): with EKO.edit(ipath) as oi: manipulate.flavor_reshape(oi, inputpids=input_r) chk_keys(o1.raw, oi.raw) - assert oi[mu2out].operator.shape == (lpids, len(xg), lpids, len(xg)) + assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) oi.deepcopy(iipath) with EKO.edit(iipath) as oii: manipulate.flavor_reshape(oii, inputpids=np.linalg.inv(input_r)) - np.testing.assert_allclose(oii[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) with pytest.warns(Warning): manipulate.flavor_reshape(oii, inputpids=np.eye(lpids)) chk_keys(o1.raw, oii.raw) - np.testing.assert_allclose(oii[mu2out].operator, o1[mu2out].operator) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) # both itpath = tmp_path / "oit.tar" @@ -174,7 +175,7 @@ def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): manipulate.flavor_reshape(oit, target_r, input_r) chk_keys(o1.raw, oit.raw) op = eko_identity([1, lpids, len(xg), lpids, len(xg)]).copy() - np.testing.assert_allclose(oit[mu2out].operator, op[0], atol=1e-10) + np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) # error fpath = tmp_path / "fail.tar" o1.deepcopy(fpath) @@ -186,8 +187,10 @@ def test_to_evol(self, eko_factory: EKOFactory, tmp_path): xgrid = interpolation.XGrid([0.5, 1.0]) mu_out = 2.0 mu2_out = mu_out**2 + nfout = 4 + epout = (mu2_out, nfout) eko_factory.operator.mu0 = float(np.sqrt(1.0)) - eko_factory.operator.mugrid = [(mu_out, 4)] + eko_factory.operator.mugrid = [(mu_out, nfout)] eko_factory.operator.xgrid = xgrid eko_factory.operator.configs.interpolation_polynomial_degree = 1 
eko_factory.operator.configs.interpolation_is_log = False @@ -221,9 +224,7 @@ def test_to_evol(self, eko_factory: EKOFactory, tmp_path): ) # rotate also target manipulate.to_evol(o01, False, True) - np.testing.assert_allclose( - o01[mu2_out].operator, o11[mu2_out].operator - ) + np.testing.assert_allclose(o01[epout].operator, o11[epout].operator) chk_keys(o00.raw, o01.raw) # check the target rotated one np.testing.assert_allclose( @@ -234,7 +235,5 @@ def test_to_evol(self, eko_factory: EKOFactory, tmp_path): ) # rotate also input manipulate.to_evol(o10) - np.testing.assert_allclose( - o10[mu2_out].operator, o11[mu2_out].operator - ) + np.testing.assert_allclose(o10[epout].operator, o11[epout].operator) chk_keys(o00.raw, o10.raw) From 3b5c390d0650b2eb821520625639a65f2a8f1d14 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 25 Apr 2023 20:10:51 +0200 Subject: [PATCH 026/154] Upgrade struct tests to evolution points --- tests/eko/io/test_struct.py | 93 +++++++++++++++++++------------------ 1 file changed, 49 insertions(+), 44 deletions(-) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index 67b4d3347..e8c97f67d 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -114,13 +114,14 @@ def test_load_error(self, tmp_path): def test_properties(self, eko_factory: EKOFactory): mu = 10.0 - mugrid = [(mu, 5)] + nf = 5 + mugrid = [(mu, nf)] eko_factory.operator.mugrid = mugrid eko = eko_factory.get() assert hasattr(eko.theory_card.heavy, "masses") assert hasattr(eko.operator_card, "debug") np.testing.assert_allclose(eko.mu2grid, [mu**2]) - assert mu**2 in eko + assert (mu**2, nf) in eko default_grid = eko.operator_card.xgrid assert eko.xgrid == default_grid xg = interpolation.XGrid([0.1, 1.0]) @@ -137,59 +138,63 @@ def test_properties(self, eko_factory: EKOFactory): def test_ops(self, eko_factory: EKOFactory): mu = 10.0 mu2 = mu**2 - mugrid = [(mu, 5)] + nf = 5 + ep = (mu2, nf) + mugrid = [(mu, nf)] 
eko_factory.operator.mugrid = mugrid eko = eko_factory.get() v = np.random.rand(2, 2) opv = struct.Operator(operator=v) # try setting not an operator with pytest.raises(ValueError): - eko[mu2] = "bla" + eko[ep] = "bla" # approx - eko[mu2] = opv - assert eko.approx(2 * mu2) is None - assert eko.approx(mu2 + 1.0, atol=2) == mu2 - eko[mu2 + 1.0] = opv + eko[ep] = opv + assert eko.approx((2 * mu2, nf)) is None + assert eko.approx((mu2 + 1.0, nf), atol=2) == mu2 + eko[(mu2 + 1.0, nf)] = opv with pytest.raises(ValueError): - eko.approx(mu2 + 0.5, atol=2) + eko.approx((mu2 + 0.5, nf), atol=2) # iterate - for q2, q2eko in zip((mu2, mu2 + 1.0), eko): - assert q2 == q2eko - np.testing.assert_allclose(v, eko[q2].operator) - for q2, (q2eko, op) in zip((mu2, mu2 + 1.0), eko.items()): - assert q2 == q2eko + for mu2_, ep in zip((mu2, mu2 + 1.0), eko): + assert mu2_ == ep[0] + np.testing.assert_allclose(v, eko[(mu2, nf)].operator) + for mu2_, (mu2eko, op) in zip((mu2, mu2 + 1.0), eko.items()): + assert mu2_ == mu2eko[0] np.testing.assert_allclose(v, op.operator) # getter with pytest.raises(ValueError): - eko[mu2 + 2.0] - with eko.operator(mu2) as op: + eko[mu2 + 2.0, nf] + with eko.operator(ep) as op: np.testing.assert_allclose(v, op.operator) # overwrite vv = np.random.rand(2, 2) opvv = struct.Operator(operator=vv) - eko[mu2 + 1.0] = opvv - np.testing.assert_allclose(vv, eko[mu2 + 1.0].operator) + eko[mu2 + 1.0, nf] = opvv + np.testing.assert_allclose(vv, eko[mu2 + 1.0, nf].operator) def test_copy(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): mu = 10.0 mu2 = mu**2 - mugrid = [(mu, 5)] + nf = 5 + ep = (mu2, nf) + mugrid = [(mu, nf)] eko_factory.operator.mugrid = mugrid eko1 = eko_factory.get() v = np.random.rand(2, 2) opv = struct.Operator(operator=v) - eko1[mu2] = opv - np.testing.assert_allclose(eko1[mu2].operator, v) + eko1[ep] = opv + np.testing.assert_allclose(eko1[ep].operator, v) p = tmp_path / "eko2.tar" eko1.deepcopy(p) with EKO.edit(p) as eko2: - 
np.testing.assert_allclose(eko1[mu2].operator, v) - np.testing.assert_allclose(eko2[mu2].operator, v) + np.testing.assert_allclose(eko1[ep].operator, v) + np.testing.assert_allclose(eko2[ep].operator, v) vv = np.random.rand(2, 2) opvv = struct.Operator(operator=vv) - eko2[mu2] = opvv - np.testing.assert_allclose(eko1[mu2].operator, v) - np.testing.assert_allclose(eko2[mu2].operator, vv) + eko2[ep] = opvv + np.testing.assert_allclose(eko1[ep].operator, v) + np.testing.assert_allclose(eko2[ep].operator, vv) # dump does not happen before closing, unless explicitly called, and # without a dump the path would be empty eko2.dump() @@ -205,39 +210,39 @@ def test_items(self, eko_factory: EKOFactory): eko = eko_factory.get() v = np.random.rand(2, 2) opv = struct.Operator(operator=v) - for mu2 in eko.operator_card.mu2grid: - eko[mu2] = opv + for ep in eko.operator_card.evolgrid: + eko[ep] = opv - mu2 = next(iter(eko.mu2grid)) + ep = next(iter(eko)) # unload - eko._operators[mu2] = None + eko._operators[ep] = None # test autoloading - assert isinstance(eko[mu2], struct.Operator) - assert isinstance(eko._operators[mu2], struct.Operator) + assert isinstance(eko[ep], struct.Operator) + assert isinstance(eko._operators[ep], struct.Operator) - del eko[mu2] + del eko[ep] - assert eko._operators[mu2] is None + assert eko._operators[ep] is None - def test_iter(self, eko_factory): + def test_iter(self, eko_factory: EKOFactory): """Test managed iteration.""" eko_factory.operator.mugrid = [(3.0, 4), (20.0, 5), (300.0, 6)] eko = eko_factory.get() - mu2prev = None - for mu2, op in eko.items(): - if mu2prev is not None: - assert eko._operators[mu2prev] is None + epprev = None + for ep, op in eko.items(): + if epprev is not None: + assert eko._operators[epprev] is None assert isinstance(op, struct.Operator) - mu2prev = mu2 + epprev = ep - def test_context_operator(self, eko_factory): + def test_context_operator(self, eko_factory: EKOFactory): """Test automated handling through context.""" 
eko = eko_factory.get() - mu2 = eko.mu2grid[0] + ep = next(iter(eko)) - with eko.operator(mu2) as op: + with eko.operator(ep) as op: assert isinstance(op, struct.Operator) - assert eko._operators[mu2] is None + assert eko._operators[ep] is None From 73d038b91f30a40dfa9b1edee381cb7177f13a0b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 26 Apr 2023 12:49:25 +0200 Subject: [PATCH 027/154] Introduce eko.epgrid --- src/eko/io/struct.py | 5 +++++ tests/ekobox/test_apply.py | 14 +++++++------- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index ca364e22d..115480a3b 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -627,6 +627,11 @@ def mu2grid(self) -> List[SquaredScale]: """Provide the list of :math:`Q^2` as an array.""" return [mu2 for mu2, _ in self._operators] + @property + def epgrid(self) -> List[EPoint]: + """Provide the list of evolution points as an array.""" + return list(self._operators.keys()) + @property def theory_card(self): """Provide theory card, retrieving from the dump.""" diff --git a/tests/ekobox/test_apply.py b/tests/ekobox/test_apply.py index 8e980818f..2232d1a4f 100644 --- a/tests/ekobox/test_apply.py +++ b/tests/ekobox/test_apply.py @@ -7,22 +7,22 @@ class TestApply: def test_apply(self, eko_factory: EKOFactory, fake_pdf): eko = eko_factory.get() - mu2_out = eko.mu2grid[0] + ep_out = eko.epgrid[0] # fake pdfs pdf_grid = apply.apply_pdf(eko, fake_pdf) - assert len(pdf_grid) == len(eko.mu2grid) - pdfs = pdf_grid[mu2_out]["pdfs"] + assert len(pdf_grid) == len(eko.epgrid) + pdfs = pdf_grid[ep_out]["pdfs"] assert list(pdfs.keys()) == list(eko.rotations.targetpids) # rotate to target_grid target_grid = [0.75] pdf_grid = apply.apply_pdf(eko, fake_pdf, target_grid) assert len(pdf_grid) == 1 - pdfs = pdf_grid[mu2_out]["pdfs"] + pdfs = pdf_grid[ep_out]["pdfs"] assert list(pdfs.keys()) == list(eko.rotations.targetpids) def test_apply_flavor(self, eko_factory: EKOFactory, 
fake_pdf, monkeypatch): eko = eko_factory.get() - mu2_out = eko.mu2grid[0] + ep_out = eko.epgrid[0] # fake pdfs monkeypatch.setattr( "eko.basis_rotation.rotate_flavor_to_evolution", np.ones((14, 14)) @@ -36,6 +36,6 @@ def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): ) monkeypatch.setattr("eko.basis_rotation.evol_basis", fake_evol_basis) pdf_grid = apply.apply_pdf(eko, fake_pdf, rotate_to_evolution_basis=True) - assert len(pdf_grid) == len(eko.mu2grid) - pdfs = pdf_grid[mu2_out]["pdfs"] + assert len(pdf_grid) == len(eko.epgrid) + pdfs = pdf_grid[ep_out]["pdfs"] assert list(pdfs.keys()) == list(fake_evol_basis) From 8dcad8fec288b5e0c41e4e3ecaaa2049a5f045f4 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 26 Apr 2023 14:00:24 +0200 Subject: [PATCH 028/154] Adjust eko.evolgrid --- src/eko/io/struct.py | 4 ++-- tests/ekobox/test_apply.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 115480a3b..b639a45ca 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -628,9 +628,9 @@ def mu2grid(self) -> List[SquaredScale]: return [mu2 for mu2, _ in self._operators] @property - def epgrid(self) -> List[EPoint]: + def evolgrid(self) -> List[EPoint]: """Provide the list of evolution points as an array.""" - return list(self._operators.keys()) + return list(self) @property def theory_card(self): diff --git a/tests/ekobox/test_apply.py b/tests/ekobox/test_apply.py index 2232d1a4f..8c5817f06 100644 --- a/tests/ekobox/test_apply.py +++ b/tests/ekobox/test_apply.py @@ -7,10 +7,10 @@ class TestApply: def test_apply(self, eko_factory: EKOFactory, fake_pdf): eko = eko_factory.get() - ep_out = eko.epgrid[0] + ep_out = eko.evolgrid[0] # fake pdfs pdf_grid = apply.apply_pdf(eko, fake_pdf) - assert len(pdf_grid) == len(eko.epgrid) + assert len(pdf_grid) == len(eko.evolgrid) pdfs = pdf_grid[ep_out]["pdfs"] assert list(pdfs.keys()) == list(eko.rotations.targetpids) # rotate 
to target_grid @@ -22,7 +22,7 @@ def test_apply(self, eko_factory: EKOFactory, fake_pdf): def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): eko = eko_factory.get() - ep_out = eko.epgrid[0] + ep_out = eko.evolgrid[0] # fake pdfs monkeypatch.setattr( "eko.basis_rotation.rotate_flavor_to_evolution", np.ones((14, 14)) @@ -36,6 +36,6 @@ def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): ) monkeypatch.setattr("eko.basis_rotation.evol_basis", fake_evol_basis) pdf_grid = apply.apply_pdf(eko, fake_pdf, rotate_to_evolution_basis=True) - assert len(pdf_grid) == len(eko.epgrid) + assert len(pdf_grid) == len(eko.evolgrid) pdfs = pdf_grid[ep_out]["pdfs"] assert list(pdfs.keys()) == list(fake_evol_basis) From 2308cf201d78a8ce0953abc21f6c66a6a5e6bb9d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 26 Apr 2023 15:07:40 +0200 Subject: [PATCH 029/154] Update src/eko/matchings.py Co-authored-by: Felix Hekhorn --- src/eko/matchings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index 114a60fbb..d03a5143a 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -75,7 +75,7 @@ def normalize(self, target: EPoint) -> EPoint: def __str__(self): """Textual representation, mainly for logging purpose.""" walls = " - ".join([f"{w:.2e}" for w in self.walls]) - return f"ThresholdsAtlas [{walls}], ref={self.origin[0]} @ {self.origin[1]}" + return f"Atlas [{walls}], ref={self.origin[0]} @ {self.origin[1]}" @classmethod def ffns(cls, nf: int, mu2: SquaredScale): From be108637629c50e84c4390839d73f345bc8045fe Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 26 Apr 2023 17:03:15 +0200 Subject: [PATCH 030/154] Keep old and new solve side by side --- src/eko/evolution_operator/grid.py | 3 ++- src/eko/runner/__init__.py | 14 ++++++++++++-- src/eko/runner/commons.py | 2 +- src/eko/runner/legacy.py | 8 ++++---- src/eko/runner/recipes.py | 5 ++--- 5 files 
changed, 21 insertions(+), 11 deletions(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index 9532593df..d1637efa4 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -7,6 +7,7 @@ import logging import numbers +from dataclasses import astuple from typing import List import numpy as np @@ -150,7 +151,7 @@ def get_threshold_operators(self, path): is_downward = is_downward_path(path) shift = flavor_shift(is_downward) for seg in path[:-1]: - new_op_key = seg.tuple + new_op_key = astuple(seg) thr_config = self.managers["thresholds_config"] kthr = thr_config.thresholds_ratios[seg.nf - shift] ome = OperatorMatrixElement( diff --git a/src/eko/runner/__init__.py b/src/eko/runner/__init__.py index 0d05183f5..feba847eb 100644 --- a/src/eko/runner/__init__.py +++ b/src/eko/runner/__init__.py @@ -6,7 +6,7 @@ from ..io import runcards from ..io.runcards import OperatorCard, TheoryCard from ..io.types import RawCard -from . import managed +from . 
import legacy, managed # TODO: drop this altogether, replacing just with managed.solve @@ -38,7 +38,17 @@ def solve( """ # TODO: drop this - # legacy.Runner(theory_card, operators_card, path).compute() + legacy.Runner(theory_card, operators_card, path).compute() + +def solve_jets( + theory_card: Union[RawCard, TheoryCard], + operators_card: Union[RawCard, OperatorCard], + path: os.PathLike, +): + """Implement solve for new output.""" new_theory, new_operator = runcards.update(theory_card, operators_card) managed.solve(new_theory, new_operator, Path(path)) + + +# solve = solve_jets diff --git a/src/eko/runner/commons.py b/src/eko/runner/commons.py index 9635fd19e..c27b4c8a2 100644 --- a/src/eko/runner/commons.py +++ b/src/eko/runner/commons.py @@ -27,7 +27,7 @@ def interpolator(operator: OperatorCard) -> InterpolatorDispatcher: ) -def threshold_atlas(theory: TheoryCard, operator: OperatorCard) -> Atlas: +def atlas(theory: TheoryCard, operator: OperatorCard) -> Atlas: """Create thresholds atlas from runcards.""" # TODO: cache result masses = runcards.masses(theory, operator.configs.evolution_method) diff --git a/src/eko/runner/legacy.py b/src/eko/runner/legacy.py index de6988e7d..dd5c462bb 100644 --- a/src/eko/runner/legacy.py +++ b/src/eko/runner/legacy.py @@ -54,7 +54,7 @@ def __init__( # call explicitly iter to explain the static analyzer that is an # iterable - tc = commons.threshold_atlas(new_theory, new_operator) + tc = commons.atlas(new_theory, new_operator) # strong coupling cs = commons.couplings(new_theory, new_operator) # setup operator grid @@ -63,7 +63,7 @@ def __init__( masses = runcards.masses(new_theory, new_operator.configs.evolution_method) self.op_grid = OperatorGrid( - mu2grid=new_operator.mu2grid, + mu2grid=new_operator.evolgrid, order=new_theory.order, masses=masses, mass_scheme=new_theory.heavy.masses_scheme.value, @@ -94,5 +94,5 @@ def compute(self): """ with EKO.edit(self.path) as eko: # add all operators - for final_scale, op in 
self.op_grid.compute().items(): - eko[float(final_scale)] = Operator.from_dict(op) + for ep, op in self.op_grid.compute().items(): + eko[ep] = Operator.from_dict(op) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index 55bc0daea..a7e62b825 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -3,7 +3,6 @@ from dataclasses import dataclass from .. import EKO -from .. import scale_variations as sv from ..io.dictlike import DictLike from ..io.types import SquaredScale from . import commons @@ -34,12 +33,12 @@ class MatchingRecipe(Recipe): def create(eko: EKO): """Create all associated recipes.""" - tc = commons.threshold_atlas(eko.theory_card, eko.operator_card) + atlas = commons.atlas(eko.theory_card, eko.operator_card) terminal = [] for ep in eko: # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 - blocks = tc.path(ep) + blocks = atlas.path(ep) terminal.append(blocks.pop()) From 2e96815893e32252ba3c719dbcc544322c824eda Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 26 Apr 2023 17:38:04 +0200 Subject: [PATCH 031/154] Introduce cliff operators --- src/eko/runner/parts.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index e1b5a77a4..5ac0fbc71 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,9 +1,8 @@ """Operator components.""" from abc import ABC from dataclasses import dataclass -from typing import Optional -import numpy.typing as npt +from eko.io.struct import Operator from ..io.dictlike import DictLike from ..io.types import SquaredScale @@ -20,22 +19,41 @@ class Part(DictLike, ABC): """ - operator: npt.NDArray - error: Optional[npt.NDArray] + operator: Operator @dataclass class Evolution(Part): """Evolution in a fixed number of flavors.""" - final: bool - """Whether the operator is the terminal segment of 
evolution. + cliff: bool + """Whether the operator is reaching a matching scale. + + Cliff operators are the only ones allowed to be intermediate, even though + they can also be final segments of an evolution path (see + :meth:`eko.matchings.Atlas.path`). - If it is not final, then the operator is an intermediate one. Intermediate ones always have final scales :attr:`mu2` corresponding to matching scales, and initial scales :attr:`mu20` corresponding to either matching scales or the global initial scale of the EKO. + Note + ---- + + The name of *cliff* operators stems from the following diagram:: + + nf = 3 -------------------------------------------------------- + | + nf = 4 -------------------------------------------------------- + | + nf = 5 -------------------------------------------------------- + | + nf = 6 -------------------------------------------------------- + + where each lane corresponds to DGLAP evolution with the relative number of + running flavors, and the vertical bridges are the perturbative matchings + between two different "adjacent" schemes. 
+ """ mu20: SquaredScale """Initial scale.""" From cb018dd7b9319c0874e5ca88a10255c3652c9217 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 27 Apr 2023 14:31:56 +0200 Subject: [PATCH 032/154] Start reorganizing inventories --- src/eko/io/inventory.py | 0 src/eko/io/items.py | 104 ++++++++++++++++++++++++++++++++++++++++ src/eko/io/struct.py | 101 +------------------------------------- 3 files changed, 106 insertions(+), 99 deletions(-) create mode 100644 src/eko/io/inventory.py create mode 100644 src/eko/io/items.py diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/eko/io/items.py b/src/eko/io/items.py new file mode 100644 index 000000000..ea862e939 --- /dev/null +++ b/src/eko/io/items.py @@ -0,0 +1,104 @@ +import io +from dataclasses import dataclass +from typing import BinaryIO, Optional + +import lz4.frame +import numpy as np +import numpy.lib.npyio as npyio +import numpy.typing as npt + +from . import exceptions + + +@dataclass +class Recipe: + matching: bool + + +@dataclass +class RecipeMatching: + matching: bool + + +@dataclass +class Part: + matching: bool + + +@dataclass +class PartMatching: + matching: bool + + +@dataclass +class Operator: + """Operator representation. + + To be used to hold the result of a computed 4-dim operator (from a given + scale to another given one). + + Note + ---- + IO works with streams in memory, in order to avoid intermediate write on + disk (keep read from and write to tar file only). + + """ + + operator: npt.NDArray + """Content of the evolution operator.""" + error: Optional[npt.NDArray] = None + """Errors on individual operator elements (mainly used for integration + error, but it can host any kind of error). + """ + + def save(self, stream: BinaryIO) -> bool: + """Save content of operator to bytes. + + The content is saved on a `stream`, in order to be able to perform the + operation both on disk and in memory. 
+ + The returned value tells whether the operator saved contained or not + the error (this control even the format, ``npz`` with errors, ``npy`` + otherwise). + + """ + aux = io.BytesIO() + if self.error is None: + np.save(aux, self.operator) + else: + np.savez(aux, operator=self.operator, error=self.error) + aux.seek(0) + + # compress if requested + content = lz4.frame.compress(aux.read()) + + # write compressed data + stream.write(content) + stream.seek(0) + + # return the type of array dumped (i.e. 'npy' or 'npz') + return self.error is None + + @classmethod + def load(cls, stream: BinaryIO): + """Load operator from bytes. + + An input `stream` is used to load the operator from, in order to + support the operation both on disk and in memory. + + """ + extracted_stream = io.BytesIO(lz4.frame.decompress(stream.read())) + content = np.load(extracted_stream) + + if isinstance(content, np.ndarray): + op = content + err = None + elif isinstance(content, npyio.NpzFile): + op = content["operator"] + err = content["error"] + else: + raise exceptions.OperatorLoadingError( + "Not possible to load operator, content format not recognized" + ) + + return cls(operator=op, error=err) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index b639a45ca..458b0acb1 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -2,7 +2,6 @@ import base64 import contextlib import copy -import io import logging import os import pathlib @@ -10,11 +9,9 @@ import tarfile import tempfile from dataclasses import dataclass, fields -from typing import BinaryIO, Dict, List, Optional +from typing import Dict, List, Optional -import lz4.frame import numpy as np -import numpy.lib.npyio as npyio import numpy.typing as npt import yaml @@ -23,6 +20,7 @@ from .. import version as vmod from . 
import exceptions, raw from .dictlike import DictLike +from .items import Operator from .runcards import OperatorCard, TheoryCard from .types import EvolutionPoint as EPoint from .types import SquaredScale @@ -39,101 +37,6 @@ COMPRESSED_SUFFIX = ".lz4" -@dataclass -class Operator(DictLike): - """Operator representation. - - To be used to hold the result of a computed 4-dim operator (from a given - scale to another given one). - - Note - ---- - IO works with streams in memory, in order to avoid intermediate write on - disk (keep read from and write to tar file only). - - """ - - operator: npt.NDArray - """Content of the evolution operator.""" - error: Optional[npt.NDArray] = None - """Errors on individual operator elements (mainly used for integration - error, but it can host any kind of error). - """ - - def save(self, stream: BinaryIO, compress: bool = True) -> bool: - """Save content of operator to bytes. - - Parameters - ---------- - stream : BinaryIO - a stream where to save the operator content (in order to be able to - perform the operation both on disk and in memory) - compress : bool - flag to control optional compression (default: `True`) - - Returns - ------- - bool - whether the operator saved contained or not the error (this control - even the format, ``npz`` with errors, ``npy`` otherwise) - - """ - aux = io.BytesIO() - if self.error is None: - np.save(aux, self.operator) - else: - np.savez(aux, operator=self.operator, error=self.error) - aux.seek(0) - - # compress if requested - if compress: - content = lz4.frame.compress(aux.read()) - else: - content = aux.read() - - # write compressed data - stream.write(content) - stream.seek(0) - - # return the type of array dumped (i.e. 'npy' or 'npz') - return self.error is None - - @classmethod - def load(cls, stream: BinaryIO, compressed: bool = True): - """Load operator from bytes. 
- - Parameters - ---------- - stream : BinaryIO - a stream to load the operator from (to support the operation both on - disk and in memory) - compressed : bool - declare whether the stream is or is not compressed (default: `True`) - - Returns - ------- - Operator - the loaded instance - - """ - if compressed: - stream = io.BytesIO(lz4.frame.decompress(stream.read())) - content = np.load(stream) - - if isinstance(content, np.ndarray): - op = content - err = None - elif isinstance(content, npyio.NpzFile): - op = content["operator"] - err = content["error"] - else: - raise exceptions.OperatorLoadingError( - "Not possible to load operator, content format not recognized" - ) - - return cls(operator=op, error=err) - - @dataclass class InternalPaths: """Paths inside an EKO folder. From 7f3ea5ebab4cf61ac0aefd69e6e18f876a2acbf7 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 13:01:29 +0200 Subject: [PATCH 033/154] Extract inventory implementation --- src/eko/io/access.py | 63 ++++++++++++++++++ src/eko/io/inventory.py | 138 ++++++++++++++++++++++++++++++++++++++++ src/eko/io/items.py | 60 +++++++++++------ src/eko/io/struct.py | 66 ------------------- 4 files changed, 242 insertions(+), 85 deletions(-) create mode 100644 src/eko/io/access.py diff --git a/src/eko/io/access.py b/src/eko/io/access.py new file mode 100644 index 000000000..d5a9d70af --- /dev/null +++ b/src/eko/io/access.py @@ -0,0 +1,63 @@ +"""Manage file system resources access.""" + +from dataclasses import dataclass +from pathlib import Path +from typing import Optional + +from . import exceptions + + +@dataclass +class AccessConfigs: + """Configurations specified during opening of an EKO.""" + + path: Path + """The path to the permanent object.""" + readonly: bool + "Read-only flag" + open: bool + "EKO status" + + def assert_open(self): + """Assert operator is open. 
+ + Raises + ------ + exceptions.ClosedOperator + if operator is closed + + """ + if not self.open: + raise exceptions.ClosedOperator + + def assert_writeable(self, msg: Optional[str] = None): + """Assert operator is writeable. + + Raises + ------ + exceptions.ClosedOperator + see :meth:`assert_open` + exceptions.ReadOnlyOperator + if operators has been declared read-only + + """ + if msg is None: + msg = "" + + self.assert_open() + if self.readonly: + raise exceptions.ReadOnlyOperator(msg) + + @property + def read(self): + """Check reading permission. + + Reading access is always granted on open operator. + + """ + return self.open + + @property + def write(self): + """Check writing permission.""" + return self.open and not self.readonly diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index e69de29bb..8d8e2fc4b 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -0,0 +1,138 @@ +"""Manage assets used during computation.""" +import base64 +from dataclasses import asdict, dataclass +from pathlib import Path +from typing import Dict, Optional + +import yaml + +from .access import AccessConfigs +from .items import Header, Operator + +NBYTES = 8 +ENDIANNES = "little" + +HEADER_EXT = ".yaml" +OPERATOR_EXT = [".npy", ".npz"] + + +class LookupError(ValueError): + """Failure in content retrieval from inventory.""" + + +def encode(header: Header): + """Extract an hash from a header.""" + return base64.urlsafe_b64encode( + hash(header).to_bytes(NBYTES, byteorder=ENDIANNES) + ).decode(encoding="utf-8") + + +def header_name(header: Header): + """Determine header file name.""" + stem = encode(header) + return stem + HEADER_EXT + + +def operator_name(header: Header, err: bool): + """Determine operator file name, from the associated header.""" + stem = encode(header) + return stem + OPERATOR_EXT[1 if err else 0] + + +@dataclass(frozen=True) +class Inventory: + """Assets manager. + + In particular, manaage autosave, autoload, and memory caching. 
+ + """ + + access: AccessConfigs + cache: Dict[Header, Optional[Operator]] = {} + contentful: bool = True + + def lookup(self, stem: str) -> Path: + """Look up for content path in inventory.""" + found = [ + path + for path in self.access.path.iterdir() + if path.name.startswith(stem) + if path.suffix in OPERATOR_EXT + ] + + if len(found) == 0: + raise LookupError(f"Target value '{stem}' not available.") + elif len(found) > 1: + raise LookupError(f"Too many operators associated to '{stem}':\n{found}") + + return found[0] + + def __getitem__(self, header: Header) -> Optional[Operator]: + r"""Retrieve operator for given header. + + If the operator is not already in memory, it will be automatically + loaded. + + """ + self.access.assert_open() + + if header in self.cache: + op = self.cache[header] + if op is not None or not self.contentful: + return op + + stem = encode(header) + oppath = self.lookup(stem) + + with open(oppath, "rb") as fd: + op = Operator.load(fd) + + self.cache[header] = op + return op + + def __setitem__(self, header: Header, operator: Optional[Operator]): + """Set operator for given :math:`mu^2`. + + Header and operator are automatically dumped on disk. + + """ + self.access.assert_writeable() + + if not self.contentful: + self.cache[header] = None + + assert operator is not None + + headpath = self.access.path / header_name(header) + with_err = operator.error is not None + oppath = self.access.path / operator_name(header, err=with_err) + + headpath.write_text(yaml.dump(asdict(header))) + with open(oppath, "wb") as fd: + operator.save(fd) + + self.cache[header] = operator + + def __delitem__(self, header: Header): + """Drop operator from memory. + + Irrelevant for contentless inventories. + + Note + ---- + This method only drops the operator from memory, and it's not expected + to do anything else. + + Autosave is done on set, and explicit saves are performed by the + computation functions. 
+ + If a further explicit save is required, repeat explicit assignment:: + + inventory[header] = inventory[header] + + This is only useful if the operator has been mutated in place, that in + general should be avoided, since the operator should only be the result + of a full computation or a library manipulation. + + """ + self.cache[header] = None diff --git a/src/eko/io/items.py b/src/eko/io/items.py index ea862e939..67a77ba24 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -1,3 +1,4 @@ +"""Inventory items definition.""" import io from dataclasses import dataclass from typing import BinaryIO, Optional @@ -10,27 +11,12 @@ from . import exceptions -@dataclass -class Recipe: - matching: bool - - -@dataclass -class RecipeMatching: - matching: bool +@dataclass(frozen=True) +class Header: + """Item header, containing metadata.""" -@dataclass -class Part: - matching: bool - - -@dataclass -class PartMatching: - matching: bool - - -@dataclass +@dataclass(frozen=True) class Operator: """Operator representation. 
@@ -102,3 +88,39 @@ def load(cls, stream: BinaryIO): ) return cls(operator=op, error=err) + + +@dataclass(frozen=True) +class Recipe(Header): + """Information to compute an evolution operator.""" + + matching: bool + + +@dataclass(frozen=True) +class RecipeMatching(Header): + """Information to compute a matching operator.""" + + matching: bool + + +@dataclass(frozen=True) +class Part(Header): + """Evolution operator.""" + + matching: bool + + +@dataclass(frozen=True) +class PartMatching(Header): + """Matching operator.""" + + matching: bool + + +@dataclass(frozen=True) +class Item: + """Inventory item.""" + + header: Header + content: Operator diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 458b0acb1..6a0c58fb0 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -204,62 +204,6 @@ def grid(self) -> List[EPoint]: ) -@dataclass -class AccessConfigs: - """Configurations specified during opening of an EKO.""" - - path: pathlib.Path - """The path to the permanent object.""" - readonly: bool - "Read-only flag" - open: bool - "EKO status" - - def assert_open(self): - """Assert operator is open. - - Raises - ------ - exceptions.ClosedOperator - if operator is closed - - """ - if not self.open: - raise exceptions.ClosedOperator - - def assert_writeable(self, msg: Optional[str] = None): - """Assert operator is writeable. - - Raises - ------ - exceptions.ClosedOperator - see :meth:`assert_open` - exceptions.ReadOnlyOperator - if operators has been declared read-only - - """ - if msg is None: - msg = "" - - self.assert_open() - if self.readonly: - raise exceptions.ReadOnlyOperator(msg) - - @property - def read(self): - """Check reading permission. - - Reading access is always granted on open operator. - - """ - return self.open - - @property - def write(self): - """Check writing permission.""" - return self.open and not self.readonly - - @dataclass class Rotations(DictLike): """Rotations related configurations. 
@@ -577,16 +521,6 @@ def __getitem__(self, ep: EPoint) -> Operator: If the operator is not already in memory, it will be automatically loaded. - Parameters - ---------- - mu2 : float - :math:`\mu^2` value labeling the operator to be retrieved - - Returns - ------- - Operator - the retrieved operator - """ self.access.assert_open() From 99b66b24920c32a0bee24c226a3ab26073550fce Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 13:30:24 +0200 Subject: [PATCH 034/154] Split struct in many modules --- src/eko/io/access.py | 64 +++-- src/eko/io/bases.py | 117 +++++++++ src/eko/io/builder.py | 102 ++++++++ src/eko/io/exceptions.py | 32 --- src/eko/io/inventory.py | 6 +- src/eko/io/metadata.py | 103 ++++++++ src/eko/io/paths.py | 74 ++++++ src/eko/io/struct.py | 542 +-------------------------------------- 8 files changed, 460 insertions(+), 580 deletions(-) create mode 100644 src/eko/io/bases.py create mode 100644 src/eko/io/builder.py create mode 100644 src/eko/io/metadata.py create mode 100644 src/eko/io/paths.py diff --git a/src/eko/io/access.py b/src/eko/io/access.py index d5a9d70af..b8dec50a8 100644 --- a/src/eko/io/access.py +++ b/src/eko/io/access.py @@ -7,6 +7,38 @@ from . import exceptions +class ReadOnlyOperator(RuntimeError, exceptions.OutputError): + """It is not possible to write on a read-only operator. + + In particular, the behavior would be deceitful, since writing is possible + in-memory and even on the temporary folder. + But eventually, no writing will happen on a persistent archive, so any + modification is lost after exiting the program. + + """ + + +class ClosedOperator(RuntimeError, exceptions.OutputError): + """It is not possible to write on nor to read from a closed operator. + + This is milder issue than :class:`ReadOnlyOperator`, since in this case not + even the writing on the temporary folder would be possible. 
+ + Instead, it will look like you can access some properties, but the operator + is actually closed, so it should not be used any longer in general. + However, for extremely simple properties, like those available in memory + from :class:`eko.io.struct.Metadata` or :class:`eko.io.struct.AccessConfigs`, there + is no need to raise on read, since those properties are actually available, + but they should always raise on writing, since there is no persistence for + the content written, and it can be deceitful. + + Still, the level of protection will be mild, since a thoruough protection + would clutter a lot the code, requiring a lot of maintenance. + "We are adult here". + + """ + + @dataclass class AccessConfigs: """Configurations specified during opening of an EKO.""" @@ -18,6 +50,20 @@ class AccessConfigs: open: bool "EKO status" + @property + def read(self): + """Check reading permission. + + Reading access is always granted on open operator. + + """ + return self.open + + @property + def write(self): + """Check writing permission.""" + return self.open and not self.readonly + def assert_open(self): """Assert operator is open. @@ -28,7 +74,7 @@ def assert_open(self): """ if not self.open: - raise exceptions.ClosedOperator + raise ClosedOperator def assert_writeable(self, msg: Optional[str] = None): """Assert operator is writeable. @@ -46,18 +92,4 @@ def assert_writeable(self, msg: Optional[str] = None): self.assert_open() if self.readonly: - raise exceptions.ReadOnlyOperator(msg) - - @property - def read(self): - """Check reading permission. - - Reading access is always granted on open operator. 
- - """ - return self.open - - @property - def write(self): - """Check writing permission.""" - return self.open and not self.readonly + raise ReadOnlyOperator(msg) diff --git a/src/eko/io/bases.py b/src/eko/io/bases.py new file mode 100644 index 000000000..97a28dc36 --- /dev/null +++ b/src/eko/io/bases.py @@ -0,0 +1,117 @@ +"""Operators bases.""" +from dataclasses import dataclass, fields +from typing import Optional + +import numpy as np +import numpy.typing as npt + +from .. import basis_rotation as br +from .. import interpolation +from .dictlike import DictLike + + +@dataclass +class Bases(DictLike): + """Rotations related configurations. + + Here "Rotation" is intended in a broad sense: it includes both rotations in + flavor space (labeled with suffix `pids`) and in :math:`x`-space (labeled + with suffix `grid`). + Rotations in :math:`x`-space correspond to reinterpolate the result on a + different basis of polynomials. + + """ + + xgrid: interpolation.XGrid + """Internal momentum fraction grid.""" + _targetgrid: Optional[interpolation.XGrid] = None + _inputgrid: Optional[interpolation.XGrid] = None + _targetpids: Optional[npt.NDArray] = None + _inputpids: Optional[npt.NDArray] = None + + def __post_init__(self): + """Adjust types when loaded from serialized object.""" + for attr in ("xgrid", "_inputgrid", "_targetgrid"): + value = getattr(self, attr) + if value is None: + continue + if isinstance(value, (np.ndarray, list)): + setattr(self, attr, interpolation.XGrid(value)) + elif not isinstance(value, interpolation.XGrid): + setattr(self, attr, interpolation.XGrid.load(value)) + + @property + def pids(self): + """Internal flavor basis, used for computation.""" + return np.array(br.flavor_basis_pids) + + @property + def inputpids(self) -> npt.NDArray: + """Provide pids expected on the input PDF.""" + if self._inputpids is None: + return self.pids + return self._inputpids + + @inputpids.setter + def inputpids(self, value): + self._inputpids = value + + 
@property + def targetpids(self) -> npt.NDArray: + """Provide pids corresponding to the output PDF.""" + if self._targetpids is None: + return self.pids + return self._targetpids + + @targetpids.setter + def targetpids(self, value): + self._targetpids = value + + @property + def inputgrid(self) -> interpolation.XGrid: + """Provide :math:`x`-grid expected on the input PDF.""" + if self._inputgrid is None: + return self.xgrid + return self._inputgrid + + @inputgrid.setter + def inputgrid(self, value: interpolation.XGrid): + self._inputgrid = value + + @property + def targetgrid(self) -> interpolation.XGrid: + """Provide :math:`x`-grid corresponding to the output PDF.""" + if self._targetgrid is None: + return self.xgrid + return self._targetgrid + + @targetgrid.setter + def targetgrid(self, value: interpolation.XGrid): + self._targetgrid = value + + @classmethod + def from_dict(cls, dictionary: dict): + """Deserialize rotation. + + Load from full state, but with public names. + + """ + d = dictionary.copy() + for f in fields(cls): + if f.name.startswith("_"): + d[f.name] = d.pop(f.name[1:]) + return cls._from_dict(d) + + @property + def raw(self): + """Serialize rotation. + + Pass through interfaces, access internal values but with a public name. + + """ + d = self._raw() + for key in d.copy(): + if key.startswith("_"): + d[key[1:]] = d.pop(key) + + return d diff --git a/src/eko/io/builder.py b/src/eko/io/builder.py new file mode 100644 index 000000000..55cb7c9e9 --- /dev/null +++ b/src/eko/io/builder.py @@ -0,0 +1,102 @@ +"""Build an EKO from scratch.""" +import pathlib +from dataclasses import dataclass +from typing import Optional + +from . 
import exceptions +from .access import AccessConfigs +from .runcards import OperatorCard, TheoryCard +from .struct import EKO, InternalPaths, Metadata, Rotations + + +@dataclass +class Builder: + """Build EKO instances.""" + + path: pathlib.Path + """Path on disk to .""" + access: AccessConfigs + """Access related configurations.""" + + # optional arguments, required at build time + theory: Optional[TheoryCard] = None + operator: Optional[OperatorCard] = None + + eko: Optional[EKO] = None + + def __post_init__(self): + """Validate paths.""" + if self.access.path.suffix != ".tar": + raise exceptions.OutputNotTar(self.access.path) + if self.access.path.exists(): + raise exceptions.OutputExistsError(self.access.path) + + def load_cards(self, theory: TheoryCard, operator: OperatorCard): + """Load both theory and operator card.""" + self.theory = theory + self.operator = operator + + return self + + def build(self) -> EKO: + """Build EKO instance. + + Returns + ------- + EKO + the constructed instance + + Raises + ------ + RuntimeError + if not enough information is available (at least one card missing) + + """ + missing = [] + for card in ["theory", "operator"]: + if getattr(self, card) is None: + missing.append(card) + + if len(missing) > 0: + raise RuntimeError( + f"Can not build an EKO, since following cards are missing: {missing}" + ) + + # tell the static analyzer as well + assert self.theory is not None + assert self.operator is not None + + self.access.open = True + metadata = Metadata( + _path=self.path, + mu20=(self.operator.mu20, self.theory.heavy.num_flavs_init), + rotations=Rotations(xgrid=self.operator.xgrid), + ) + InternalPaths(self.path).bootstrap( + theory=self.theory.raw, + operator=self.operator.raw, + metadata=metadata.raw, + ) + + self.eko = EKO( + _operators={ep: None for ep in self.operator.evolgrid}, + metadata=metadata, + access=self.access, + ) + + return self.eko + + def __enter__(self): + """Allow Builder to be used in :obj:`with` 
statements.""" + return self + + def __exit__(self, exc_type: type, _exc_value, _traceback): + """Ensure EKO to be closed properly.""" + if exc_type is not None: + return + + # assign to variable to help type checker, otherwise self.eko might be + # a property, and its type can change at every evaluation + eko = self.eko + if eko is not None: + eko.close() diff --git a/src/eko/io/exceptions.py b/src/eko/io/exceptions.py index 5411c7401..b924f8057 100644 --- a/src/eko/io/exceptions.py +++ b/src/eko/io/exceptions.py @@ -24,35 +24,3 @@ class OperatorLocationError(ValueError, OutputError): def __init__(self, path: os.PathLike): self.path = path super().__init__(f"Path '{path}' not in operators folder") - - -class ReadOnlyOperator(RuntimeError, OutputError): - """It is not possible to write on a read-only operator. - - In particular, the behavior would be deceitful, since writing is possible - in-memory and even on the temporary folder. - But eventually, no writing will happen on a persistent archive, so any - modification is lost after exiting the program. - - """ - - -class ClosedOperator(RuntimeError, OutputError): - """It is not possible to write on nor to read from a closed operator. - - This is milder issue than :class:`ReadOnlyOperator`, since in this case not - even the writing on the temporary folder would be possible. - - Instead, it will look like you can access some properties, but the operator - is actually closed, so it should not be used any longer in general. - However, for extremely simple properties, like those available in memory - from :class:`eko.io.struct.Metadata` or :class:`eko.io.struct.AccessConfigs`, there - is no need to raise on read, since those properties are actually available, - but they should always raise on writing, since there is no persistence for - the content written, and it can be deceitful. 
- - Still, the level of protection will be mild, since a thoruough protection - would clutter a lot the code, requiring a lot of maintenance. - "We are adult here". - - """ diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 8d8e2fc4b..9cc7fea88 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -13,7 +13,9 @@ ENDIANNES = "little" HEADER_EXT = ".yaml" -OPERATOR_EXT = [".npy", ".npz"] +ARRAY_EXT = [".npy", ".npz"] +COMPRESSED_EXT = ".lz4" +OPERATOR_EXT = [ext + COMPRESSED_EXT for ext in ARRAY_EXT] class LookupError(ValueError): @@ -57,7 +59,7 @@ def lookup(self, stem: str) -> Path: path for path in self.access.path.iterdir() if path.name.startswith(stem) - if path.suffix in OPERATOR_EXT + if "".join(path.suffixes) in OPERATOR_EXT ] if len(found) == 0: diff --git a/src/eko/io/metadata.py b/src/eko/io/metadata.py new file mode 100644 index 000000000..415b2dfa5 --- /dev/null +++ b/src/eko/io/metadata.py @@ -0,0 +1,103 @@ +"""Define `eko.EKO` metadata.""" +import logging +import os +import pathlib +from dataclasses import dataclass +from typing import Optional + +import yaml + +from .. import version as vmod +from .bases import Bases +from .dictlike import DictLike +from .paths import InternalPaths +from .types import EvolutionPoint as EPoint + +logger = logging.getLogger(__name__) + + +@dataclass +class Metadata(DictLike): + """Manage metadata, and keep them synced on disk. + + It is possible to have a metadata view, in which the path is not actually + connected (i.e. it is set to ``None``). In this case, no update will be + possible, of course. + + Note + ---- + Unfortunately, for nested structures it is not possible to detect a change + in their attributes, so a call to :meth:`update` has to be performed + manually. + + """ + + origin: EPoint + """Inital scale.""" + bases: Bases + """Manipulation information, describing the current status of the EKO (e.g. + `inputgrid` and `targetgrid`). 
+ """ + # tagging information + _path: Optional[pathlib.Path] = None + """Path to temporary dir.""" + version: str = vmod.__version__ + """Library version used to create the corresponding file.""" + data_version: int = vmod.__data_version__ + """Specs version, to which the file adheres.""" + + @classmethod + def load(cls, path: os.PathLike): + """Load metadata from open folder. + + Parameters + ---------- + path: os.PathLike + the path to the open EKO folder + + Returns + ------- + bool + loaded metadata + + """ + path = pathlib.Path(path) + content = cls.from_dict( + yaml.safe_load(InternalPaths(path).metadata.read_text(encoding="utf-8")) + ) + content._path = path + return content + + def update(self): + """Update the disk copy of metadata.""" + if self._path is None: + logger.info("Impossible to set metadata, no file attached.") + else: + with open(InternalPaths(self._path).metadata, "w") as fd: + yaml.safe_dump(self.raw, fd) + + @property + def path(self): + """Access temporary dir path. + + Raises + ------ + RuntimeError + if path has not been initialized before + + """ + if self._path is None: + raise RuntimeError( + "Access to EKO directory attempted, but not dir has been set." + ) + return self._path + + @path.setter + def path(self, value: pathlib.Path): + """Set temporary dir path.""" + self._path = value + + @property + def raw(self): + """Override default :meth:`DictLike.raw` representation to exclude path.""" + return self.public_raw diff --git a/src/eko/io/paths.py b/src/eko/io/paths.py new file mode 100644 index 000000000..44b9821a2 --- /dev/null +++ b/src/eko/io/paths.py @@ -0,0 +1,74 @@ +"""Define paths inside an `eko.EKO` object.""" +import pathlib +from dataclasses import dataclass + +import yaml + +THEORYFILE = "theory.yaml" +OPERATORFILE = "operator.yaml" +METADATAFILE = "metadata.yaml" +RECIPESDIR = "recipes" +PARTSDIR = "parts" +OPERATORSDIR = "operators" + + +@dataclass +class InternalPaths: + """Paths inside an EKO folder. 
+ + This structure exists to locate in a single place the internal structure of + an EKO folder. + + The only value required is the root path, everything else is computed + relative to this root. + In case only the relative paths are required, just create this structure + with :attr:`root` equal to emtpty string or ``"."``. + + """ + + root: pathlib.Path + "The root of the EKO folder (use placeholder if not relevant)" + + @property + def metadata(self): + """Metadata file.""" + return self.root / METADATAFILE + + @property + def recipes(self): + """Recipes folder.""" + return self.root / RECIPESDIR + + @property + def parts(self): + """Parts folder.""" + return self.root / PARTSDIR + + @property + def operators(self): + """Operators folder. + + This is the one containing the actual EKO components, after + computation has been performed. + + """ + return self.root / OPERATORSDIR + + @property + def theory_card(self): + """Theory card dump.""" + return self.root / THEORYFILE + + @property + def operator_card(self): + """Operator card dump.""" + return self.root / OPERATORFILE + + def bootstrap(self, theory: dict, operator: dict, metadata: dict): + """Create directory structure.""" + self.metadata.write_text(yaml.dump(metadata), encoding="utf-8") + self.theory_card.write_text(yaml.dump(theory), encoding="utf-8") + self.operator_card.write_text(yaml.dump(operator), encoding="utf-8") + self.recipes.mkdir() + self.parts.mkdir() + self.operators.mkdir() diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 6a0c58fb0..70b03fb76 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -1,5 +1,4 @@ """Define output representation structures.""" -import base64 import contextlib import copy import logging @@ -8,395 +7,26 @@ import shutil import tarfile import tempfile -from dataclasses import dataclass, fields +from dataclasses import dataclass from typing import Dict, List, Optional import numpy as np -import numpy.typing as npt import yaml -from .. 
import basis_rotation as br from .. import interpolation -from .. import version as vmod from . import exceptions, raw -from .dictlike import DictLike +from .access import AccessConfigs +from .bases import Bases +from .builder import Builder from .items import Operator +from .metadata import Metadata +from .paths import InternalPaths from .runcards import OperatorCard, TheoryCard from .types import EvolutionPoint as EPoint from .types import SquaredScale logger = logging.getLogger(__name__) -THEORYFILE = "theory.yaml" -OPERATORFILE = "operator.yaml" -METADATAFILE = "metadata.yaml" -RECIPESDIR = "recipes" -PARTSDIR = "parts" -OPERATORSDIR = "operators" - -COMPRESSED_SUFFIX = ".lz4" - - -@dataclass -class InternalPaths: - """Paths inside an EKO folder. - - This structure exists to locate in a single place the internal structure of - an EKO folder. - - The only value required is the root path, everything else is computed - relative to this root. - In case only the relative paths are required, just create this structure - with :attr:`root` equal to emtpty string or ``"."``. - - """ - - root: pathlib.Path - "The root of the EKO folder (use placeholder if not relevant)" - - @property - def metadata(self): - """Metadata file.""" - return self.root / METADATAFILE - - @property - def recipes(self): - """Recipes folder.""" - return self.root / RECIPESDIR - - @property - def parts(self): - """Parts folder.""" - return self.root / PARTSDIR - - @property - def operators(self): - """Operators folder. - - This is the one containing the actual EKO components, after - computation has been performed. - - """ - return self.root / OPERATORSDIR - - @property - def theory_card(self): - """Theory card dump.""" - return self.root / THEORYFILE - - @property - def operator_card(self): - """Operator card dump.""" - return self.root / OPERATORFILE - - def bootstrap(self, theory: dict, operator: dict, metadata: dict): - """Create directory structure. 
- - Parameters - ---------- - dirpath : os.PathLike - path to create the directory into - theory : dict - theory card to be dumped - operator : dict - operator card to be dumped - metadata : dict - metadata of the operator - - """ - self.metadata.write_text(yaml.dump(metadata), encoding="utf-8") - self.theory_card.write_text(yaml.dump(theory), encoding="utf-8") - self.operator_card.write_text(yaml.dump(operator), encoding="utf-8") - self.recipes.mkdir() - self.parts.mkdir() - self.operators.mkdir() - - @staticmethod - def opname(ep: EPoint) -> str: - r"""Operator file name from :math:`(\mu^2, n_f)` value.""" - mu2 = ep[0] - nf = ep[1] - decoded = np.float64(mu2).tobytes() - mu2bytes = base64.urlsafe_b64encode(decoded).decode() - return f"{mu2bytes}_{nf}" - - def oppath(self, ep: EPoint) -> pathlib.Path: - r"""Retrieve operator file path from :math:`\mu^2` value. - - This method looks for an existing path matching. - - Raises - ------ - ValueError - if the path is not found, or more than one are matching the - specified value of :math:`\mu2` - - """ - name = self.opname(ep) - oppaths = list( - filter(lambda path: path.name.startswith(name), self.operators.iterdir()) - ) - - if len(oppaths) == 0: - raise ValueError(f"Target value '{ep}' not available.") - elif len(oppaths) > 1: - raise ValueError(f"Too many operators associated to '{ep}':\n{oppaths}") - - return oppaths[0] - - def opcompressed(self, path: os.PathLike) -> bool: - """Check if the operator at the path specified is compressed. - - Raises - ------ - OperatorLocationError - if the path is not inside the operators folder - - """ - path = pathlib.Path(path) - if self.operators not in path.parents: - raise exceptions.OperatorLocationError(path) - - return path.suffix == COMPRESSED_SUFFIX - - def optarget(self, path: os.PathLike) -> EPoint: - r"""Extract :math:`\mu2` value from operator path. 
- - Raises - ------ - OperatorLocationError - if the path is not inside the operators folder - - """ - path = pathlib.Path(path) - if self.operators not in path.parents: - raise exceptions.OperatorLocationError(path) - - name = path.stem.split(".")[0] - encoded, nfseg = name.split("_") - decoded = base64.urlsafe_b64decode(encoded) - mu2 = float(np.frombuffer(decoded, dtype=np.float64)[0]) - nf = int(nfseg) - return (mu2, nf) - - def opnewpath( - self, ep: EPoint, compress: bool = True, without_err: bool = True - ) -> pathlib.Path: - r"""Compute the path associated to :math:`(\mu^2, n_f)` value.""" - suffix = ".npy" if without_err else ".npz" - if compress: - suffix += COMPRESSED_SUFFIX - return self.operators / (self.opname(ep) + suffix) - - @property - def grid(self) -> List[EPoint]: - r"""Provide the array of :math:`(\mu^2, n_f)` values of existing operators.""" - if self.root is None: - raise RuntimeError() - - return list( - sorted( - map( - lambda p: self.optarget(p), - InternalPaths(self.root).operators.iterdir(), - ) - ) - ) - - -@dataclass -class Rotations(DictLike): - """Rotations related configurations. - - Here "Rotation" is intended in a broad sense: it includes both rotations in - flavor space (labeled with suffix `pids`) and in :math:`x`-space (labeled - with suffix `grid`). - Rotations in :math:`x`-space correspond to reinterpolate the result on a - different basis of polynomials. 
- - """ - - xgrid: interpolation.XGrid - """Internal momentum fraction grid.""" - _targetgrid: Optional[interpolation.XGrid] = None - _inputgrid: Optional[interpolation.XGrid] = None - _targetpids: Optional[npt.NDArray] = None - _inputpids: Optional[npt.NDArray] = None - - def __post_init__(self): - """Adjust types when loaded from serialized object.""" - for attr in ("xgrid", "_inputgrid", "_targetgrid"): - value = getattr(self, attr) - if value is None: - continue - if isinstance(value, (np.ndarray, list)): - setattr(self, attr, interpolation.XGrid(value)) - elif not isinstance(value, interpolation.XGrid): - setattr(self, attr, interpolation.XGrid.load(value)) - - @property - def pids(self): - """Internal flavor basis, used for computation.""" - return np.array(br.flavor_basis_pids) - - @property - def inputpids(self) -> npt.NDArray: - """Provide pids expected on the input PDF.""" - if self._inputpids is None: - return self.pids - return self._inputpids - - @inputpids.setter - def inputpids(self, value): - self._inputpids = value - - @property - def targetpids(self) -> npt.NDArray: - """Provide pids corresponding to the output PDF.""" - if self._targetpids is None: - return self.pids - return self._targetpids - - @targetpids.setter - def targetpids(self, value): - self._targetpids = value - - @property - def inputgrid(self) -> interpolation.XGrid: - """Provide :math:`x`-grid expected on the input PDF.""" - if self._inputgrid is None: - return self.xgrid - return self._inputgrid - - @inputgrid.setter - def inputgrid(self, value: interpolation.XGrid): - self._inputgrid = value - - @property - def targetgrid(self) -> interpolation.XGrid: - """Provide :math:`x`-grid corresponding to the output PDF.""" - if self._targetgrid is None: - return self.xgrid - return self._targetgrid - - @targetgrid.setter - def targetgrid(self, value: interpolation.XGrid): - self._targetgrid = value - - @classmethod - def from_dict(cls, dictionary: dict): - """Deserialize rotation. 
- - Load from full state, but with public names. - - """ - d = dictionary.copy() - for f in fields(cls): - if f.name.startswith("_"): - d[f.name] = d.pop(f.name[1:]) - return cls._from_dict(d) - - @property - def raw(self): - """Serialize rotation. - - Pass through interfaces, access internal values but with a public name. - - """ - d = self._raw() - for key in d.copy(): - if key.startswith("_"): - d[key[1:]] = d.pop(key) - - return d - - -@dataclass -class Metadata(DictLike): - """Manage metadata, and keep them synced on disk. - - It is possible to have a metadata view, in which the path is not actually - connected (i.e. it is set to ``None``). In this case, no update will be - possible, of course. - - Note - ---- - Unfortunately, for nested structures it is not possible to detect a change - in their attributes, so a call to :meth:`update` has to be performed - manually. - - """ - - mu20: EPoint - """Inital scale.""" - rotations: Rotations - """Manipulation information, describing the current status of the EKO (e.g. - `inputgrid` and `targetgrid`). - """ - # tagging information - _path: Optional[pathlib.Path] = None - """Path to temporary dir.""" - version: str = vmod.__version__ - """Library version used to create the corresponding file.""" - data_version: int = vmod.__data_version__ - """Specs version, to which the file adheres.""" - - @classmethod - def load(cls, path: os.PathLike): - """Load metadata from open folder. 
- - Parameters - ---------- - path: os.PathLike - the path to the open EKO folder - - Returns - ------- - bool - loaded metadata - - """ - path = pathlib.Path(path) - content = cls.from_dict( - yaml.safe_load(InternalPaths(path).metadata.read_text(encoding="utf-8")) - ) - content._path = path - return content - - def update(self): - """Update the disk copy of metadata.""" - if self._path is None: - logger.info("Impossible to set metadata, no file attached.") - else: - with open(InternalPaths(self._path).metadata, "w") as fd: - yaml.safe_dump(self.raw, fd) - - @property - def path(self): - """Access temporary dir path. - - Raises - ------ - RuntimeError - if path has not been initialized before - - """ - if self._path is None: - raise RuntimeError( - "Access to EKO directory attempted, but not dir has been set." - ) - return self._path - - @path.setter - def path(self, value: pathlib.Path): - """Set temporary dir path.""" - self._path = value - - @property - def raw(self): - """Override default :meth:`DictLike.raw` representation to exclude path.""" - return self.public_raw - @dataclass class EKO: @@ -449,7 +79,7 @@ def paths(self) -> InternalPaths: return InternalPaths(self.metadata.path) @property - def rotations(self) -> Rotations: + def rotations(self) -> Bases: """Rotations information.""" return self.metadata.rotations @@ -515,70 +145,15 @@ def permissions(self): # operator management # ------------------- - def __getitem__(self, ep: EPoint) -> Operator: - r"""Retrieve operator for given :math:`\mu^2`. - - If the operator is not already in memory, it will be automatically - loaded. 
- - """ - self.access.assert_open() - - if ep in self._operators: - op = self._operators[ep] - if op is not None: - return op - - oppath = self.paths.oppath(ep) - compressed = self.paths.opcompressed(oppath) - - with open(oppath, "rb") as fd: - op = Operator.load(fd, compressed=compressed) - - self._operators[ep] = op - return op + def __getitem__(self, ep: EPoint) -> Optional[Operator]: + r"""Retrieve operator for given evolution point.""" + return None def __setitem__(self, ep: EPoint, op: Operator, compress: bool = True): - """Set operator for given :math:`mu^2`. - - The operator is automatically dumped on disk. - - If ``compress``, the output array is compressed. - - """ - self.access.assert_writeable() - - if not isinstance(op, Operator): - raise ValueError("Only an Operator can be added to an EKO") - - without_err = op.error is None - oppath = self.paths.opnewpath(ep, compress=compress, without_err=without_err) - - with open(oppath, "wb") as fd: - without_err2 = op.save(fd, compress) - assert without_err == without_err2 - - self._operators[ep] = op + """Set operator associated to an evolution point.""" def __delitem__(self, ep: EPoint): - """Drop operator from memory. - - This method only drops the operator from memory, and it's not expected - to do anything else. - - Autosave is done on set, and explicit saves are performed by the - computation functions. - - If a further explicit save is required, repeat explicit assignment:: - - eko[ep] = eko[ep] - - This is only useful if the operator has been mutated in place, that in - general should be avoided, since the operator should only be the result - of a full computation or a library manipulation. 
- - """ - self._operators[ep] = None + """Drop operator from memory.""" @contextlib.contextmanager def operator(self, ep: EPoint): @@ -858,96 +433,3 @@ def raw(self) -> dict: """ return dict(mu2grid=self.mu2grid, metadata=self.metadata.raw) - - -@dataclass -class Builder: - """Build EKO instances.""" - - path: pathlib.Path - """Path on disk to .""" - access: AccessConfigs - """Access related configurations.""" - - # optional arguments, required at build time - theory: Optional[TheoryCard] = None - operator: Optional[OperatorCard] = None - - eko: Optional[EKO] = None - - def __post_init__(self): - """Validate paths.""" - if self.access.path.suffix != ".tar": - raise exceptions.OutputNotTar(self.access.path) - if self.access.path.exists(): - raise exceptions.OutputExistsError(self.access.path) - - def load_cards(self, theory: TheoryCard, operator: OperatorCard): - """Load both theory and operator card.""" - self.theory = theory - self.operator = operator - - return self - - def build(self) -> EKO: - """Build EKO instance. 
- - Returns - ------- - EKO - the constructed instance - - Raises - ------ - RuntimeError - if not enough information is available (at least one card missing) - - """ - missing = [] - for card in ["theory", "operator"]: - if getattr(self, card) is None: - missing.append(card) - - if len(missing) > 0: - raise RuntimeError( - f"Can not build an EKO, since following cards are missing: {missing}" - ) - - # tell the static analyzer as well - assert self.theory is not None - assert self.operator is not None - - self.access.open = True - metadata = Metadata( - _path=self.path, - mu20=(self.operator.mu20, self.theory.heavy.num_flavs_init), - rotations=Rotations(xgrid=self.operator.xgrid), - ) - InternalPaths(self.path).bootstrap( - theory=self.theory.raw, - operator=self.operator.raw, - metadata=metadata.raw, - ) - - self.eko = EKO( - _operators={ep: None for ep in self.operator.evolgrid}, - metadata=metadata, - access=self.access, - ) - - return self.eko - - def __enter__(self): - """Allow Builder to be used in :obj:`with` statements.""" - return self - - def __exit__(self, exc_type: type, _exc_value, _traceback): - """Ensure EKO to be closed properly.""" - if exc_type is not None: - return - - # assign to variable to help type checker, otherwise self.eko might be - # a property, and its type can change at every evaluation - eko = self.eko - if eko is not None: - eko.close() From c684026dfb5509428ffe1fbf6bee1fe198be8f79 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 13:30:54 +0200 Subject: [PATCH 035/154] Update src/eko/io/inventory.py Co-authored-by: Felix Hekhorn --- src/eko/io/inventory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 9cc7fea88..e3689c11c 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -45,7 +45,7 @@ def operator_name(header: Header, err: bool): class Inventory: """Assets manager. 
- In particular, manaage autosave, autoload, and memory caching. + In particular, manage autosave, autoload, and memory caching. """ From 405a84b58c2bc5f5686fe5ae76b9ef462818bdca Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 19:10:57 +0200 Subject: [PATCH 036/154] Simplify headers, drop old equivalent --- src/eko/io/items.py | 101 +++++++++++++++++++++++++++----------- src/eko/runner/compute.py | 8 --- src/eko/runner/parts.py | 72 +++------------------------ src/eko/runner/recipes.py | 28 ----------- 4 files changed, 78 insertions(+), 131 deletions(-) delete mode 100644 src/eko/runner/compute.py diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 67a77ba24..6c2d2afed 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -1,6 +1,6 @@ """Inventory items definition.""" import io -from dataclasses import dataclass +from dataclasses import asdict, dataclass from typing import BinaryIO, Optional import lz4.frame @@ -8,7 +8,10 @@ import numpy.lib.npyio as npyio import numpy.typing as npt +from eko import matchings + from . import exceptions +from .types import FlavorIndex, FlavorsNumber, SquaredScale @dataclass(frozen=True) @@ -16,6 +19,74 @@ class Header: """Item header, containing metadata.""" +@dataclass(frozen=True) +class Evolution(Header): + """Information to compute an evolution operator. + + Describe evolution with a fixed number of light flavors. + + """ + + origin: SquaredScale + """Starting point.""" + target: SquaredScale + """Final point.""" + nf: FlavorsNumber + """Number of active flavors.""" + + cliff: bool = False + """Whether the operator is reaching a matching scale. + + Cliff operators are the only ones allowed to be intermediate, even though + they can also be final segments of an evolution path (see + :meth:`eko.matchings.Atlas.path`). 
+ + Intermediate ones always have final scales :attr:`mu2` corresponding to + matching scales, and initial scales :attr:`mu20` corresponding to either + matching scales or the global initial scale of the EKO. + + Note + ---- + + The name of *cliff* operators stems from the following diagram:: + + nf = 3 -------------------------------------------------------- + | + nf = 4 -------------------------------------------------------- + | + nf = 5 -------------------------------------------------------- + | + nf = 6 -------------------------------------------------------- + + where each lane corresponds to DGLAP evolution with the relative number of + running flavors, and the vertical bridges are the perturbative matchings + between two different "adjacent" schemes. + + """ + + @classmethod + def from_atlas(cls, segment: matchings.Segment, cliff: bool = False): + """Create instance from analogous Atlas object.""" + return cls(**asdict(segment), cliff=cliff) + + +@dataclass(frozen=True) +class Matching(Header): + """Information to compute a matching operator. + + Describe the matching between two different flavor number schemes. + + """ + + scale: SquaredScale + hq: FlavorIndex + + @classmethod + def from_atlas(cls, matching: matchings.Matching): + """Create instance from analogous Atlas object.""" + return cls(**asdict(matching)) + + @dataclass(frozen=True) class Operator: """Operator representation. 
@@ -90,34 +161,6 @@ def load(cls, stream: BinaryIO): return cls(operator=op, error=err) -@dataclass(frozen=True) -class Recipe(Header): - """Information to compute an evolution operator.""" - - matching: bool - - -@dataclass(frozen=True) -class RecipeMatching(Header): - """Information to compute a matching operator.""" - - matching: bool - - -@dataclass(frozen=True) -class Part(Header): - """Evolution operator.""" - - matching: bool - - -@dataclass(frozen=True) -class PartMatching(Header): - """Matching operator.""" - - matching: bool - - @dataclass(frozen=True) class Item: """Inventory item.""" diff --git a/src/eko/runner/compute.py b/src/eko/runner/compute.py deleted file mode 100644 index acaabe605..000000000 --- a/src/eko/runner/compute.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Compute parts from recipes.""" -from .parts import Part -from .recipes import Recipe - - -def compute(recipe: Recipe) -> Part: - """Compute EKO component in isolation.""" - return Part("ciao") diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 5ac0fbc71..67b91f5f6 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,69 +1,9 @@ -"""Operator components.""" -from abc import ABC -from dataclasses import dataclass +"""Compute operator components.""" +import numpy as np -from eko.io.struct import Operator +from ..io.items import Operator, Recipe -from ..io.dictlike import DictLike -from ..io.types import SquaredScale - -@dataclass -class Part(DictLike, ABC): - """An atomic operator ingredient. - - The operator is always in flavor basis, and on the x-grid specified for - computation, i.e. it is a rank-4 tensor of shape:: - - (flavor basis) x (x-grid) x (flavor basis) x (x-grid) - - """ - - operator: Operator - - -@dataclass -class Evolution(Part): - """Evolution in a fixed number of flavors.""" - - cliff: bool - """Whether the operator is reaching a matching scale. 
- - Cliff operators are the only ones allowed to be intermediate, even though - they can also be final segments of an evolution path (see - :meth:`eko.matchings.Atlas.path`). - - Intermediate ones always have final scales :attr:`mu2` corresponding to - matching scales, and initial scales :attr:`mu20` corresponding to either - matching scales or the global initial scale of the EKO. - - Note - ---- - - The name of *cliff* operators stems from the following diagram:: - - nf = 3 -------------------------------------------------------- - | - nf = 4 -------------------------------------------------------- - | - nf = 5 -------------------------------------------------------- - | - nf = 6 -------------------------------------------------------- - - where each lane corresponds to DGLAP evolution with the relative number of - running flavors, and the vertical bridges are the perturbative matchings - between two different "adjacent" schemes. - - """ - mu20: SquaredScale - """Initial scale.""" - mu2: SquaredScale - """Final scale.""" - - -@dataclass -class Matching(Part): - """Matching conditions between two different flavor schemes, at a given scale.""" - - mu2: SquaredScale - """Matching scale.""" +def compute(recipe: Recipe) -> Operator: + """Compute EKO component in isolation.""" + return Operator(np.array([])) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index a7e62b825..07275e4d9 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -1,36 +1,8 @@ """Recipes containing instructions for atomic computations.""" -from abc import ABC -from dataclasses import dataclass - from .. import EKO -from ..io.dictlike import DictLike -from ..io.types import SquaredScale from . 
import commons -@dataclass -class Recipe(DictLike, ABC): - """Base recipe structure.""" - - name: str - - -@dataclass -class EvolutionRecipe(Recipe): - """Recipe compute evolution with a fixed number of light flavors.""" - - final: bool - mu20: SquaredScale - mu2: SquaredScale - - -@dataclass -class MatchingRecipe(Recipe): - """Recipe to compute the matching between two different flavor number schemes.""" - - mu2: SquaredScale - - def create(eko: EKO): """Create all associated recipes.""" atlas = commons.atlas(eko.theory_card, eko.operator_card) From f3c8366635ed02a72416a30696a53706fa106c87 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 19:20:01 +0200 Subject: [PATCH 037/154] Reimport builder in struct, to avoid circular import --- src/eko/io/builder.py | 102 ------------------------------------------ src/eko/io/items.py | 14 ++++++ src/eko/io/struct.py | 94 +++++++++++++++++++++++++++++++++++++- 3 files changed, 107 insertions(+), 103 deletions(-) delete mode 100644 src/eko/io/builder.py diff --git a/src/eko/io/builder.py b/src/eko/io/builder.py deleted file mode 100644 index 55cb7c9e9..000000000 --- a/src/eko/io/builder.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Build an EKO from scratch.""" -import pathlib -from dataclasses import dataclass -from typing import Optional - -from . 
import exceptions -from .access import AccessConfigs -from .runcards import OperatorCard, TheoryCard -from .struct import EKO, InternalPaths, Metadata, Rotations - - -@dataclass -class Builder: - """Build EKO instances.""" - - path: pathlib.Path - """Path on disk to .""" - access: AccessConfigs - """Access related configurations.""" - - # optional arguments, required at build time - theory: Optional[TheoryCard] = None - operator: Optional[OperatorCard] = None - - eko: Optional[EKO] = None - - def __post_init__(self): - """Validate paths.""" - if self.access.path.suffix != ".tar": - raise exceptions.OutputNotTar(self.access.path) - if self.access.path.exists(): - raise exceptions.OutputExistsError(self.access.path) - - def load_cards(self, theory: TheoryCard, operator: OperatorCard): - """Load both theory and operator card.""" - self.theory = theory - self.operator = operator - - return self - - def build(self) -> EKO: - """Build EKO instance. - - Returns - ------- - EKO - the constructed instance - - Raises - ------ - RuntimeError - if not enough information is available (at least one card missing) - - """ - missing = [] - for card in ["theory", "operator"]: - if getattr(self, card) is None: - missing.append(card) - - if len(missing) > 0: - raise RuntimeError( - f"Can not build an EKO, since following cards are missing: {missing}" - ) - - # tell the static analyzer as well - assert self.theory is not None - assert self.operator is not None - - self.access.open = True - metadata = Metadata( - _path=self.path, - mu20=(self.operator.mu20, self.theory.heavy.num_flavs_init), - rotations=Rotations(xgrid=self.operator.xgrid), - ) - InternalPaths(self.path).bootstrap( - theory=self.theory.raw, - operator=self.operator.raw, - metadata=metadata.raw, - ) - - self.eko = EKO( - _operators={ep: None for ep in self.operator.evolgrid}, - metadata=metadata, - access=self.access, - ) - - return self.eko - - def __enter__(self): - """Allow Builder to be used in :obj:`with` 
statements.""" - return self - - def __exit__(self, exc_type: type, _exc_value, _traceback): - """Ensure EKO to be closed properly.""" - if exc_type is not None: - return - - # assign to variable to help type checker, otherwise self.eko might be - # a property, and its type can change at every evaluation - eko = self.eko - if eko is not None: - eko.close() diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 6c2d2afed..09fba572a 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -11,6 +11,7 @@ from eko import matchings from . import exceptions +from .types import EvolutionPoint as EPoint from .types import FlavorIndex, FlavorsNumber, SquaredScale @@ -87,6 +88,19 @@ def from_atlas(cls, matching: matchings.Matching): return cls(**asdict(matching)) +@dataclass(frozen=True) +class Target(Header): + """Target evolution point, labeling evolution from origin to there.""" + + scale: SquaredScale + nf: FlavorsNumber + + @classmethod + def from_ep(cls, ep: EPoint): + """Create instance from the :cls:`EPoint` analogue.""" + return cls(*ep) + + @dataclass(frozen=True) class Operator: """Operator representation. diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 70b03fb76..5cb89ad0b 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -17,7 +17,6 @@ from . 
import exceptions, raw from .access import AccessConfigs from .bases import Bases -from .builder import Builder from .items import Operator from .metadata import Metadata from .paths import InternalPaths @@ -433,3 +432,96 @@ def raw(self) -> dict: """ return dict(mu2grid=self.mu2grid, metadata=self.metadata.raw) + + +@dataclass +class Builder: + """Build EKO instances.""" + + path: pathlib.Path + """Path on disk to .""" + access: AccessConfigs + """Access related configurations.""" + + # optional arguments, required at build time + theory: Optional[TheoryCard] = None + operator: Optional[OperatorCard] = None + + eko: Optional[EKO] = None + + def __post_init__(self): + """Validate paths.""" + if self.access.path.suffix != ".tar": + raise exceptions.OutputNotTar(self.access.path) + if self.access.path.exists(): + raise exceptions.OutputExistsError(self.access.path) + + def load_cards(self, theory: TheoryCard, operator: OperatorCard): + """Load both theory and operator card.""" + self.theory = theory + self.operator = operator + + return self + + def build(self) -> EKO: + """Build EKO instance. 
+ + Returns + ------- + EKO + the constructed instance + + Raises + ------ + RuntimeError + if not enough information is available (at least one card missing) + + """ + missing = [] + for card in ["theory", "operator"]: + if getattr(self, card) is None: + missing.append(card) + + if len(missing) > 0: + raise RuntimeError( + f"Can not build an EKO, since following cards are missing: {missing}" + ) + + # tell the static analyzer as well + assert self.theory is not None + assert self.operator is not None + + self.access.open = True + metadata = Metadata( + _path=self.path, + origin=(self.operator.mu20, self.theory.heavy.num_flavs_init), + bases=Bases(xgrid=self.operator.xgrid), + ) + InternalPaths(self.path).bootstrap( + theory=self.theory.raw, + operator=self.operator.raw, + metadata=metadata.raw, + ) + + self.eko = EKO( + _operators={ep: None for ep in self.operator.evolgrid}, + metadata=metadata, + access=self.access, + ) + + return self.eko + + def __enter__(self): + """Allow Builder to be used in :obj:`with` statements.""" + return self + + def __exit__(self, exc_type: type, _exc_value, _traceback): + """Ensure EKO to be closed properly.""" + if exc_type is not None: + return + + # assign to variable to help type checker, otherwise self.eko might be + # a property, and its type can change at every evaluation + eko = self.eko + if eko is not None: + eko.close() From 1dca8f779f145d9d7400a8971b35a4d916009e32 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 19:38:01 +0200 Subject: [PATCH 038/154] Fix small bugs in classes definitions --- src/eko/io/inventory.py | 4 ++-- src/eko/io/items.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index e3689c11c..7da3d628f 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -1,6 +1,6 @@ """Manage assets used during computation.""" import base64 -from dataclasses import asdict, dataclass +from dataclasses import 
asdict, dataclass, field from pathlib import Path from typing import Dict, Optional @@ -50,7 +50,7 @@ class Inventory: """ access: AccessConfigs - cache: Dict[Header, Optional[Operator]] = {} + cache: Dict[Header, Optional[Operator]] = field(default_factory=dict) contentful: bool = True def lookup(self, stem: str) -> Path: diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 09fba572a..3d28e860c 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -97,7 +97,7 @@ class Target(Header): @classmethod def from_ep(cls, ep: EPoint): - """Create instance from the :cls:`EPoint` analogue.""" + """Create instance from the :class:`eko.io.types.EvolutionPoint` analogue.""" return cls(*ep) From a932cbef8936849ff2c91a390b8e9589f760b1fa Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 19:40:52 +0200 Subject: [PATCH 039/154] Implement first version of recipe creation --- src/eko/io/items.py | 5 ++++- src/eko/runner/operators.py | 1 + src/eko/runner/parts.py | 4 ++-- src/eko/runner/recipes.py | 21 +++++++++++++++++---- 4 files changed, 24 insertions(+), 7 deletions(-) create mode 100644 src/eko/runner/operators.py diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 3d28e860c..5867d73fa 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -1,7 +1,7 @@ """Inventory items definition.""" import io from dataclasses import asdict, dataclass -from typing import BinaryIO, Optional +from typing import BinaryIO, Optional, Union import lz4.frame import numpy as np @@ -88,6 +88,9 @@ def from_atlas(cls, matching: matchings.Matching): return cls(**asdict(matching)) +Recipe = Union[Evolution, Matching] + + @dataclass(frozen=True) class Target(Header): """Target evolution point, labeling evolution from origin to there.""" diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py new file mode 100644 index 000000000..d66bd89f1 --- /dev/null +++ b/src/eko/runner/operators.py @@ -0,0 +1 @@ +"""Combine parts into operators.""" diff 
--git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 67b91f5f6..f93c498e4 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,9 +1,9 @@ """Compute operator components.""" import numpy as np -from ..io.items import Operator, Recipe +from ..io.items import Operator -def compute(recipe: Recipe) -> Operator: +def compute(recipe) -> Operator: """Compute EKO component in isolation.""" return Operator(np.array([])) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index 07275e4d9..e447f64e4 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -1,16 +1,29 @@ """Recipes containing instructions for atomic computations.""" +from typing import List + from .. import EKO +from ..io.items import Evolution, Matching, Recipe +from ..matchings import Segment from . import commons -def create(eko: EKO): +def create(eko: EKO) -> List[Recipe]: """Create all associated recipes.""" atlas = commons.atlas(eko.theory_card, eko.operator_card) - terminal = [] + recipes = [] for ep in eko: # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 - blocks = atlas.path(ep) - terminal.append(blocks.pop()) + blocks = atlas.matched_path(ep) + for block in blocks: + if isinstance(block, Segment): + cliff = block.target in atlas.walls + recipe = Evolution.from_atlas(block, cliff=cliff) + else: + recipe = Matching.from_atlas(block) + + recipes.append(recipe) + + return recipes From 1e773b5a9f702ead9ae5b6c08c2f1954d3c7b53e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 19:44:49 +0200 Subject: [PATCH 040/154] Fix minimally outdated tests, to make pytest run --- src/eko/io/struct.py | 6 +++--- tests/eko/io/test_runcards.py | 14 +++++++------- tests/eko/runner/test_compute.py | 9 --------- 3 files changed, 10 insertions(+), 19 deletions(-) delete mode 100644 tests/eko/runner/test_compute.py diff --git a/src/eko/io/struct.py 
b/src/eko/io/struct.py index 5cb89ad0b..28db4ace9 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -80,7 +80,7 @@ def paths(self) -> InternalPaths: @property def rotations(self) -> Bases: """Rotations information.""" - return self.metadata.rotations + return self.metadata.bases @property def xgrid(self) -> interpolation.XGrid: @@ -96,7 +96,7 @@ def xgrid(self, value: interpolation.XGrid): @property def mu20(self) -> SquaredScale: """Provide squared initial scale.""" - return self.metadata.mu20[0] + return self.metadata.origin[0] @property def mu2grid(self) -> List[SquaredScale]: @@ -148,7 +148,7 @@ def __getitem__(self, ep: EPoint) -> Optional[Operator]: r"""Retrieve operator for given evolution point.""" return None - def __setitem__(self, ep: EPoint, op: Operator, compress: bool = True): + def __setitem__(self, ep: EPoint, op: Operator): """Set operator associated to an evolution point.""" def __delitem__(self, ep: EPoint): diff --git a/tests/eko/io/test_runcards.py b/tests/eko/io/test_runcards.py index a96722614..995f2b63b 100644 --- a/tests/eko/io/test_runcards.py +++ b/tests/eko/io/test_runcards.py @@ -9,27 +9,27 @@ from eko import interpolation from eko.io import runcards as rc -from eko.io.struct import Rotations +from eko.io.bases import Bases from ekomark.data.operators import default_card as operator_card -class TestRotations: +class TestBases: XGRID_TEST = [1e-3, 1e-2, 1e-1, 1.0] def test_serialization(self): - rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Bases(interpolation.XGrid(self.XGRID_TEST)) d = rot.raw rot1 = rot.from_dict(d) - for f in fields(Rotations): + for f in fields(Bases): assert getattr(rot, f.name) == getattr(rot1, f.name) assert d["targetgrid"] is None assert "_targetgrid" not in d def test_pids(self): - rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Bases(interpolation.XGrid(self.XGRID_TEST)) # no check on correctness of value set rot.inputpids = [0, 1] @@ -39,7 +39,7 @@ def 
test_pids(self): assert np.all(rot.targetpids == rot.pids) def test_grids(self): - rot = Rotations(interpolation.XGrid(self.XGRID_TEST)) + rot = Bases(interpolation.XGrid(self.XGRID_TEST)) # no check on correctness of value set rot.inputgrid = interpolation.XGrid([0.1, 1]) @@ -59,7 +59,7 @@ def test_flavored_mu2grid(): # check we can dump stream = StringIO() - ser = yaml.safe_dump(flavored, stream) + _ = yaml.safe_dump(flavored, stream) stream.seek(0) deser = yaml.safe_load(stream) assert len(flavored) == len(deser) diff --git a/tests/eko/runner/test_compute.py b/tests/eko/runner/test_compute.py deleted file mode 100644 index b00a3e362..000000000 --- a/tests/eko/runner/test_compute.py +++ /dev/null @@ -1,9 +0,0 @@ -from eko.runner.compute import compute -from eko.runner.recipes import Recipe - - -def test_compute(): - recipe = Recipe("ciao") - part = compute(recipe) - - assert hasattr(part, "operator") From 8d914bd1f09c1c740136943dd3d00259677c8eb0 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 28 Apr 2023 20:17:07 +0200 Subject: [PATCH 041/154] Update inventory iteration --- src/eko/io/inventory.py | 37 ++++++++++++++++++++++++++++++++++++- src/eko/io/items.py | 7 ++++++- src/eko/io/struct.py | 39 ++++++++++++++++++++++++++++----------- 3 files changed, 70 insertions(+), 13 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 7da3d628f..9a33f43f0 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -2,7 +2,7 @@ import base64 from dataclasses import asdict, dataclass, field from pathlib import Path -from typing import Dict, Optional +from typing import Dict, Optional, Type import yaml @@ -50,6 +50,7 @@ class Inventory: """ access: AccessConfigs + header_type: Type[Header] cache: Dict[Header, Optional[Operator]] = field(default_factory=dict) contentful: bool = True @@ -138,3 +139,37 @@ def __delitem__(self, header: Header): """ self.cache[header] = None + + def __iter__(self): + """Iterate over loaded 
content. + + This iteration is only over cache, so it might not be faithful with + respect to the real content on disk. + To iterate the full content of the disk, just call right before + :meth:`sync`. + + """ + yield from self.cache + + def sync(self): + """Sync the headers in the cache with the content on disk. + + In particular, headers on disk that are missing in the :attr:`cache` + are added to it, without loading actual operators in memory. + + Despite the name, the operation is non-destructive, so, even if cache + has been abused, nothing will be deleted nor unloaded. + + """ + for path in self.access.path.iterdir(): + if path.suffix != HEADER_EXT: + continue + + header = self.header_type( + **yaml.safe_load(path.read_text(encoding="utf-8")) + ) + self.cache[header] = None + + def __invert__(self): + """Alias for :meth:`sync`.""" + self.sync() diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 5867d73fa..0c642f8af 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -100,9 +100,14 @@ class Target(Header): @classmethod def from_ep(cls, ep: EPoint): - """Create instance from the :class:`eko.io.types.EvolutionPoint` analogue.""" + """Create instance from the :class:`EPoint` analogue.""" return cls(*ep) + @property + def ep(self) -> EPoint: + """Cast to :class:`EPoint`.""" + return (self.scale, self.nf) + @dataclass(frozen=True) class Operator: diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 28db4ace9..a4453561d 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -8,7 +8,7 @@ import tarfile import tempfile from dataclasses import dataclass -from typing import Dict, List, Optional +from typing import List, Optional import numpy as np import yaml @@ -17,7 +17,8 @@ from . 
import exceptions, raw from .access import AccessConfigs from .bases import Bases -from .items import Operator +from .inventory import Inventory +from .items import Operator, Recipe, Target from .metadata import Metadata from .paths import InternalPaths from .runcards import OperatorCard, TheoryCard @@ -60,8 +61,11 @@ class EKO: """ - # operators cache, contains the Q2 grid information - _operators: Dict[EPoint, Optional[Operator]] + recipes: Inventory + recipes_matching: Inventory + parts: Inventory + parts_matching: Inventory + operators: Inventory # public containers # ----------------- @@ -78,19 +82,19 @@ def paths(self) -> InternalPaths: return InternalPaths(self.metadata.path) @property - def rotations(self) -> Bases: - """Rotations information.""" + def bases(self) -> Bases: + """Bases information.""" return self.metadata.bases @property def xgrid(self) -> interpolation.XGrid: """Momentum fraction internal grid.""" - return self.rotations.xgrid + return self.bases.xgrid @xgrid.setter def xgrid(self, value: interpolation.XGrid): """Set `xgrid` value.""" - self.rotations.xgrid = value + self.bases.xgrid = value self.update() @property @@ -101,7 +105,7 @@ def mu20(self) -> SquaredScale: @property def mu2grid(self) -> List[SquaredScale]: """Provide the list of :math:`Q^2` as an array.""" - return [mu2 for mu2, _ in self._operators] + return [ep.scale for ep in self.operators] @property def evolgrid(self) -> List[EPoint]: @@ -142,6 +146,16 @@ def permissions(self): """Provide permissions information.""" return dict(read=self.access.read, write=self.access.write) + # recipes management + # ------------------- + + def load_recipes(self, recipes: List[Recipe]): + """Load recipes in bulk.""" + + def __ilshift__(self, recipes: List[Recipe]): + """Shortcut for :meth:`load_recipes`.""" + self.load_recipes(recipes) + # operator management # ------------------- def __getitem__(self, ep: EPoint) -> Optional[Operator]: @@ -170,7 +184,10 @@ def operator(self, ep: EPoint): 
def __iter__(self): """Iterate over keys (i.e. evolution points).""" - yield from self._operators + self.operators.sync() + for target in self.operators: + assert isinstance(target, Target) + yield target.ep def items(self): """Iterate operators, with minimal load. @@ -189,7 +206,7 @@ def items(self): immediately after """ - for ep in self._operators: + for ep in self.operators: yield ep, self[ep] del self[ep] From 08fdef1798ca2870534240c563749eb7e9059f89 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 13:39:53 +0200 Subject: [PATCH 042/154] Update EKO methods --- src/eko/io/struct.py | 43 +++++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index a4453561d..d144f6f91 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -160,13 +160,15 @@ def __ilshift__(self, recipes: List[Recipe]): # ------------------- def __getitem__(self, ep: EPoint) -> Optional[Operator]: r"""Retrieve operator for given evolution point.""" - return None + return self.operators[Target.from_ep(ep)] def __setitem__(self, ep: EPoint, op: Operator): """Set operator associated to an evolution point.""" + self.operators[Target.from_ep(ep)] = op def __delitem__(self, ep: EPoint): """Drop operator from memory.""" + del self.operators[Target.from_ep(ep)] @contextlib.contextmanager def operator(self, ep: EPoint): @@ -206,29 +208,33 @@ def items(self): immediately after """ - for ep in self.operators: + for target in self.operators: + # recast to evolution point + assert isinstance(target, Target) + ep = target.ep + + # auto-load yield ep, self[ep] + # auto-unload del self[ep] - def __contains__(self, q2: float) -> bool: - """Check whether :math:`Q^2` operators are present. + def __contains__(self, ep: EPoint) -> bool: + """Check whether the operator related to the evolution point is present. - 'Present' means, in this case, they are conceptually part of the - :class:`EKO`. 
But it is telling nothing about being loaded in memory or - not. - - Returns - ------- - bool - the result of checked condition + 'Present' means, in this case, they are available in the :class:`EKO`. + But it is telling nothing about being loaded in memory or not. """ - return q2 in self._operators + return Target.from_ep(ep) in self.operators def approx( self, ep: EPoint, rtol: float = 1e-6, atol: float = 1e-10 - ) -> Optional[float]: - """Look for close enough :math:`mu^2` value in the :class:`EKO`. + ) -> Optional[EPoint]: + r"""Look for close enough evolution point in the :class:`EKO`. + + The distance is mostly evaluated along the :math:`\mu^2` dimension, + while :math:`n_f` is considered with a discrete distance: if two points + have not the same, they are classified as far. Raises ------ @@ -236,14 +242,15 @@ def approx( if multiple values are find in the neighbourhood """ - mu2s = np.array([mu2 for mu2, nf in self if nf == ep[1]]) - close = mu2s[np.isclose(ep[0], mu2s, rtol=rtol, atol=atol)] + eps = np.array([ep_ for ep_ in self if ep_[1] == ep[1]]) + mu2s = np.array([mu2 for mu2, _ in eps]) + close = eps[np.isclose(ep[0], mu2s, rtol=rtol, atol=atol)] if close.size == 1: return close[0] if close.size == 0: return None - raise ValueError(f"Multiple values of Q2 have been found close to {ep[0]}") + raise ValueError(f"Multiple values of Q2 have been found close to {ep}") def unload(self): """Fully unload the operators in memory.""" From 819d10234222b858e32f14a9bb5b2cb359b16c0a Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 14:12:38 +0200 Subject: [PATCH 043/154] Fix contentless inventories --- src/eko/io/inventory.py | 44 +++++++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 10 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 9a33f43f0..4fac46ea1 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -52,21 +52,29 @@ class Inventory: access: AccessConfigs 
header_type: Type[Header] cache: Dict[Header, Optional[Operator]] = field(default_factory=dict) - contentful: bool = True + contentless: bool = False + name: Optional[str] = None + """Only for logging purpose.""" - def lookup(self, stem: str) -> Path: + def __str__(self) -> str: + return f"Inventory '{self.name}'" + + def lookup(self, stem: str, header: bool = False) -> Path: """Look up for content path in inventory.""" + EXT = OPERATOR_EXT if not header else [HEADER_EXT] found = [ path for path in self.access.path.iterdir() if path.name.startswith(stem) - if "".join(path.suffixes) in OPERATOR_EXT + if "".join(path.suffixes) in EXT ] if len(found) == 0: - raise LookupError(f"Target value '{stem}' not available.") + raise LookupError(f"Item '{stem}' not available in {self}.") elif len(found) > 1: - raise LookupError(f"Too many operators associated to '{stem}':\n{found}") + raise LookupError( + f"Too many items associated to '{stem}' in {self}:\n{found}" + ) return found[0] @@ -79,12 +87,23 @@ def __getitem__(self, header: Header) -> Optional[Operator]: """ self.access.assert_open() - if header in self.cache: + try: op = self.cache[header] - if op is not None or not self.contentful: + if op is not None or self.contentless: return op + except KeyError: + pass stem = encode(header) + + # in case of contentless, check header availability instead + if self.contentless: + if self.lookup(stem, header=True): + self.cache[header] = None + return None + raise LookupError(f"Impossible to retrieve {header} from {self}.") + + # for contentful inventories, check operator availability oppath = self.lookup(stem) with open(oppath, "rb") as fd: @@ -101,16 +120,21 @@ def __setitem__(self, header: Header, operator: Optional[Operator]): """ self.access.assert_writeable() - if not self.contentful: + # always save the header on disk + headpath = self.access.path / header_name(header) + headpath.write_text(yaml.dump(asdict(header)), encoding="utf-8") + + # in case of contentless, set empty 
cache and exit + if self.contentless: self.cache[header] = None + return + # otherwise save also the operator, and add to the cache assert operator is not None - headpath = self.access.path / header_name(header) with_err = operator.error is not None oppath = self.access.path / operator_name(header, err=with_err) - headpath.write_text(yaml.dump(asdict(header))) with open(oppath, "wb") as fd: operator.save(fd) From 597f841ceecb2bf5405cd7207acf44386ed6400c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 14:38:49 +0200 Subject: [PATCH 044/154] Fix recipe loading implementation --- src/eko/io/struct.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index d144f6f91..839fc89ab 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -18,7 +18,7 @@ from .access import AccessConfigs from .bases import Bases from .inventory import Inventory -from .items import Operator, Recipe, Target +from .items import Evolution, Operator, Recipe, Target from .metadata import Metadata from .paths import InternalPaths from .runcards import OperatorCard, TheoryCard @@ -151,6 +151,12 @@ def permissions(self): def load_recipes(self, recipes: List[Recipe]): """Load recipes in bulk.""" + for recipe in recipes: + # leverage auto-save + if isinstance(recipe, Evolution): + self.recipes[recipe] = None + else: + self.recipes_matching[recipe] = None def __ilshift__(self, recipes: List[Recipe]): """Shortcut for :meth:`load_recipes`.""" From bebd35dc68cc2a307b60c0b897bed93a037345bb Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 14:39:09 +0200 Subject: [PATCH 045/154] Sketch the entire jets runner --- src/eko/runner/managed.py | 19 ++++++++++++++----- src/eko/runner/operators.py | 14 ++++++++++++++ src/eko/runner/parts.py | 3 ++- src/eko/runner/recipes.py | 36 +++++++++++++++++++++++------------- 4 files changed, 53 insertions(+), 19 deletions(-) diff --git 
a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 3aaef21ef..5775c0f60 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -10,12 +10,13 @@ but not automatically performed. """ - +from itertools import chain from pathlib import Path +from ..io.items import Target from ..io.runcards import OperatorCard, TheoryCard from ..io.struct import EKO -from . import recipes +from . import commons, operators, parts, recipes def solve(theory: TheoryCard, operator: OperatorCard, path: Path): @@ -23,7 +24,15 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): with EKO.create(path) as builder: eko = builder.load_cards(theory, operator).build() - recipes.create(eko) + recs = recipes.create(eko) + eko.load_recipes(recs) + + for recipe in chain(eko.recipes, eko.recipes_matching): + parts.compute(eko, recipe) + + atlas = commons.atlas(eko.theory_card, eko.operator_card) - # for recipe in eko.recipes: - # pass + for ep in operator.evolgrid: + headers = recipes.elements(ep, atlas) + parts_ = operators.retrieve(eko, headers) + eko.operators[Target.from_ep(ep)] = operators.join(eko, parts_) diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index d66bd89f1..567ec7514 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -1 +1,15 @@ """Combine parts into operators.""" +from typing import List + +from .. import EKO +from ..io.items import Operator, Recipe + + +def retrieve(eko: EKO, elements: List[Recipe]) -> List[Operator]: + """Retrieve parts to be joined.""" + return [] + + +def join(eko: EKO, elements: List[Operator]) -> Operator: + """Join the elements into the final operator.""" + return Operator(None) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index f93c498e4..839740b07 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,9 +1,10 @@ """Compute operator components.""" import numpy as np +from .. 
import EKO from ..io.items import Operator -def compute(recipe) -> Operator: +def compute(eko: EKO, recipe) -> Operator: """Compute EKO component in isolation.""" return Operator(np.array([])) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index e447f64e4..a8537f553 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -3,27 +3,37 @@ from .. import EKO from ..io.items import Evolution, Matching, Recipe -from ..matchings import Segment +from ..io.types import EvolutionPoint as EPoint +from ..matchings import Atlas, Segment from . import commons +def elements(ep: EPoint, atlas: Atlas) -> List[Recipe]: + """Determine recipes to compute a given operator.""" + recipes = [] + + # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded + # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 + + blocks = atlas.matched_path(ep) + for block in blocks: + if isinstance(block, Segment): + cliff = block.target in atlas.walls + recipe = Evolution.from_atlas(block, cliff=cliff) + else: + recipe = Matching.from_atlas(block) + + recipes.append(recipe) + + return recipes + + def create(eko: EKO) -> List[Recipe]: """Create all associated recipes.""" atlas = commons.atlas(eko.theory_card, eko.operator_card) recipes = [] for ep in eko: - # expanded = eko.operator_card.configs.scvar_method is sv.Modes.expanded - # mu2f = mu2 * eko.theory_card.xif**2 if expanded else mu2 - - blocks = atlas.matched_path(ep) - for block in blocks: - if isinstance(block, Segment): - cliff = block.target in atlas.walls - recipe = Evolution.from_atlas(block, cliff=cliff) - else: - recipe = Matching.from_atlas(block) - - recipes.append(recipe) + recipes.extend(elements(ep, atlas)) return recipes From f963503dfa3920a375956b811349e4a1c985181e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 14:55:26 +0200 Subject: [PATCH 046/154] Simplify operator grid --- src/eko/evolution_operator/grid.py | 98 +++++++----------------------- 1 
file changed, 22 insertions(+), 76 deletions(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index d1637efa4..df54b0996 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -6,22 +6,28 @@ """ import logging -import numbers from dataclasses import astuple -from typing import List +from typing import Dict, List, Optional import numpy as np import numpy.typing as npt from .. import member from .. import scale_variations as sv +from ..couplings import Couplings +from ..interpolation import InterpolatorDispatcher from ..io.runcards import Configs, Debug -from ..matchings import flavor_shift, is_downward_path +from ..io.types import EvolutionPoint as EPoint +from ..io.types import Order +from ..matchings import Atlas, Segment, flavor_shift, is_downward_path from . import Operator, flavors, matching_condition, physical from .operator_matrix_element import OperatorMatrixElement logger = logging.getLogger(__name__) +OpDict = Dict[str, Optional[npt.NDArray]] +"""In particular, only the ``operator`` and ``error`` fields are expected.""" + class OperatorGrid(sv.ModeMixin): """Collection of evolution operators for several scales. @@ -42,7 +48,7 @@ class OperatorGrid(sv.ModeMixin): def __init__( self, mu2grid: npt.NDArray, - order: tuple, + order: Order, masses: List[float], mass_scheme, intrinsic_flavors: list, @@ -50,31 +56,10 @@ def __init__( n3lo_ad_variation: tuple, configs: Configs, debug: Debug, - thresholds_config, - couplings, - interpol_dispatcher, + thresholds_config: Atlas, + couplings: Couplings, + interpol_dispatcher: InterpolatorDispatcher, ): - """Initialize `OperatorGrid`. 
- - Parameters - ---------- - config: dict - configuration dictionary - q2_grid: array - Grid in Q2 on where to to compute the operators - order: tuple(int,int) - orders in perturbation theory - thresholds_config: eko.thresholds.ThresholdsAtlas - Instance of :class:`~eko.thresholds.Threshold` containing information about the - thresholds - couplings: eko.couplings.StrongCoupling - Instance of :class:`~eko.couplings.StrongCoupling` able to generate a_s for - any q - kernel_dispatcher: eko.kernel_generation.KernelDispatcher - Instance of the :class:`~eko.kernel_generation.KernelDispatcher` with the - information about the kernels - - """ # check config = {} config["order"] = order @@ -123,9 +108,8 @@ def __init__( self._threshold_operators = {} self._matching_operators = {} - def get_threshold_operators(self, path): - """ - Generate the threshold operators. + def get_threshold_operators(self, path: List[Segment]) -> List[Operator]: + """Generate the threshold operators. This method is called everytime the OperatorGrid is asked for a grid on Q^2 with a list of the relevant areas. @@ -135,15 +119,8 @@ def get_threshold_operators(self, path): The internal dictionary is self._threshold_operators and its structure is: (q2_from, q2_to) -> eko.operators.Operator - It computes and stores the necessary macthing operators - Parameters - ---------- - path: list(`eko.thresholds.PathSegment`) - thresholds path + It computes and stores the necessary macthing operators. - Returns - ------- - thr_ops: list(eko.evolution_operator.Operator) """ # The base area is always that of the reference q thr_ops = [] @@ -184,43 +161,15 @@ def get_threshold_operators(self, path): self._matching_operators[seg.q2_to] = ome.op_members return thr_ops - def compute(self, q2grid=None): - """Compute all ekos for the `q2grid`. - - Parameters - ---------- - q2grid: list(float) - List of :math:`Q^2` - - Returns - ------- - list(dict) - List of ekos for each value of :math:`Q^2` - """ - # use input? 
- if q2grid is None: - q2grid = self.q2_grid - # normalize input - if isinstance(q2grid, numbers.Number): - q2grid = [q2grid] - # And now return the grid - grid_return = {} - for q2 in q2grid: - grid_return[q2] = self.generate(q2) - return grid_return + def compute(self) -> Dict[EPoint, dict]: + """Compute all ekos for the `q2grid`.""" + return {q2: self.generate(q2) for q2 in self.q2_grid} - def generate(self, q2): + def generate(self, q2: EPoint) -> OpDict: r"""Compute a single EKO. - Parameters - ---------- - q2: float - Target value of :math:`Q^2` + eko :math:`\mathbf E(Q^2 \leftarrow Q_0^2)` in flavor basis as numpy array. - Returns - ------- - dict - eko :math:`\mathbf E(Q^2 \leftarrow Q_0^2)` in flavor basis as numpy array """ # The lists of areas as produced by the thresholds path = self.managers["thresholds_config"].path(q2) @@ -271,7 +220,4 @@ def generate(self, q2): ) final_op = final_op @ rot @ matching @ phys_op values, errors = final_op.to_flavor_basis_tensor(qed) - return { - "operator": values, - "error": errors, - } + return {"operator": values, "error": errors} From 30edb8faaec136fc29df561f04e11cf5b327f251 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 18:16:37 +0200 Subject: [PATCH 047/154] Fix and further simplify OperatorGrid computation --- src/eko/evolution_operator/__init__.py | 9 ++-- src/eko/evolution_operator/grid.py | 59 ++++++++++---------------- 2 files changed, 27 insertions(+), 41 deletions(-) diff --git a/src/eko/evolution_operator/__init__.py b/src/eko/evolution_operator/__init__.py index 222d827d5..649ffc332 100644 --- a/src/eko/evolution_operator/__init__.py +++ b/src/eko/evolution_operator/__init__.py @@ -26,6 +26,7 @@ from ..kernels import singlet_qed as qed_s from ..kernels import utils from ..kernels import valence_qed as qed_v +from ..matchings import Segment from ..member import OpMember logger = logging.getLogger(__name__) @@ -606,13 +607,13 @@ class Operator(sv.ModeMixin): full_labels_qed = 
br.full_unified_labels def __init__( - self, config, managers, nf, q2_from, q2_to, mellin_cut=5e-2, is_threshold=False + self, config, managers, segment: Segment, mellin_cut=5e-2, is_threshold=False ): self.config = config self.managers = managers - self.nf = nf - self.q2_from = q2_from - self.q2_to = q2_to + self.nf = segment.nf + self.q2_from = segment.origin + self.q2_to = segment.target # TODO make 'cut' external parameter? self._mellin_cut = mellin_cut self.is_threshold = is_threshold diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index df54b0996..ffa894719 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -4,7 +4,6 @@ previously instantiated information and does the actual computation of the Q2s. """ - import logging from dataclasses import astuple from typing import Dict, List, Optional @@ -47,16 +46,17 @@ class OperatorGrid(sv.ModeMixin): def __init__( self, - mu2grid: npt.NDArray, + mu2grid: List[EPoint], order: Order, masses: List[float], mass_scheme, + thresholds_ratios: List[float], intrinsic_flavors: list, xif: float, n3lo_ad_variation: tuple, configs: Configs, debug: Debug, - thresholds_config: Atlas, + atlas: Atlas, couplings: Couplings, interpol_dispatcher: InterpolatorDispatcher, ): @@ -71,6 +71,7 @@ def __init__( for i, q in enumerate("cbt"): config[f"m{q}"] = masses[i] + config["thresholds_ratios"] = thresholds_ratios method = config["method"] = configs.evolution_method.value config["backward_inversion"] = configs.inversion_method config["ev_op_max_order"] = configs.ev_op_max_order @@ -101,7 +102,7 @@ def __init__( self.config = config self.q2_grid = mu2grid self.managers = dict( - thresholds_config=thresholds_config, + thresholds_config=atlas, couplings=couplings, interpol_dispatcher=interpol_dispatcher, ) @@ -129,13 +130,12 @@ def get_threshold_operators(self, path: List[Segment]) -> List[Operator]: shift = flavor_shift(is_downward) for seg in path[:-1]: new_op_key 
= astuple(seg) - thr_config = self.managers["thresholds_config"] - kthr = thr_config.thresholds_ratios[seg.nf - shift] + kthr = self.config["thresholds_ratios"][seg.nf - shift] ome = OperatorMatrixElement( self.config, self.managers, seg.nf - shift + 3, - seg.q2_to, + seg.target, is_downward, np.log(kthr), self.config["HQ"] == "MSBAR", @@ -143,22 +143,15 @@ def get_threshold_operators(self, path: List[Segment]) -> List[Operator]: if new_op_key not in self._threshold_operators: # Compute the operator and store it logger.info("Prepare threshold operator") - op_th = Operator( - self.config, - self.managers, - seg.nf, - seg.q2_from, - seg.q2_to, - is_threshold=True, - ) + op_th = Operator(self.config, self.managers, seg, is_threshold=True) op_th.compute() self._threshold_operators[new_op_key] = op_th thr_ops.append(self._threshold_operators[new_op_key]) # Compute the matching conditions and store it - if seg.q2_to not in self._matching_operators: + if seg.target not in self._matching_operators: ome.compute() - self._matching_operators[seg.q2_to] = ome.op_members + self._matching_operators[seg.target] = ome.op_members return thr_ops def compute(self) -> Dict[EPoint, dict]: @@ -176,15 +169,13 @@ def generate(self, q2: EPoint) -> OpDict: # Prepare the path for the composition of the operator thr_ops = self.get_threshold_operators(path) # we start composing with the highest operator ... 
- operator = Operator( - self.config, self.managers, path[-1].nf, path[-1].q2_from, path[-1].q2_to - ) + operator = Operator(self.config, self.managers, path[-1]) operator.compute() - intrinsic_range = self.config["intrinsic_range"] + is_downward = is_downward_path(path) - if is_downward: - intrinsic_range = [4, 5, 6] + intrinsic_range = [4, 5, 6] if is_downward else self.config["intrinsic_range"] qed = self.config["order"][1] > 0 + final_op = physical.PhysicalOperator.ad_to_evol_map( operator.op_members, operator.nf, operator.q2_to, intrinsic_range, qed ) @@ -195,26 +186,20 @@ def generate(self, q2: EPoint) -> OpDict: ) # join with the basis rotation, since matching requires c+ (or likewise) + nf_match = op.nf - 1 if is_downward else op.nf + matching = matching_condition.MatchingCondition.split_ad_to_evol_map( + self._matching_operators[op.q2_to], + nf_match, + op.q2_to, + intrinsic_range=intrinsic_range, + qed=qed, + ) if is_downward: - matching = matching_condition.MatchingCondition.split_ad_to_evol_map( - self._matching_operators[op.q2_to], - op.nf - 1, - op.q2_to, - intrinsic_range=intrinsic_range, - qed=qed, - ) invrot = member.ScalarOperator.promote_names( flavors.rotate_matching_inverse(op.nf, qed), op.q2_to ) final_op = final_op @ matching @ invrot @ phys_op else: - matching = matching_condition.MatchingCondition.split_ad_to_evol_map( - self._matching_operators[op.q2_to], - op.nf, - op.q2_to, - intrinsic_range=intrinsic_range, - qed=qed, - ) rot = member.ScalarOperator.promote_names( flavors.rotate_matching(op.nf + 1, qed), op.q2_to ) From 895d93ca2346fa37729de050ac8420f6e9c6a063 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 18:17:12 +0200 Subject: [PATCH 048/154] Add cast to atlas types from items --- src/eko/io/items.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 0c642f8af..4eacff548 100644 --- a/src/eko/io/items.py +++ 
b/src/eko/io/items.py @@ -8,8 +8,7 @@ import numpy.lib.npyio as npyio import numpy.typing as npt -from eko import matchings - +from .. import matchings from . import exceptions from .types import EvolutionPoint as EPoint from .types import FlavorIndex, FlavorsNumber, SquaredScale @@ -70,6 +69,11 @@ def from_atlas(cls, segment: matchings.Segment, cliff: bool = False): """Create instance from analogous Atlas object.""" return cls(**asdict(segment), cliff=cliff) + @property + def as_atlas(self) -> matchings.Segment: + """The associated segment.""" + return matchings.Segment(self.origin, self.target, self.nf) + @dataclass(frozen=True) class Matching(Header): @@ -81,11 +85,17 @@ class Matching(Header): scale: SquaredScale hq: FlavorIndex + inverse: bool = False @classmethod - def from_atlas(cls, matching: matchings.Matching): + def from_atlas(cls, matching: matchings.Matching, inverse: bool = False): """Create instance from analogous Atlas object.""" - return cls(**asdict(matching)) + return cls(**asdict(matching), inverse=inverse) + + @property + def as_atlas(self) -> matchings.Matching: + """The associated segment.""" + return matchings.Matching(self.scale, self.hq) Recipe = Union[Evolution, Matching] From 165a722e24d457a8d9272571ea84d71de84fe4c8 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 18:17:51 +0200 Subject: [PATCH 049/154] Add explicit squared ratios Because for ratios its even harder to propagate the linear vs squared information --- src/eko/quantities/heavy_quarks.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/eko/quantities/heavy_quarks.py b/src/eko/quantities/heavy_quarks.py index 3d87b30aa..ae1ca86c6 100644 --- a/src/eko/quantities/heavy_quarks.py +++ b/src/eko/quantities/heavy_quarks.py @@ -1,7 +1,7 @@ """Heavy quarks related quantities.""" import enum from dataclasses import dataclass -from typing import Generic, Sequence, TypeVar, Union +from typing import Generic, List, Sequence, 
TypeVar, Union import numpy as np @@ -96,6 +96,11 @@ class HeavyInfo(DictLike): This is meant to be used mainly as a theory card section, and to be passed around when all or a large part of this information is required. + Note + ---- + All scales and ratios in this structure are linear, so you can consider + them as quantities in :math:`GeV` or ratios of them. + """ num_flavs_init: FlavorsNumber @@ -119,3 +124,8 @@ class HeavyInfo(DictLike): def matching_scales(self) -> MatchingScales: """Compute matching scales.""" return scales_from_ratios(self.matching_ratios, self.masses) + + @property + def squared_ratios(self) -> List[float]: + """Squared ratios of matching scales.""" + return np.power(self.matching_ratios, 2.0).tolist() From 5e5d9f2df9d7c13e83403393dfba1d389db3d36d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 18:18:18 +0200 Subject: [PATCH 050/154] Small fix to legacy runner --- src/eko/runner/legacy.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/eko/runner/legacy.py b/src/eko/runner/legacy.py index dd5c462bb..ca8c54f15 100644 --- a/src/eko/runner/legacy.py +++ b/src/eko/runner/legacy.py @@ -67,11 +67,12 @@ def __init__( order=new_theory.order, masses=masses, mass_scheme=new_theory.heavy.masses_scheme.value, + thresholds_ratios=new_theory.heavy.squared_ratios, intrinsic_flavors=new_theory.heavy.intrinsic_flavors, xif=new_theory.xif, configs=new_operator.configs, debug=new_operator.debug, - thresholds_config=tc, + atlas=tc, couplings=cs, interpol_dispatcher=bfd, n3lo_ad_variation=new_theory.n3lo_ad_variation, @@ -95,4 +96,4 @@ def compute(self): with EKO.edit(self.path) as eko: # add all operators for ep, op in self.op_grid.compute().items(): - eko[ep] = Operator.from_dict(op) + eko[ep] = Operator(**op) From 1e7a476df7f2bd4b725af459728ee8b4f8697930 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 1 May 2023 18:23:34 +0200 Subject: [PATCH 051/154] Export OperatorGrid computation to managed 
runner (first steps) --- src/eko/runner/managed.py | 21 ++++---- src/eko/runner/operators.py | 12 +++-- src/eko/runner/parts.py | 98 +++++++++++++++++++++++++++++++++++-- src/eko/runner/recipes.py | 8 +-- 4 files changed, 116 insertions(+), 23 deletions(-) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 5775c0f60..01712adaa 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -10,10 +10,9 @@ but not automatically performed. """ -from itertools import chain from pathlib import Path -from ..io.items import Target +from ..io.items import Evolution, Matching, Target from ..io.runcards import OperatorCard, TheoryCard from ..io.struct import EKO from . import commons, operators, parts, recipes @@ -24,15 +23,19 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): with EKO.create(path) as builder: eko = builder.load_cards(theory, operator).build() - recs = recipes.create(eko) - eko.load_recipes(recs) + atlas = commons.atlas(eko.theory_card, eko.operator_card) - for recipe in chain(eko.recipes, eko.recipes_matching): - parts.compute(eko, recipe) + recs = recipes.create(eko.evolgrid, atlas) + eko.load_recipes(recs) - atlas = commons.atlas(eko.theory_card, eko.operator_card) + for recipe in eko.recipes: + assert isinstance(recipe, Evolution) + eko.parts[recipe] = parts.evolve(eko, recipe) + for recipe in eko.recipes_matching: + assert isinstance(recipe, Matching) + eko.parts[recipe] = parts.match(eko, recipe) for ep in operator.evolgrid: headers = recipes.elements(ep, atlas) - parts_ = operators.retrieve(eko, headers) - eko.operators[Target.from_ep(ep)] = operators.join(eko, parts_) + parts_ = operators.retrieve(headers, eko.parts, eko.parts_matching) + eko.operators[Target.from_ep(ep)] = operators.join(parts_) diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index 567ec7514..4f84a0884 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -1,15 +1,19 @@ """Combine 
parts into operators.""" from typing import List -from .. import EKO +import numpy as np + +from ..io.inventory import Inventory from ..io.items import Operator, Recipe -def retrieve(eko: EKO, elements: List[Recipe]) -> List[Operator]: +def retrieve( + elements: List[Recipe], parts: Inventory, parts_matching: Inventory +) -> List[Operator]: """Retrieve parts to be joined.""" return [] -def join(eko: EKO, elements: List[Operator]) -> Operator: +def join(elements: List[Operator]) -> Operator: """Join the elements into the final operator.""" - return Operator(None) + return Operator(np.array([])) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 839740b07..9f88482c1 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -1,10 +1,100 @@ -"""Compute operator components.""" +"""Compute operator components. + +.. todo:: + + This is the only part of the new runner making use of the global context + through the :class:`EKO` object. + + After #242, the goal is to update :class:`Operator` and + :class:`OperatorMatrixElement` to simplify the computation and passing down + parameters. + +""" import numpy as np from .. import EKO -from ..io.items import Operator +from .. import evolution_operator as evop +from ..evolution_operator import operator_matrix_element as ome +from ..io.items import Evolution, Matching, Operator +from ..quantities.heavy_quarks import QuarkMassScheme + + +def managers(eko: EKO) -> dict: + """Collect managers for operator computation. + + .. todo:: + + Legacy interface, avoid managers usage. + + """ + return {} + + +def evolve_configs(eko: EKO) -> dict: + """Create configs for :class:`Operator`. + + .. todo:: + + Legacy interface, make use of a dedicated object. 
+ + """ + # self.config: + # - n_integration_cores + # - xif2 + # - ev_op_iterations + # - debug_skip_non_singlet + # - debug_skip_singlet + # - method + # - ev_op_max_order + # - polarized + # - time_like + ocard = eko.operator_card + return dict( + xif2=eko.theory_card.xif**2, + method=ocard.configs.evolution_method, + ev_op_iterations=ocard.configs.ev_op_iterations, + ev_op_max_order=ocard.configs.ev_op_max_order, + polarized=ocard.configs.polarized, + time_like=ocard.configs.time_like, + debug_skip_singlet=ocard.debug.skip_singlet, + debug_skip_non_singlet=ocard.debug.skip_non_singlet, + n_integration_cores=ocard.configs.n_integration_cores, + ) + + +def evolve(eko: EKO, recipe: Evolution) -> Operator: + """Compute evolution in isolation.""" + op = evop.Operator( + evolve_configs(eko), managers(eko), recipe.as_atlas, is_threshold=recipe.cliff + ) + op.compute() + return Operator(np.array([])) + + +def matching_configs(eko: EKO) -> dict: + """Create configs for :class:`OperatorMatrixElement`. + + .. todo:: + + Legacy interface, make use of a dedicated object. + + """ + # self.config: + # - + return dict() -def compute(eko: EKO, recipe) -> Operator: - """Compute EKO component in isolation.""" +def match(eko: EKO, recipe: Matching) -> Operator: + """Compute matching in isolation.""" + kthr = eko.theory_card.heavy.squared_ratios[recipe.hq - 4] + op = ome.OperatorMatrixElement( + matching_configs(eko), + managers(eko), + recipe.hq, + recipe.scale, + recipe.inverse, + np.log(kthr), + eko.theory_card.heavy.masses_scheme is QuarkMassScheme.MSBAR, + ) + op.compute() return Operator(np.array([])) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index a8537f553..83810bb5c 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -1,11 +1,9 @@ """Recipes containing instructions for atomic computations.""" from typing import List -from .. 
import EKO from ..io.items import Evolution, Matching, Recipe from ..io.types import EvolutionPoint as EPoint from ..matchings import Atlas, Segment -from . import commons def elements(ep: EPoint, atlas: Atlas) -> List[Recipe]: @@ -28,12 +26,10 @@ def elements(ep: EPoint, atlas: Atlas) -> List[Recipe]: return recipes -def create(eko: EKO) -> List[Recipe]: +def create(evolgrid: List[EPoint], atlas: Atlas) -> List[Recipe]: """Create all associated recipes.""" - atlas = commons.atlas(eko.theory_card, eko.operator_card) - recipes = [] - for ep in eko: + for ep in evolgrid: recipes.extend(elements(ep, atlas)) return recipes From f1c4b48d0e58985bfb9a3c2b7f7ee63686b7cbc6 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 2 May 2023 17:20:48 +0200 Subject: [PATCH 052/154] Complete Operator/MatrixElement arguments collection --- src/eko/runner/parts.py | 44 +++++++++++++++++++++++++++++++++++++---- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 9f88482c1..ca359535a 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -17,6 +17,7 @@ from ..evolution_operator import operator_matrix_element as ome from ..io.items import Evolution, Matching, Operator from ..quantities.heavy_quarks import QuarkMassScheme +from . import commons def managers(eko: EKO) -> dict: @@ -27,7 +28,13 @@ def managers(eko: EKO) -> dict: Legacy interface, avoid managers usage. 
""" - return {} + tcard = eko.theory_card + ocard = eko.operator_card + return dict( + thresholds_config=commons.atlas(tcard, ocard), + couplings=commons.couplings(tcard, ocard), + interpol_dispatcher=commons.interpolator(ocard), + ) def evolve_configs(eko: EKO) -> dict: @@ -39,6 +46,7 @@ def evolve_configs(eko: EKO) -> dict: """ # self.config: + # - order # - n_integration_cores # - xif2 # - ev_op_iterations @@ -48,9 +56,11 @@ def evolve_configs(eko: EKO) -> dict: # - ev_op_max_order # - polarized # - time_like + tcard = eko.theory_card ocard = eko.operator_card return dict( - xif2=eko.theory_card.xif**2, + order=tcard.order, + xif2=tcard.xif**2, method=ocard.configs.evolution_method, ev_op_iterations=ocard.configs.ev_op_iterations, ev_op_max_order=ocard.configs.ev_op_max_order, @@ -80,8 +90,34 @@ def matching_configs(eko: EKO) -> dict: """ # self.config: - # - - return dict() + # - order + # - n_integration_cores + # - xif2 + # - ev_op_iterations + # - debug_skip_non_singlet + # - debug_skip_singlet + # - method + # - ev_op_max_order + # - polarized + # - time_like + # - backward_inversion + # - intrinsic_range + tcard = eko.theory_card + ocard = eko.operator_card + return dict( + order=tcard.order, + xif2=tcard.xif**2, + method=ocard.configs.evolution_method, + ev_op_iterations=ocard.configs.ev_op_iterations, + ev_op_max_order=ocard.configs.ev_op_max_order, + polarized=ocard.configs.polarized, + time_like=ocard.configs.time_like, + debug_skip_singlet=ocard.debug.skip_singlet, + debug_skip_non_singlet=ocard.debug.skip_non_singlet, + n_integration_cores=ocard.configs.n_integration_cores, + backward_inversion=ocard.configs.inversion_method, + intrinsic_range=tcard.heavy.intrinsic_flavors, + ) def match(eko: EKO, recipe: Matching) -> Operator: From 93e662d5c311377df6ec01f8cdd06b1855b91101 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 2 May 2023 18:23:23 +0200 Subject: [PATCH 053/154] Drop method check, since determined from enum value Thus the domain 
is already restricted by the enum definition, which would coincide with the removed check --- src/eko/evolution_operator/grid.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index ffa894719..ca8a46d51 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -82,17 +82,6 @@ def __init__( config["polarized"] = configs.polarized config["time_like"] = configs.time_like - if method not in [ - "iterate-exact", - "iterate-expanded", - "truncated", - "ordered-truncated", - "decompose-exact", - "decompose-expanded", - "perturbative-exact", - "perturbative-expanded", - ]: - raise ValueError(f"Unknown evolution mode {method}") if order == (1, 0) and method != "iterate-exact": logger.warning("Evolution: In LO we use the exact solution always!") From 5fe955c796506a7cec813b02951d69228d67674c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 2 May 2023 18:23:50 +0200 Subject: [PATCH 054/154] Implement evolution operator blow-up --- src/eko/runner/parts.py | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index ca359535a..ce3dbe9cd 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -15,6 +15,7 @@ from .. import EKO from .. import evolution_operator as evop from ..evolution_operator import operator_matrix_element as ome +from ..evolution_operator import physical from ..io.items import Evolution, Matching, Operator from ..quantities.heavy_quarks import QuarkMassScheme from . import commons @@ -72,13 +73,45 @@ def evolve_configs(eko: EKO) -> dict: ) +def evolve_blowup_info(eko: EKO) -> dict: + """Prepare common information to blow up to flavor basis. 
+ + Note + ---- + ``intrinsic_range`` is a fully deprecated feature, here and anywhere else, + since a full range is already always used for backward evolution, and it is + not harmful to use it also for forward. + + Indeed, the only feature of non-intrinsic evolution is to absorb a + non-trivial boundary condition when an intrinsic PDF is defined. + But to achieve this, is sufficient to not specify any intrinsic boundary + condition at all, while if something is there, it is intuitive enough that + it will be consistently evolved. + + Moreover, since two different behavior are applied for the forward and + backward evolution, the intrinsic range is a "non-local" function, since it + does not depend only on the evolution segment, but also on the previous + evolution history (to determine if evolution is backward in flavor, + irrespectively of happening for an increasing or decreasing interval in + scale at fixed flavor). + + """ + return dict(intrinsic_range=[4, 5, 6], qed=eko.theory_card.order[1] > 0) + + def evolve(eko: EKO, recipe: Evolution) -> Operator: """Compute evolution in isolation.""" op = evop.Operator( evolve_configs(eko), managers(eko), recipe.as_atlas, is_threshold=recipe.cliff ) op.compute() - return Operator(np.array([])) + + binfo = evolve_blowup_info(eko) + res, err = physical.PhysicalOperator.ad_to_evol_map( + op.op_members, op.nf, op.q2_to, **binfo + ).to_flavor_basis_tensor(qed=binfo["qed"]) + + return Operator(res, err) def matching_configs(eko: EKO) -> dict: @@ -126,7 +159,7 @@ def match(eko: EKO, recipe: Matching) -> Operator: op = ome.OperatorMatrixElement( matching_configs(eko), managers(eko), - recipe.hq, + recipe.hq - 1, recipe.scale, recipe.inverse, np.log(kthr), From 771c6ac7c64118ad47704af2447d5006f2cec5d3 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 2 May 2023 18:53:41 +0200 Subject: [PATCH 055/154] Implement matching operator blow-up --- src/eko/runner/parts.py | 64 +++++++++++++++++++++++------------------ 1 file 
changed, 36 insertions(+), 28 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index ce3dbe9cd..871000a63 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -14,6 +14,7 @@ from .. import EKO from .. import evolution_operator as evop +from ..evolution_operator import matching_condition from ..evolution_operator import operator_matrix_element as ome from ..evolution_operator import physical from ..io.items import Evolution, Matching, Operator @@ -38,6 +39,32 @@ def managers(eko: EKO) -> dict: ) +def blowup_info(eko: EKO) -> dict: + """Prepare common information to blow up to flavor basis. + + Note + ---- + ``intrinsic_range`` is a fully deprecated feature, here and anywhere else, + since a full range is already always used for backward evolution, and it is + not harmful to use it also for forward. + + Indeed, the only feature of non-intrinsic evolution is to absorb a + non-trivial boundary condition when an intrinsic PDF is defined. + But to achieve this, is sufficient to not specify any intrinsic boundary + condition at all, while if something is there, it is intuitive enough that + it will be consistently evolved. + + Moreover, since two different behavior are applied for the forward and + backward evolution, the intrinsic range is a "non-local" function, since it + does not depend only on the evolution segment, but also on the previous + evolution history (to determine if evolution is backward in flavor, + irrespectively of happening for an increasing or decreasing interval in + scale at fixed flavor). + + """ + return dict(intrinsic_range=[4, 5, 6], qed=eko.theory_card.order[1] > 0) + + def evolve_configs(eko: EKO) -> dict: """Create configs for :class:`Operator`. @@ -73,32 +100,6 @@ def evolve_configs(eko: EKO) -> dict: ) -def evolve_blowup_info(eko: EKO) -> dict: - """Prepare common information to blow up to flavor basis. 
- - Note - ---- - ``intrinsic_range`` is a fully deprecated feature, here and anywhere else, - since a full range is already always used for backward evolution, and it is - not harmful to use it also for forward. - - Indeed, the only feature of non-intrinsic evolution is to absorb a - non-trivial boundary condition when an intrinsic PDF is defined. - But to achieve this, is sufficient to not specify any intrinsic boundary - condition at all, while if something is there, it is intuitive enough that - it will be consistently evolved. - - Moreover, since two different behavior are applied for the forward and - backward evolution, the intrinsic range is a "non-local" function, since it - does not depend only on the evolution segment, but also on the previous - evolution history (to determine if evolution is backward in flavor, - irrespectively of happening for an increasing or decreasing interval in - scale at fixed flavor). - - """ - return dict(intrinsic_range=[4, 5, 6], qed=eko.theory_card.order[1] > 0) - - def evolve(eko: EKO, recipe: Evolution) -> Operator: """Compute evolution in isolation.""" op = evop.Operator( @@ -106,7 +107,7 @@ def evolve(eko: EKO, recipe: Evolution) -> Operator: ) op.compute() - binfo = evolve_blowup_info(eko) + binfo = blowup_info(eko) res, err = physical.PhysicalOperator.ad_to_evol_map( op.op_members, op.nf, op.q2_to, **binfo ).to_flavor_basis_tensor(qed=binfo["qed"]) @@ -166,4 +167,11 @@ def match(eko: EKO, recipe: Matching) -> Operator: eko.theory_card.heavy.masses_scheme is QuarkMassScheme.MSBAR, ) op.compute() - return Operator(np.array([])) + + binfo = blowup_info(eko) + nf_match = op.nf - 1 if recipe.inverse else op.nf + res, err = matching_condition.MatchingCondition.split_ad_to_evol_map( + op, nf_match, recipe.scale, **binfo + ).to_flavor_basis_tensor(qed=binfo["qed"]) + + return Operator(res, err) From 4caff5de52db62a87772fdf7b1b34f2b8001b64e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 3 May 2023 11:41:14 +0200 
Subject: [PATCH 056/154] Initialize inventories, improve their typing --- src/eko/io/inventory.py | 15 +++++++++------ src/eko/io/struct.py | 31 +++++++++++++++++++++---------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 4fac46ea1..625345654 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -2,7 +2,7 @@ import base64 from dataclasses import asdict, dataclass, field from pathlib import Path -from typing import Dict, Optional, Type +from typing import Dict, Generic, Optional, Type, TypeVar import yaml @@ -41,8 +41,11 @@ def operator_name(header: Header, err: bool): return stem + OPERATOR_EXT[1 if err else 0] +H = TypeVar("H", bound=Header) + + @dataclass(frozen=True) -class Inventory: +class Inventory(Generic[H]): """Assets manager. In particular, manage autosave, autoload, and memory caching. @@ -51,7 +54,7 @@ class Inventory: access: AccessConfigs header_type: Type[Header] - cache: Dict[Header, Optional[Operator]] = field(default_factory=dict) + cache: Dict[H, Optional[Operator]] = field(default_factory=dict) contentless: bool = False name: Optional[str] = None """Only for logging purpose.""" @@ -78,7 +81,7 @@ def lookup(self, stem: str, header: bool = False) -> Path: return found[0] - def __getitem__(self, header: Header) -> Optional[Operator]: + def __getitem__(self, header: H) -> Optional[Operator]: r"""Retrieve operator for given header. If the operator is not already in memory, it will be automatically @@ -112,7 +115,7 @@ def __getitem__(self, header: Header) -> Optional[Operator]: self.cache[header] = op return op - def __setitem__(self, header: Header, operator: Optional[Operator]): + def __setitem__(self, header: H, operator: Optional[Operator]): """Set operator for given :math:`mu^2`. Header and operator are automatically dumped on disk. 
@@ -140,7 +143,7 @@ def __setitem__(self, header: Header, operator: Optional[Operator]): self.cache[header] = operator - def __delitem__(self, header: Header): + def __delitem__(self, header: H): """Drop operator from memory. Irrelevant for contentless inventories. diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 839fc89ab..c7af8a60b 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -18,7 +18,7 @@ from .access import AccessConfigs from .bases import Bases from .inventory import Inventory -from .items import Evolution, Operator, Recipe, Target +from .items import Evolution, Matching, Operator, Recipe, Target from .metadata import Metadata from .paths import InternalPaths from .runcards import OperatorCard, TheoryCard @@ -28,6 +28,19 @@ logger = logging.getLogger(__name__) +def inventories(access: AccessConfigs) -> dict: + """Set up empty inventories for object initialization.""" + return dict( + recipes=Inventory(access, Evolution, contentless=True, name="recipes"), + recipes_matching=Inventory( + access, Matching, contentless=True, name="matching-recipes" + ), + parts=Inventory(access, Evolution, name="parts"), + parts_matching=Inventory(access, Matching, name="matching-parts"), + operators=Inventory(access, Target, name="operators"), + ) + + @dataclass class EKO: """Operator interface. 
@@ -61,11 +74,11 @@ class EKO: """ - recipes: Inventory - recipes_matching: Inventory - parts: Inventory - parts_matching: Inventory - operators: Inventory + recipes: Inventory[Evolution] + recipes_matching: Inventory[Matching] + parts: Inventory[Evolution] + parts_matching: Inventory[Matching] + operators: Inventory[Target] # public containers # ----------------- @@ -350,9 +363,7 @@ def open(cls, path: os.PathLike, mode="r"): cls.load(path, tmpdir) metadata = Metadata.load(tmpdir) opened = cls( - _operators={ - target: None for target in InternalPaths(metadata.path).grid - }, + **inventories(access), metadata=metadata, access=access, ) @@ -534,7 +545,7 @@ def build(self) -> EKO: ) self.eko = EKO( - _operators={ep: None for ep in self.operator.evolgrid}, + **inventories(self.access), metadata=metadata, access=self.access, ) From 6166be7234166a1f61ec1ce6d070e3bf0462dd38 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 3 May 2023 11:43:52 +0200 Subject: [PATCH 057/154] Drop some type assertions As a consequence of improved inventories typing --- src/eko/io/struct.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index c7af8a60b..2d499f3d9 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -207,7 +207,6 @@ def __iter__(self): """Iterate over keys (i.e. 
evolution points).""" self.operators.sync() for target in self.operators: - assert isinstance(target, Target) yield target.ep def items(self): @@ -229,7 +228,6 @@ def items(self): """ for target in self.operators: # recast to evolution point - assert isinstance(target, Target) ep = target.ep # auto-load From 46faf95eceaf940906e5fcfe183190bb18c07ba9 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 14:08:48 +0200 Subject: [PATCH 058/154] Fix msbar_masses --- src/eko/msbar_masses.py | 16 +++++++++++++--- tests/eko/test_msbar_masses.py | 6 +++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/src/eko/msbar_masses.py b/src/eko/msbar_masses.py index 1793a7620..3e24feebb 100644 --- a/src/eko/msbar_masses.py +++ b/src/eko/msbar_masses.py @@ -268,7 +268,16 @@ def rge(m2, q2m_ref, strong_coupling, xif2, nf_ref): return float(msbar_mass) -def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=None): +def evolve( + m2_ref, + q2m_ref, + strong_coupling, + thresholds_ratios, + xif2, + q2_to, + nf_ref=None, + nf_to=None, +): r"""Perform the |MSbar| mass evolution up to given scale. It allows for different number of active flavors. 
@@ -298,7 +307,7 @@ def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=Non """ matching_scales = np.array(strong_coupling.atlas.walls)[1:-1] * np.array( - strong_coupling.thresholds.thresholds_ratios + thresholds_ratios ) atlas = Atlas(matching_scales.tolist(), (q2m_ref, nf_ref)) path = atlas.path((q2_to, nf_to)) @@ -316,7 +325,7 @@ def evolve(m2_ref, q2m_ref, strong_coupling, xif2, q2_to, nf_ref=None, nf_to=Non ) # apply matching condition if k < len(path) - 1: - L = np.log(atlas.walls[seg.nf - shift + 1]) + L = np.log(thresholds_ratios[seg.nf - shift]) m_coeffs = ( compute_matching_coeffs_down(seg.nf - 1) if is_downward @@ -446,6 +455,7 @@ def sc(thr_masses): m2_ref, q2m_ref, sc(masses), + matching, xif2, q2_to, nf_ref=nf_ref_cur, diff --git a/tests/eko/test_msbar_masses.py b/tests/eko/test_msbar_masses.py index c35f4359c..b0c102ef8 100644 --- a/tests/eko/test_msbar_masses.py +++ b/tests/eko/test_msbar_masses.py @@ -64,11 +64,14 @@ def test_compute_msbar_mass(self, theory_card: TheoryCard): m2_ref, Q2m_ref, strong_coupling=strong_coupling, + thresholds_ratios=th.heavy.matching_ratios, xif2=th.xif**2, q2_to=m2_computed[nf - 3], ) ) - np.testing.assert_allclose(m2_computed, m2_test, rtol=6e-4) + np.testing.assert_allclose( + m2_computed, m2_test, rtol=6e-4, err_msg=f"{method=},{order=}" + ) def test_compute_msbar_mass_VFNS(self, theory_card: TheoryCard): # test the solution now with some initial contition @@ -107,6 +110,7 @@ def test_compute_msbar_mass_VFNS(self, theory_card: TheoryCard): m2_ref, Q2m_ref, strong_coupling=strong_coupling, + thresholds_ratios=th.heavy.matching_ratios, xif2=th.xif**2, q2_to=m2_computed[nf - 3], ) From abba5fc10daf5915c8191f561f4f6db776b3a7a7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 14:41:31 +0200 Subject: [PATCH 059/154] Start fixing grid --- src/eko/evolution_operator/grid.py | 3 +-- .../operator_matrix_element.py | 8 ++---- tests/eko/evolution_operator/test_grid.py | 27 
+++++++------------ 3 files changed, 13 insertions(+), 25 deletions(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index ca8a46d51..c90383295 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -123,8 +123,7 @@ def get_threshold_operators(self, path: List[Segment]) -> List[Operator]: ome = OperatorMatrixElement( self.config, self.managers, - seg.nf - shift + 3, - seg.target, + seg, is_downward, np.log(kthr), self.config["HQ"] == "MSBAR", diff --git a/src/eko/evolution_operator/operator_matrix_element.py b/src/eko/evolution_operator/operator_matrix_element.py index c073b13cb..876aa8050 100644 --- a/src/eko/evolution_operator/operator_matrix_element.py +++ b/src/eko/evolution_operator/operator_matrix_element.py @@ -185,12 +185,8 @@ class OperatorMatrixElement(Operator): configuration managers : dict managers - nf: int - number of active flavor below threshold - q2: float - matching scale - is_backward: bool - True for backward evolution + segment: Segment + path segment L: float :math:`\ln(\mu_F^2 / m_h^2)` is_msbar: bool diff --git a/tests/eko/evolution_operator/test_grid.py b/tests/eko/evolution_operator/test_grid.py index 88a1eea2a..804388db6 100644 --- a/tests/eko/evolution_operator/test_grid.py +++ b/tests/eko/evolution_operator/test_grid.py @@ -44,33 +44,26 @@ def test_compute_mu2grid(theory_ffns, operator_card, tmp_path): opgrid = legacy.Runner( theory_ffns(3), operator_card, path=tmp_path / "eko.tar" ).op_grid - # q2 has not be precomputed - but should work nevertheless - opgrid.compute(3) - # we can also pass a single number opg = opgrid.compute() assert len(opg) == len(mugrid) assert all(k in op for k in ["operator", "error"] for op in opg.values()) - opg = opgrid.compute(3) - assert len(opg) == 1 - assert all(k in op for k in ["operator", "error"] for op in opg.values()) def test_grid_computation_VFNS(theory_card, operator_card, tmp_path): """Checks that the grid can be 
computed""" + mugrid = [(3, 4), (5, 5), (5, 4)] + operator_card.mugrid = mugrid opgrid = legacy.Runner( theory_card, operator_card, path=tmp_path / "eko.tar" ).op_grid - qgrid_check = [3, 5, 200**2] - operators = opgrid.compute(qgrid_check) - assert len(operators) == len(qgrid_check) + operators = opgrid.compute() + assert len(operators) == len(mugrid) def test_mod_expanded(theory_card, theory_ffns, operator_card, tmp_path: pathlib.Path): + mugrid = [(3, 4)] + operator_card.mugrid = mugrid operator_card.configs.scvar_method = eko.io.types.ScaleVariationsMethod.EXPANDED - theory_update = { - "order": (1, 0), - "ModSV": "expanded", - } epsilon = 1e-1 path = tmp_path / "eko.tar" for is_ffns, nf0 in zip([False, True], [5, 3]): @@ -82,11 +75,11 @@ def test_mod_expanded(theory_card, theory_ffns, operator_card, tmp_path: pathlib theory.heavy.num_flavs_init = nf0 path.unlink(missing_ok=True) opgrid = legacy.Runner(theory, operator_card, path=path).op_grid - opg = opgrid.compute(3) - theory_update["XIF"] = 1.0 + epsilon + opg = opgrid.compute() + theory.xif = 1.0 + epsilon path.unlink(missing_ok=True) sv_opgrid = legacy.Runner(theory, operator_card, path=path).op_grid - sv_opg = sv_opgrid.compute(3) + sv_opg = sv_opgrid.compute() np.testing.assert_allclose( - opg[3]["operator"], sv_opg[3]["operator"], atol=2.5 * epsilon + opg[(9, 4)]["operator"], sv_opg[(9, 4)]["operator"], atol=2.5 * epsilon ) From c615a3e339893960085f601a4559c9e4ed2a8bc6 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 14:54:17 +0200 Subject: [PATCH 060/154] Make test_ome running --- tests/eko/evolution_operator/test_ome.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/tests/eko/evolution_operator/test_ome.py b/tests/eko/evolution_operator/test_ome.py index 23ab65e9f..13ab4e13d 100644 --- a/tests/eko/evolution_operator/test_ome.py +++ b/tests/eko/evolution_operator/test_ome.py @@ -14,6 +14,7 @@ ) from eko.io.runcards import OperatorCard, TheoryCard 
from eko.io.types import InversionMethod +from eko.matchings import Segment from eko.runner import legacy from ekore.operator_matrix_elements.unpolarized.space_like import ( A_non_singlet, @@ -333,19 +334,19 @@ def test_quad_ker(monkeypatch): class TestOperatorMatrixElement: def test_labels(self, theory_ffns, operator_card, tmp_path: pathlib.Path): + theory_card = theory_ffns(3) path = tmp_path / "eko.tar" for skip_singlet in [True, False]: for skip_ns in [True, False]: operator_card.debug.skip_singlet = skip_singlet operator_card.debug.skip_non_singlet = skip_ns path.unlink(missing_ok=True) - g = legacy.Runner(theory_ffns(3), operator_card, path=path).op_grid + g = legacy.Runner(theory_card, operator_card, path=path).op_grid o = OperatorMatrixElement( g.config, g.managers, is_backward=True, - q2=None, - nf=None, + segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), L=None, is_msbar=False, ) @@ -387,8 +388,7 @@ def test_compute_n3lo(self, theory_ffns, operator_card, tmp_path): g.config, g.managers, is_backward=True, - q2=theory_card.heavy.masses.b.value**2, - nf=4, + segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), L=0, is_msbar=False, ) @@ -419,8 +419,7 @@ def test_compute_lo(self, theory_ffns, operator_card, tmp_path): g.config, g.managers, is_backward=False, - q2=theory_card.heavy.masses.b.value**2, - nf=4, + segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), L=0, is_msbar=False, ) @@ -471,8 +470,7 @@ def test_compute_nlo(self, theory_ffns, operator_card: OperatorCard, tmp_path): g.config, g.managers, is_backward=False, - q2=theory_card.heavy.masses.b.value**2, - nf=4, + segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), L=0, is_msbar=False, ) From 605a4729f51aa88feff2f876a7b5a4a89b977750 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 17:43:31 +0200 Subject: [PATCH 061/154] Revert "Make test_ome running" This reverts commit 21334f705d8c8cec82b22e13f0437a10f43d0654. 
--- tests/eko/evolution_operator/test_ome.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/tests/eko/evolution_operator/test_ome.py b/tests/eko/evolution_operator/test_ome.py index 13ab4e13d..23ab65e9f 100644 --- a/tests/eko/evolution_operator/test_ome.py +++ b/tests/eko/evolution_operator/test_ome.py @@ -14,7 +14,6 @@ ) from eko.io.runcards import OperatorCard, TheoryCard from eko.io.types import InversionMethod -from eko.matchings import Segment from eko.runner import legacy from ekore.operator_matrix_elements.unpolarized.space_like import ( A_non_singlet, @@ -334,19 +333,19 @@ def test_quad_ker(monkeypatch): class TestOperatorMatrixElement: def test_labels(self, theory_ffns, operator_card, tmp_path: pathlib.Path): - theory_card = theory_ffns(3) path = tmp_path / "eko.tar" for skip_singlet in [True, False]: for skip_ns in [True, False]: operator_card.debug.skip_singlet = skip_singlet operator_card.debug.skip_non_singlet = skip_ns path.unlink(missing_ok=True) - g = legacy.Runner(theory_card, operator_card, path=path).op_grid + g = legacy.Runner(theory_ffns(3), operator_card, path=path).op_grid o = OperatorMatrixElement( g.config, g.managers, is_backward=True, - segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), + q2=None, + nf=None, L=None, is_msbar=False, ) @@ -388,7 +387,8 @@ def test_compute_n3lo(self, theory_ffns, operator_card, tmp_path): g.config, g.managers, is_backward=True, - segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), + q2=theory_card.heavy.masses.b.value**2, + nf=4, L=0, is_msbar=False, ) @@ -419,7 +419,8 @@ def test_compute_lo(self, theory_ffns, operator_card, tmp_path): g.config, g.managers, is_backward=False, - segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), + q2=theory_card.heavy.masses.b.value**2, + nf=4, L=0, is_msbar=False, ) @@ -470,7 +471,8 @@ def test_compute_nlo(self, theory_ffns, operator_card: OperatorCard, tmp_path): g.config, g.managers, is_backward=False, 
- segment=Segment(theory_card.heavy.masses.b.value**2, None, 4), + q2=theory_card.heavy.masses.b.value**2, + nf=4, L=0, is_msbar=False, ) From 66ba3753cb91b791c5dec68d98a7b4aab63aeca7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 17:52:17 +0200 Subject: [PATCH 062/154] Attempt to fix grid and ome --- src/eko/evolution_operator/grid.py | 3 ++- src/eko/evolution_operator/operator_matrix_element.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/eko/evolution_operator/grid.py b/src/eko/evolution_operator/grid.py index c90383295..ca8a46d51 100644 --- a/src/eko/evolution_operator/grid.py +++ b/src/eko/evolution_operator/grid.py @@ -123,7 +123,8 @@ def get_threshold_operators(self, path: List[Segment]) -> List[Operator]: ome = OperatorMatrixElement( self.config, self.managers, - seg, + seg.nf - shift + 3, + seg.target, is_downward, np.log(kthr), self.config["HQ"] == "MSBAR", diff --git a/src/eko/evolution_operator/operator_matrix_element.py b/src/eko/evolution_operator/operator_matrix_element.py index 876aa8050..65fa7ceb2 100644 --- a/src/eko/evolution_operator/operator_matrix_element.py +++ b/src/eko/evolution_operator/operator_matrix_element.py @@ -15,6 +15,7 @@ from .. import basis_rotation as br from .. import scale_variations as sv from ..io.types import InversionMethod +from ..matchings import Segment from . 
import Operator, QuadKerBase logger = logging.getLogger(__name__) @@ -211,7 +212,7 @@ class OperatorMatrixElement(Operator): full_labels_qed = copy.deepcopy(full_labels) def __init__(self, config, managers, nf, q2, is_backward, L, is_msbar): - super().__init__(config, managers, nf, q2, None) + super().__init__(config, managers, Segment(q2, q2, nf)) self.backward_method = config["backward_inversion"] if is_backward else None if is_backward: self.is_intrinsic = True From cf504830d60f5429f068f9f41af92c0f2fd0af08 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 3 May 2023 17:58:08 +0200 Subject: [PATCH 063/154] Fix eko/evop/test_init --- tests/eko/evolution_operator/test_init.py | 33 +++++++++-------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/tests/eko/evolution_operator/test_init.py b/tests/eko/evolution_operator/test_init.py index 0aab4f9fc..492b96429 100644 --- a/tests/eko/evolution_operator/test_init.py +++ b/tests/eko/evolution_operator/test_init.py @@ -14,6 +14,7 @@ from eko.kernels import non_singlet as ns from eko.kernels import non_singlet_qed as qed_ns from eko.kernels import singlet as s +from eko.matchings import Segment def test_quad_ker_errors(): @@ -214,9 +215,7 @@ def test_labels(self): ev_op_iterations=1, ), fake_managers, - 3, - 1, - 2, + Segment(1, 2, 3), ) assert sorted(o.labels) == sorted(br.full_labels) o = Operator( @@ -229,9 +228,7 @@ def test_labels(self): ev_op_iterations=1, ), fake_managers, - 3, - 1, - 2, + Segment(1, 2, 3), ) assert sorted(o.labels) == [] @@ -246,9 +243,7 @@ def test_labels_qed(self): ev_op_iterations=1, ), fake_managers, - 3, - 1, - 2, + Segment(1, 2, 3), ) assert sorted(o.labels) == sorted(br.full_unified_labels) o = Operator( @@ -261,9 +256,7 @@ def test_labels_qed(self): ev_op_iterations=1, ), fake_managers, - 3, - 1, - 2, + Segment(1, 2, 3), ) assert sorted(o.labels) == [] @@ -282,9 +275,7 @@ def test_n_pools(self): ev_op_iterations=1, ), fake_managers, - 3, - 1, - 10, + Segment(1, 10, 
3), ) assert o.n_pools == os.cpu_count() - excluded_cores @@ -296,7 +287,7 @@ def test_exponentiated(self, theory_ffns, operator_card, tmp_path): r = eko.runner.legacy.Runner(tcard, ocard, path=tmp_path / "eko.tar") g = r.op_grid # setup objs - o = Operator(g.config, g.managers, 3, 2.0, 10.0) + o = Operator(g.config, g.managers, Segment(2.0, 10.0, 3)) o.compute() self.check_lo(o) @@ -307,7 +298,7 @@ def test_compute_parallel(self, monkeypatch, theory_ffns, operator_card, tmp_pat r = eko.runner.legacy.Runner(tcard, ocard, path=tmp_path / "eko.tar") g = r.op_grid # setup objs - o = Operator(g.config, g.managers, 3, 2.0, 10.0) + o = Operator(g.config, g.managers, Segment(2.0, 10.0, 3)) # fake quad monkeypatch.setattr( scipy.integrate, "quad", lambda *args, **kwargs: np.random.rand(2) @@ -337,7 +328,7 @@ def test_compute_no_skip_sv( r = eko.runner.legacy.Runner(tcard, ocard, path=tmp_path / "eko.tar") g = r.op_grid # setup objs - o = Operator(g.config, g.managers, 3, 2.0, 2.0) + o = Operator(g.config, g.managers, Segment(2.0, 2.0, 3)) # fake quad v = 0.1234 monkeypatch.setattr( @@ -357,7 +348,7 @@ def test_compute(self, monkeypatch, theory_ffns, operator_card, tmp_path): ocard: OperatorCard = operator_card r = eko.runner.legacy.Runner(tcard, ocard, path=tmp_path / "eko.tar") g = r.op_grid - o = Operator(g.config, g.managers, 3, 2.0, 10.0) + o = Operator(g.config, g.managers, Segment(2.0, 10.0, 3)) # fake quad monkeypatch.setattr( scipy.integrate, "quad", lambda *args, **kwargs: np.random.rand(2) @@ -379,7 +370,7 @@ def test_compute(self, monkeypatch, theory_ffns, operator_card, tmp_path): # unity operators for n in range(1, 3 + 1): - o1 = Operator(g.config, g.managers, 3, 2.0, 2.0) + o1 = Operator(g.config, g.managers, Segment(2.0, 2.0, 3)) o1.config["order"] = (n, 0) o1.compute() for k in br.non_singlet_labels: @@ -393,7 +384,7 @@ def test_compute(self, monkeypatch, theory_ffns, operator_card, tmp_path): for n in range(1, 3 + 1): for qed in range(1, 2 + 1): 
g.config["order"] = (n, qed) - o1 = Operator(g.config, g.managers, 3, 2.0, 2.0) + o1 = Operator(g.config, g.managers, Segment(2.0, 2.0, 3)) # o1.config["order"] = (n, qed) o1.compute() for k in br.non_singlet_unified_labels: From 85ed203642c3e40523890b941f28fea46d4f22eb Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Wed, 3 May 2023 18:11:21 +0200 Subject: [PATCH 064/154] Give inventories the access to the correct path --- src/eko/io/inventory.py | 9 +++++---- src/eko/io/struct.py | 16 ++++++++-------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 625345654..24a04d3dc 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -52,6 +52,7 @@ class Inventory(Generic[H]): """ + path: Path access: AccessConfigs header_type: Type[Header] cache: Dict[H, Optional[Operator]] = field(default_factory=dict) @@ -67,7 +68,7 @@ def lookup(self, stem: str, header: bool = False) -> Path: EXT = OPERATOR_EXT if not header else [HEADER_EXT] found = [ path - for path in self.access.path.iterdir() + for path in self.path.iterdir() if path.name.startswith(stem) if "".join(path.suffixes) in EXT ] @@ -124,7 +125,7 @@ def __setitem__(self, header: H, operator: Optional[Operator]): self.access.assert_writeable() # always save the header on disk - headpath = self.access.path / header_name(header) + headpath = self.path / header_name(header) headpath.write_text(yaml.dump(asdict(header)), encoding="utf-8") # in case of contentless, set empty cache and exit @@ -136,7 +137,7 @@ def __setitem__(self, header: H, operator: Optional[Operator]): assert operator is not None with_err = operator.error is not None - oppath = self.access.path / operator_name(header, err=with_err) + oppath = self.path / operator_name(header, err=with_err) with open(oppath, "wb") as fd: operator.save(fd) @@ -188,7 +189,7 @@ def sync(self): has been abused, nothing will be deleted nor unloaded. 
""" - for path in self.access.path.iterdir(): + for path in self.path.iterdir(): if path.suffix != HEADER_EXT: continue diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 2d499f3d9..b711dbd66 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -28,16 +28,16 @@ logger = logging.getLogger(__name__) -def inventories(access: AccessConfigs) -> dict: +def inventories(path: pathlib.Path, access: AccessConfigs) -> dict: """Set up empty inventories for object initialization.""" return dict( - recipes=Inventory(access, Evolution, contentless=True, name="recipes"), + recipes=Inventory(path, access, Evolution, contentless=True, name="recipes"), recipes_matching=Inventory( - access, Matching, contentless=True, name="matching-recipes" + path, access, Matching, contentless=True, name="matching-recipes" ), - parts=Inventory(access, Evolution, name="parts"), - parts_matching=Inventory(access, Matching, name="matching-parts"), - operators=Inventory(access, Target, name="operators"), + parts=Inventory(path, access, Evolution, name="parts"), + parts_matching=Inventory(path, access, Matching, name="matching-parts"), + operators=Inventory(path, access, Target, name="operators"), ) @@ -361,7 +361,7 @@ def open(cls, path: os.PathLike, mode="r"): cls.load(path, tmpdir) metadata = Metadata.load(tmpdir) opened = cls( - **inventories(access), + **inventories(tmpdir, access), metadata=metadata, access=access, ) @@ -543,7 +543,7 @@ def build(self) -> EKO: ) self.eko = EKO( - **inventories(self.access), + **inventories(self.path, self.access), metadata=metadata, access=self.access, ) From edf8d830d3fe31340a8a21cc30b056e1f90d2b0c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 08:59:31 +0200 Subject: [PATCH 065/154] Propagate correct internal paths to inventories --- src/eko/io/paths.py | 13 +++++++++++++ src/eko/io/struct.py | 19 ++++++++++++++----- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/src/eko/io/paths.py 
b/src/eko/io/paths.py index 44b9821a2..67100fdaa 100644 --- a/src/eko/io/paths.py +++ b/src/eko/io/paths.py @@ -9,6 +9,7 @@ METADATAFILE = "metadata.yaml" RECIPESDIR = "recipes" PARTSDIR = "parts" +MATCHINGDIR = "matching" OPERATORSDIR = "operators" @@ -39,11 +40,21 @@ def recipes(self): """Recipes folder.""" return self.root / RECIPESDIR + @property + def recipes_matching(self): + """Matching recipes folder.""" + return self.root / RECIPESDIR / MATCHINGDIR + @property def parts(self): """Parts folder.""" return self.root / PARTSDIR + @property + def parts_matching(self): + """Matching parts folder.""" + return self.root / PARTSDIR / MATCHINGDIR + @property def operators(self): """Operators folder. @@ -70,5 +81,7 @@ def bootstrap(self, theory: dict, operator: dict, metadata: dict): self.theory_card.write_text(yaml.dump(theory), encoding="utf-8") self.operator_card.write_text(yaml.dump(operator), encoding="utf-8") self.recipes.mkdir() + self.recipes_matching.mkdir() self.parts.mkdir() + self.parts_matching.mkdir() self.operators.mkdir() diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index b711dbd66..350bc451d 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -30,14 +30,23 @@ def inventories(path: pathlib.Path, access: AccessConfigs) -> dict: """Set up empty inventories for object initialization.""" + paths = InternalPaths(path) return dict( - recipes=Inventory(path, access, Evolution, contentless=True, name="recipes"), + recipes=Inventory( + paths.recipes, access, Evolution, contentless=True, name="recipes" + ), recipes_matching=Inventory( - path, access, Matching, contentless=True, name="matching-recipes" + paths.recipes_matching, + access, + Matching, + contentless=True, + name="matching-recipes", + ), + parts=Inventory(paths.parts, access, Evolution, name="parts"), + parts_matching=Inventory( + paths.parts_matching, access, Matching, name="matching-parts" ), - parts=Inventory(path, access, Evolution, name="parts"), - 
parts_matching=Inventory(path, access, Matching, name="matching-parts"), - operators=Inventory(path, access, Target, name="operators"), + operators=Inventory(paths.operators, access, Target, name="operators"), ) From af85f39a24bd30dede1cc542b908afb0bb734ee1 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 08:59:54 +0200 Subject: [PATCH 066/154] Update rotations name --- src/ekobox/apply.py | 15 ++++++--------- tests/eko/runner/test_legacy.py | 9 +++++---- tests/ekobox/test_apply.py | 8 ++++---- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/src/ekobox/apply.py b/src/ekobox/apply.py index f115e8337..9c5caf89b 100644 --- a/src/ekobox/apply.py +++ b/src/ekobox/apply.py @@ -74,15 +74,12 @@ def apply_pdf_flavor( output PDFs and their associated errors for the computed mu2grid """ # create pdfs - pdfs = np.zeros((len(eko.rotations.inputpids), len(eko.rotations.inputgrid))) - for j, pid in enumerate(eko.rotations.inputpids): + pdfs = np.zeros((len(eko.bases.inputpids), len(eko.bases.inputgrid))) + for j, pid in enumerate(eko.bases.inputpids): if not lhapdf_like.hasFlavor(pid): continue pdfs[j] = np.array( - [ - lhapdf_like.xfxQ2(pid, x, eko.mu20) / x - for x in eko.rotations.inputgrid.raw - ] + [lhapdf_like.xfxQ2(pid, x, eko.mu20) / x for x in eko.bases.inputgrid.raw] ) # build output @@ -94,11 +91,11 @@ def apply_pdf_flavor( else: error_final = None out_grid[mu2] = { - "pdfs": dict(zip(eko.rotations.targetpids, pdf_final)), + "pdfs": dict(zip(eko.bases.targetpids, pdf_final)), "errors": None, } if error_final is not None: - out_grid[mu2]["errors"] = dict(zip(eko.rotations.targetpids, error_final)) + out_grid[mu2]["errors"] = dict(zip(eko.bases.targetpids, error_final)) # rotate to evolution basis if flavor_rotation is not None: @@ -120,7 +117,7 @@ def apply_pdf_flavor( # rotate/interpolate to target grid if targetgrid is not None: b = interpolation.InterpolatorDispatcher( - xgrid=eko.rotations.targetgrid, + 
xgrid=eko.bases.targetgrid, polynomial_degree=eko.operator_card.configs.interpolation_polynomial_degree, mode_N=False, ) diff --git a/tests/eko/runner/test_legacy.py b/tests/eko/runner/test_legacy.py index 6d4f0f88a..1d299a6f0 100644 --- a/tests/eko/runner/test_legacy.py +++ b/tests/eko/runner/test_legacy.py @@ -45,20 +45,21 @@ class FakeEM(enum.Enum): def check_shapes(o, txs, ixs, theory_card, operators_card): - tpids = len(o.rotations.targetpids) - ipids = len(o.rotations.inputpids) + tpids = len(o.bases.targetpids) + ipids = len(o.bases.inputpids) op_shape = (tpids, len(txs), ipids, len(ixs)) # check output = input np.testing.assert_allclose(o.xgrid.raw, operators_card.xgrid.raw) # targetgrid and inputgrid in the opcard are now ignored, we are testing this np.testing.assert_allclose( - o.rotations.targetgrid.raw, + o.bases.targetgrid.raw, txs.raw, ) - np.testing.assert_allclose(o.rotations.inputgrid.raw, ixs.raw) + np.testing.assert_allclose(o.bases.inputgrid.raw, ixs.raw) np.testing.assert_allclose(o.mu20, operators_card.mu20) # check available operators + ~o.operators assert len(o.mu2grid) == len(operators_card.mu2grid) assert list(o.mu2grid) == operators_card.mu2grid for _, ops in o.items(): diff --git a/tests/ekobox/test_apply.py b/tests/ekobox/test_apply.py index 8c5817f06..8fa23548f 100644 --- a/tests/ekobox/test_apply.py +++ b/tests/ekobox/test_apply.py @@ -12,13 +12,13 @@ def test_apply(self, eko_factory: EKOFactory, fake_pdf): pdf_grid = apply.apply_pdf(eko, fake_pdf) assert len(pdf_grid) == len(eko.evolgrid) pdfs = pdf_grid[ep_out]["pdfs"] - assert list(pdfs.keys()) == list(eko.rotations.targetpids) + assert list(pdfs.keys()) == list(eko.bases.targetpids) # rotate to target_grid target_grid = [0.75] pdf_grid = apply.apply_pdf(eko, fake_pdf, target_grid) assert len(pdf_grid) == 1 pdfs = pdf_grid[ep_out]["pdfs"] - assert list(pdfs.keys()) == list(eko.rotations.targetpids) + assert list(pdfs.keys()) == list(eko.bases.targetpids) def 
test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): eko = eko_factory.get() @@ -29,10 +29,10 @@ def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): ) monkeypatch.setattr( "eko.basis_rotation.flavor_basis_pids", - eko.rotations.targetpids, + eko.bases.targetpids, ) fake_evol_basis = tuple( - ["a", "b"] + [str(x) for x in range(len(eko.rotations.pids) - 2)] + ["a", "b"] + [str(x) for x in range(len(eko.bases.pids) - 2)] ) monkeypatch.setattr("eko.basis_rotation.evol_basis", fake_evol_basis) pdf_grid = apply.apply_pdf(eko, fake_pdf, rotate_to_evolution_basis=True) From 29555c382d67dd79ca446f68272c2797283d61ed Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 11:23:19 +0200 Subject: [PATCH 067/154] Fix test eko/io/bases --- tests/eko/io/test_bases.py | 47 +++++++++++++++++++++++++++++++++++++ tests/eko/io/test_struct.py | 42 --------------------------------- 2 files changed, 47 insertions(+), 42 deletions(-) create mode 100644 tests/eko/io/test_bases.py diff --git a/tests/eko/io/test_bases.py b/tests/eko/io/test_bases.py new file mode 100644 index 000000000..0794358ba --- /dev/null +++ b/tests/eko/io/test_bases.py @@ -0,0 +1,47 @@ +import numpy as np + +from eko import basis_rotation as br +from eko import interpolation +from eko.io.bases import Bases + + +class TestBases: + def test_fallback(self): + xg = interpolation.XGrid([0.1, 1.0]) + r = Bases(xgrid=xg) + np.testing.assert_allclose(r.targetpids, r.pids) + np.testing.assert_allclose(r.inputpids, r.pids) + assert r.xgrid == xg + assert r.targetgrid == xg + assert r.inputgrid == xg + + def test_overwrite(self): + tpids = np.array([3, 4] + list(br.flavor_basis_pids[2:])) + ipids = np.array([5, 6] + list(br.flavor_basis_pids[2:])) + xg = interpolation.XGrid([0.1, 1.0]) + txg = interpolation.XGrid([0.2, 1.0]) + ixg = interpolation.XGrid([0.3, 1.0]) + r = Bases( + xgrid=xg, + _targetgrid=txg, + _inputgrid=ixg, + _targetpids=tpids, + _inputpids=ipids, + ) + 
np.testing.assert_allclose(r.targetpids, tpids) + np.testing.assert_allclose(r.inputpids, ipids) + assert r.xgrid == xg + assert r.targetgrid == txg + assert r.inputgrid == ixg + + def test_init(self): + xg = interpolation.XGrid([0.1, 1.0]) + txg = np.array([0.2, 1.0]) + ixg = {"grid": [0.3, 1.0], "log": True} + r = Bases(xgrid=xg, _targetgrid=txg, _inputgrid=ixg) + assert isinstance(r.xgrid, interpolation.XGrid) + assert isinstance(r.targetgrid, interpolation.XGrid) + assert isinstance(r.inputgrid, interpolation.XGrid) + assert r.xgrid == xg + assert r.targetgrid == interpolation.XGrid(txg) + assert r.inputgrid == interpolation.XGrid.load(ixg) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index e8c97f67d..36946e530 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -49,48 +49,6 @@ def test_load_error(self, monkeypatch): struct.Operator.load(stream, False) -class TestRotations: - def test_fallback(self): - xg = interpolation.XGrid([0.1, 1.0]) - r = struct.Rotations(xgrid=xg) - np.testing.assert_allclose(r.targetpids, r.pids) - np.testing.assert_allclose(r.inputpids, r.pids) - assert r.xgrid == xg - assert r.targetgrid == xg - assert r.inputgrid == xg - - def test_overwrite(self): - tpids = np.array([3, 4] + list(br.flavor_basis_pids[2:])) - ipids = np.array([5, 6] + list(br.flavor_basis_pids[2:])) - xg = interpolation.XGrid([0.1, 1.0]) - txg = interpolation.XGrid([0.2, 1.0]) - ixg = interpolation.XGrid([0.3, 1.0]) - r = struct.Rotations( - xgrid=xg, - _targetgrid=txg, - _inputgrid=ixg, - _targetpids=tpids, - _inputpids=ipids, - ) - np.testing.assert_allclose(r.targetpids, tpids) - np.testing.assert_allclose(r.inputpids, ipids) - assert r.xgrid == xg - assert r.targetgrid == txg - assert r.inputgrid == ixg - - def test_init(self): - xg = interpolation.XGrid([0.1, 1.0]) - txg = np.array([0.2, 1.0]) - ixg = {"grid": [0.3, 1.0], "log": True} - r = struct.Rotations(xgrid=xg, _targetgrid=txg, _inputgrid=ixg) - assert 
isinstance(r.xgrid, interpolation.XGrid) - assert isinstance(r.targetgrid, interpolation.XGrid) - assert isinstance(r.inputgrid, interpolation.XGrid) - assert r.xgrid == xg - assert r.targetgrid == interpolation.XGrid(txg) - assert r.inputgrid == interpolation.XGrid.load(ixg) - - class TestEKO: def test_new_error(self, tmp_path: pathlib.Path): # try to write to a file different from bla From 4d35418118d91dbb079b66f9425508cf80c596dc Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 10:52:17 +0200 Subject: [PATCH 068/154] Add prefix to tempdir created --- src/eko/io/struct.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 350bc451d..9c3a64a4c 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -27,6 +27,8 @@ logger = logging.getLogger(__name__) +TEMP_PREFIX = "eko-" + def inventories(path: pathlib.Path, access: AccessConfigs) -> dict: """Set up empty inventories for object initialization.""" @@ -324,7 +326,7 @@ def deepcopy(self, path: os.PathLike): new.access.readonly = False new.access.open = True - tmpdir = pathlib.Path(tempfile.mkdtemp()) + tmpdir = pathlib.Path(tempfile.mkdtemp(prefix=TEMP_PREFIX)) new.metadata.path = tmpdir # copy old dir to new dir tmpdir.rmdir() @@ -365,7 +367,7 @@ def open(cls, path: os.PathLike, mode="r"): else: raise ValueError - tmpdir = pathlib.Path(tempfile.mkdtemp()) + tmpdir = pathlib.Path(tempfile.mkdtemp(prefix=TEMP_PREFIX)) if load: cls.load(path, tmpdir) metadata = Metadata.load(tmpdir) From b2fd2c429efac33ac87fbfde100c83f14440398d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 11:35:16 +0200 Subject: [PATCH 069/154] Sync before items, for user convenience --- src/eko/io/struct.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 9c3a64a4c..56a31c1e8 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -237,6 +237,7 @@ def items(self): 
immediately after """ + self.operators.sync() for target in self.operators: # recast to evolution point ep = target.ep From d58129dab0bb21e1dcf600c29b3f064ed01b8eac Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 11:36:10 +0200 Subject: [PATCH 070/154] Handle the new evolgrid in ekobox --- src/ekobox/apply.py | 2 +- src/ekobox/evol_pdf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ekobox/apply.py b/src/ekobox/apply.py index 9c5caf89b..edfdc422d 100644 --- a/src/ekobox/apply.py +++ b/src/ekobox/apply.py @@ -84,7 +84,7 @@ def apply_pdf_flavor( # build output out_grid = {} - for mu2, elem in eko.items(): + for (mu2, nf), elem in eko.items(): pdf_final = np.einsum(CONTRACTION, elem.operator, pdfs, optimize="optimal") if elem.error is not None: error_final = np.einsum(CONTRACTION, elem.error, pdfs, optimize="optimal") diff --git a/src/ekobox/evol_pdf.py b/src/ekobox/evol_pdf.py index 3be7201de..b6dbb5def 100644 --- a/src/ekobox/evol_pdf.py +++ b/src/ekobox/evol_pdf.py @@ -84,7 +84,7 @@ def evolve_pdfs( lambda pid, x, Q2, evolved_PDF=evolved_PDF: targetlist[targetlist.index(x)] * evolved_PDF[Q2]["pdfs"][pid][targetlist.index(x)], xgrid=targetlist, - mu2grid=operators_card.mu2grid, + evolgrid=operators_card.evolgrid, pids=np.array(br.flavor_basis_pids), ) # all_blocks will be useful in case there will be necessity to dump many blocks From b1d0329753430f530986366baa7cd19a7c8caa58 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 11:36:31 +0200 Subject: [PATCH 071/154] Simplify genpdf, make it evolgrid aware --- src/ekobox/genpdf/__init__.py | 32 +++++++++----------------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/src/ekobox/genpdf/__init__.py b/src/ekobox/genpdf/__init__.py index a5ed41865..858da36b8 100644 --- a/src/ekobox/genpdf/__init__.py +++ b/src/ekobox/genpdf/__init__.py @@ -3,10 +3,12 @@ import logging import pathlib import shutil +from typing import Callable, List 
import numpy as np from eko import basis_rotation as br +from eko.io.types import EvolutionPoint as EPoint from . import export, flavors, load @@ -236,30 +238,14 @@ def install_pdf(name): shutil.move(str(src), str(target)) -def generate_block(xfxQ2, xgrid, mu2grid, pids): - """Generate an LHAPDF data block from a callable. - - Parameters - ---------- - xfxQ2 : callable - LHAPDF like callable - mu2grid : list(float) - Q grid - pids : list(int) - Flavors list - xgrid : list(float) - x grid - - Returns - ------- - dict - PDF block - - """ - block = dict(mu2grid=mu2grid, pids=pids, xgrid=xgrid) +def generate_block( + xfxQ2: Callable, xgrid: List[float], evolgrid: List[EPoint], pids: List[int] +) -> dict: + """Generate an LHAPDF data block from a callable.""" + block: dict = dict(mu2grid=[mu2 for mu2, _ in evolgrid], pids=pids, xgrid=xgrid) data = [] for x in xgrid: - for Q2 in mu2grid: - data.append(np.array([xfxQ2(pid, x, Q2) for pid in pids])) + for mu2, _ in evolgrid: + data.append(np.array([xfxQ2(pid, x, mu2) for pid in pids])) block["data"] = np.array(data) return block From 5515530deade47bc200e1ea4475ce50596f2227e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 11:41:11 +0200 Subject: [PATCH 072/154] Fix test apply --- tests/ekobox/test_apply.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/ekobox/test_apply.py b/tests/ekobox/test_apply.py index 8fa23548f..70e4b1435 100644 --- a/tests/ekobox/test_apply.py +++ b/tests/ekobox/test_apply.py @@ -11,13 +11,13 @@ def test_apply(self, eko_factory: EKOFactory, fake_pdf): # fake pdfs pdf_grid = apply.apply_pdf(eko, fake_pdf) assert len(pdf_grid) == len(eko.evolgrid) - pdfs = pdf_grid[ep_out]["pdfs"] + pdfs = pdf_grid[ep_out[0]]["pdfs"] assert list(pdfs.keys()) == list(eko.bases.targetpids) # rotate to target_grid target_grid = [0.75] pdf_grid = apply.apply_pdf(eko, fake_pdf, target_grid) assert len(pdf_grid) == 1 - pdfs = pdf_grid[ep_out]["pdfs"] + pdfs = 
pdf_grid[ep_out[0]]["pdfs"] assert list(pdfs.keys()) == list(eko.bases.targetpids) def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): @@ -37,5 +37,5 @@ def test_apply_flavor(self, eko_factory: EKOFactory, fake_pdf, monkeypatch): monkeypatch.setattr("eko.basis_rotation.evol_basis", fake_evol_basis) pdf_grid = apply.apply_pdf(eko, fake_pdf, rotate_to_evolution_basis=True) assert len(pdf_grid) == len(eko.evolgrid) - pdfs = pdf_grid[ep_out]["pdfs"] + pdfs = pdf_grid[ep_out[0]]["pdfs"] assert list(pdfs.keys()) == list(fake_evol_basis) From 5f1e8e1281041fcc83b4de55638ae4dc3f33d788 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 11:44:17 +0200 Subject: [PATCH 073/154] Fix test eko/io/items --- src/eko/io/items.py | 2 ++ tests/eko/io/test_items.py | 38 +++++++++++++++++++++++++++++++++++++ tests/eko/io/test_struct.py | 37 ------------------------------------ 3 files changed, 40 insertions(+), 37 deletions(-) create mode 100644 tests/eko/io/test_items.py diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 4eacff548..4b6e4be64 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -186,6 +186,8 @@ def load(cls, stream: BinaryIO): op = content["operator"] err = content["error"] else: + # TODO: We might consider dropping this exception since np.load will always + # return a array (or fail on it's own) raise exceptions.OperatorLoadingError( "Not possible to load operator, content format not recognized" ) diff --git a/tests/eko/io/test_items.py b/tests/eko/io/test_items.py new file mode 100644 index 000000000..d2bba3919 --- /dev/null +++ b/tests/eko/io/test_items.py @@ -0,0 +1,38 @@ +import io + +import lz4.frame +import numpy as np +import pytest + +from eko.io import struct + + +class TestOperator: + def test_value_only(self): + v = np.random.rand(2, 2) + opv = struct.Operator(operator=v) + assert opv.error is None + stream = io.BytesIO() + opv.save(stream) + stream.seek(0) + opv_ = struct.Operator.load(stream) + 
np.testing.assert_allclose(opv.operator, opv_.operator) + np.testing.assert_allclose(v, opv_.operator) + assert opv_.error is None + + def test_value_and_error(self): + v, e = np.random.rand(2, 2, 2) + opve = struct.Operator(operator=v, error=e) + stream = io.BytesIO() + opve.save(stream) + stream.seek(0) + opve_ = struct.Operator.load(stream) + np.testing.assert_allclose(opve.operator, opve_.operator) + np.testing.assert_allclose(v, opve_.operator) + np.testing.assert_allclose(opve.error, opve_.error) + np.testing.assert_allclose(e, opve_.error) + + def test_load_error_is_not_lz4(self, monkeypatch): + stream = io.BytesIO() + with pytest.raises(RuntimeError, match="LZ4"): + struct.Operator.load(stream) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index 36946e530..1e9c6e868 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -1,4 +1,3 @@ -import io import pathlib import tarfile @@ -13,42 +12,6 @@ from tests.conftest import EKOFactory -class TestOperator: - def test_value_only(self): - v = np.random.rand(2, 2) - opv = struct.Operator(operator=v) - assert opv.error is None - for compress in (True, False): - stream = io.BytesIO() - opv.save(stream, compress) - stream.seek(0) - opv_ = struct.Operator.load(stream, compress) - np.testing.assert_allclose(opv.operator, opv_.operator) - np.testing.assert_allclose(v, opv_.operator) - assert opv_.error is None - - def test_value_and_error(self): - v, e = np.random.rand(2, 2, 2) - opve = struct.Operator(operator=v, error=e) - for compress in (True, False): - stream = io.BytesIO() - opve.save(stream, compress) - stream.seek(0) - opve_ = struct.Operator.load(stream, compress) - np.testing.assert_allclose(opve.operator, opve_.operator) - np.testing.assert_allclose(v, opve_.operator) - np.testing.assert_allclose(opve.error, opve_.error) - np.testing.assert_allclose(e, opve_.error) - - def test_load_error(self, monkeypatch): - # We might consider dropping this exception since 
np.load will always - # return a array (or fail on it's own) - stream = io.BytesIO() - monkeypatch.setattr(np, "load", lambda _: None) - with pytest.raises(ValueError): - struct.Operator.load(stream, False) - - class TestEKO: def test_new_error(self, tmp_path: pathlib.Path): # try to write to a file different from bla From 4142d09e89781b0bee3130ed8b4a9094ec82502c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 11:53:49 +0200 Subject: [PATCH 074/154] Fix genpdf tests --- src/ekobox/genpdf/__init__.py | 100 +++++++++++++--------------------- tests/ekobox/test_genpdf.py | 20 ++++--- 2 files changed, 52 insertions(+), 68 deletions(-) diff --git a/src/ekobox/genpdf/__init__.py b/src/ekobox/genpdf/__init__.py index 858da36b8..5df318de3 100644 --- a/src/ekobox/genpdf/__init__.py +++ b/src/ekobox/genpdf/__init__.py @@ -3,7 +3,7 @@ import logging import pathlib import shutil -from typing import Callable, List +from typing import Callable, List, Optional, Union import numpy as np @@ -15,24 +15,18 @@ logger = logging.getLogger(__name__) -def take_data(parent_pdf_set=None, members=False, xgrid=None, mu2grid=None): +def take_data( + parent_pdf_set: Optional[Union[str, dict]] = None, + members: bool = False, + xgrid: Optional[List[float]] = None, + evolgrid: Optional[List[EPoint]] = None, +): """ Auxiliary function for `generate_pdf`. It provides the info, the heads of the member files and the blocks to be generated to `generate_pdf`. 
- Parameters - ---------- - parent_pdf_set : None or str or dict - the PDF set to be used as parent set - members : bool - if true every member of the parent is loaded - xgrid : list(float) - produced x grid if given - mu2grid : list(float) - produced Q2 grid if given - Returns ------- info : dict @@ -44,8 +38,8 @@ def take_data(parent_pdf_set=None, members=False, xgrid=None, mu2grid=None): """ if xgrid is None: xgrid = np.geomspace(1e-9, 1, 240) - if mu2grid is None: - mu2grid = np.geomspace(1.3, 1e5, 35) + if evolgrid is None: + evolgrid = [(mu2, 0) for mu2 in np.geomspace(1.3, 1e5, 35)] # collect blocks all_blocks = [] info = None @@ -65,7 +59,7 @@ def take_data(parent_pdf_set=None, members=False, xgrid=None, mu2grid=None): else: toylh = toy.mkPDF("", 0) all_blocks.append( - [generate_block(toylh.xfxQ2, xgrid, mu2grid, br.flavor_basis_pids)] + [generate_block(toylh.xfxQ2, xgrid, evolgrid, br.flavor_basis_pids)] ) else: @@ -86,7 +80,7 @@ def take_data(parent_pdf_set=None, members=False, xgrid=None, mu2grid=None): if pid not in parent_pdf_set else parent_pdf_set[pid](x, Q2), xgrid, - mu2grid, + evolgrid, br.flavor_basis_pids, ) ] @@ -97,14 +91,14 @@ def take_data(parent_pdf_set=None, members=False, xgrid=None, mu2grid=None): def generate_pdf( - name, - labels, + name: str, + labels: List[int], parent_pdf_set=None, members=False, info_update=None, - install=False, - xgrid=None, - mu2grid=None, + install: bool = False, + xgrid: Optional[List[float]] = None, + evolgrid: Optional[List[EPoint]] = None, ): """ Generate a new PDF from a parent PDF with a set of flavors. @@ -135,46 +129,30 @@ def generate_pdf( Turning True the value of the `install` flag, it is possible to automatically install the generated PDF to the lhapdf directory. By default install is False. 
- Parameters - ---------- - name : str - target name - labels : - list of flavors - parent_pdf_set : - parent PDF - all : bool - iterate on members - install : bool - install on LHAPDF path - xgrid : list(float) - produced x grid if given - mu2grid : list(float) - produced Q2 grid if given - Examples -------- - To generate a PDF with a fixed function `f(x,Q2)` for some flavors - you can use the following snippet: - - >>> # f = lambda x,Q2 ... put the desired function here - >>> # mask = [list of active PIDs] - >>> generate_pdf(name, labels, parent_pdf_set={pid: f for pid in mask}) - - The |API| also provides the possibility to extract arbitrary flavor combinations: - using the debug PDF settings we can construct a "anti-QED-singlet" combination that - is usefull in debugging DIS codes since it does not couple in |LO|, but only - through the pure-singlet contributions (starting at |NNLO|) - - >>> from eko import basis_rotation as br - >>> from ekobox import genpdf - >>> import numpy as np - >>> anti_qed_singlet = np.zeros_like(br.flavor_basis_pids, dtype=np.float_) - >>> anti_qed_singlet[br.flavor_basis_pids.index(1)] = -4 - >>> anti_qed_singlet[br.flavor_basis_pids.index(-1)] = -4 - >>> anti_qed_singlet[br.flavor_basis_pids.index(2)] = 1 - >>> anti_qed_singlet[br.flavor_basis_pids.index(-2)] = 1 - >>> genpdf.generate_pdf("anti_qed_singlet", [anti_qed_singlet]) + To generate a PDF with a fixed function `f(x,Q2)` for some flavors + you can use the following snippet: + + >>> # f = lambda x,Q2 ... 
put the desired function here + >>> # mask = [list of active PIDs] + >>> generate_pdf(name, labels, parent_pdf_set={pid: f for pid in mask}) + + The |API| also provides the possibility to extract arbitrary flavor combinations: + using the debug PDF settings we can construct a "anti-QED-singlet" combination that + is usefull in debugging DIS codes since it does not couple in |LO|, but only + through the pure-singlet contributions (starting at |NNLO|) + + >>> from eko import basis_rotation as br + >>> from ekobox import genpdf + >>> import numpy as np + >>> anti_qed_singlet = np.zeros_like(br.flavor_basis_pids, dtype=np.float_) + >>> anti_qed_singlet[br.flavor_basis_pids.index(1)] = -4 + >>> anti_qed_singlet[br.flavor_basis_pids.index(-1)] = -4 + >>> anti_qed_singlet[br.flavor_basis_pids.index(2)] = 1 + >>> anti_qed_singlet[br.flavor_basis_pids.index(-2)] = 1 + >>> genpdf.generate_pdf("anti_qed_singlet", [anti_qed_singlet]) + """ pathlib.Path(name).mkdir(exist_ok=True) # Checking label basis @@ -189,7 +167,7 @@ def generate_pdf( # labels = verify_labels(args.labels) info, heads, all_blocks = take_data( - parent_pdf_set, members, xgrid=xgrid, mu2grid=mu2grid + parent_pdf_set, members, xgrid=xgrid, evolgrid=evolgrid ) # filter the PDF diff --git a/tests/ekobox/test_genpdf.py b/tests/ekobox/test_genpdf.py index d14c5dfe6..0565db701 100644 --- a/tests/ekobox/test_genpdf.py +++ b/tests/ekobox/test_genpdf.py @@ -2,8 +2,11 @@ import pytest from eko import basis_rotation as br +from eko.io.runcards import flavored_mugrid from ekobox import genpdf +MASSES = [1.5, 4.0, 170.0] + def test_genpdf_exceptions(tmp_path, cd): with cd(tmp_path): @@ -33,13 +36,14 @@ def test_genpdf_exceptions(tmp_path, cd): def test_generate_block(): xg = np.linspace(0.0, 1.0, 5) - q2s = np.geomspace(1.0, 1e3, 5) + mu2s = np.geomspace(1.0, 1e3, 5) + evolgrid = flavored_mugrid(mu2s.tolist(), MASSES, [1.0, 1.0, 1.0]) pids = np.arange(3) - b = genpdf.generate_block(lambda pid, x, q2: pid * x * q2, xg, 
q2s, pids) + b = genpdf.generate_block(lambda pid, x, q2: pid * x * q2, xg, evolgrid, pids) assert isinstance(b, dict) assert sorted(b.keys()) == sorted(["data", "mu2grid", "xgrid", "pids"]) assert isinstance(b["data"], np.ndarray) - assert b["data"].shape == (len(xg) * len(q2s), len(pids)) + assert b["data"].shape == (len(xg) * len(mu2s), len(pids)) def test_install_pdf(fake_lhapdf, cd): @@ -68,7 +72,8 @@ def test_generate_pdf_debug_pid(fake_lhapdf, cd): mytmp.mkdir() n = "test_generate_pdf_debug_pid" xg = np.linspace(0.0, 1.0, 5) - q2s = np.geomspace(1.0, 1e3, 7) + mu2s = np.geomspace(1.0, 1e3, 7) + evolgrid = flavored_mugrid(mu2s.tolist(), MASSES, [1.0, 1.0, 1.0]) p = mytmp / n i = f"{n}.info" with cd(mytmp): @@ -79,7 +84,7 @@ def test_generate_pdf_debug_pid(fake_lhapdf, cd): info_update={"Debug": "debug"}, install=True, xgrid=xg, - mu2grid=q2s, + evolgrid=evolgrid, ) pp = fake_lhapdf / n assert not p.exists() @@ -164,7 +169,8 @@ def test_generate_pdf_toy_antiqed(fake_lhapdf, cd): # iterate pdfs with their error type and number of blocks n = "test_generate_pdf_toy_antiqed" xg = np.linspace(1e-5, 1.0, 5) - q2s = np.geomspace(1.0, 1e3, 7) + mu2s = np.geomspace(1.0, 1e3, 7) + evolgrid = flavored_mugrid(mu2s.tolist(), MASSES, [1.0, 1.0, 1.0]) anti_qed_singlet = np.zeros_like(br.flavor_basis_pids, dtype=np.float_) anti_qed_singlet[br.flavor_basis_pids.index(1)] = -4 anti_qed_singlet[br.flavor_basis_pids.index(-1)] = -4 @@ -179,7 +185,7 @@ def test_generate_pdf_toy_antiqed(fake_lhapdf, cd): [anti_qed_singlet], "toy", xgrid=xg, - mu2grid=q2s, + evolgrid=evolgrid, ) assert p.exists() # check info file From d8f67ce96b805ffbf655702d19133aa8688e06b8 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 11:56:33 +0200 Subject: [PATCH 075/154] Step towards fixing tests for eko/io/struct.EKO --- tests/eko/io/test_struct.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py 
index 1e9c6e868..5a3bb069c 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -1,3 +1,4 @@ +import io import pathlib import tarfile @@ -124,8 +125,6 @@ def test_copy(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): eko2_ = struct.EKO.read(p) assert eko2.raw == eko2_.raw - -class TestLegacy: def test_items(self, eko_factory: EKOFactory): """Test autodump, autoload, and manual unload.""" eko = eko_factory.get() @@ -137,14 +136,14 @@ def test_items(self, eko_factory: EKOFactory): ep = next(iter(eko)) # unload - eko._operators[ep] = None + eko.operators[ep] = None # test autoloading assert isinstance(eko[ep], struct.Operator) - assert isinstance(eko._operators[ep], struct.Operator) + assert isinstance(eko.operators[ep], struct.Operator) del eko[ep] - assert eko._operators[ep] is None + assert eko.operators[ep] is None def test_iter(self, eko_factory: EKOFactory): """Test managed iteration.""" @@ -154,7 +153,7 @@ def test_iter(self, eko_factory: EKOFactory): epprev = None for ep, op in eko.items(): if epprev is not None: - assert eko._operators[epprev] is None + assert eko.operators[epprev] is None assert isinstance(op, struct.Operator) epprev = ep @@ -166,4 +165,4 @@ def test_context_operator(self, eko_factory: EKOFactory): with eko.operator(ep) as op: assert isinstance(op, struct.Operator) - assert eko._operators[ep] is None + assert eko.operators[ep] is None From d135f71ea65406f82a1695375167e3233200f8d5 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:02:30 +0200 Subject: [PATCH 076/154] Fix bytes encoding for negative hashes --- src/eko/io/inventory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 24a04d3dc..433736039 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -10,7 +10,7 @@ from .items import Header, Operator NBYTES = 8 -ENDIANNES = "little" +ENDIANNESS = "little" HEADER_EXT = ".yaml" ARRAY_EXT = 
[".npy", ".npz"] @@ -25,7 +25,7 @@ class LookupError(ValueError): def encode(header: Header): """Extract an hash from a header.""" return base64.urlsafe_b64encode( - hash(header).to_bytes(NBYTES, byteorder=ENDIANNES) + abs(hash(header)).to_bytes(NBYTES, byteorder=ENDIANNESS) ).decode(encoding="utf-8") From f8c3ae794b14220a93fa676a8b980d4aa7c85742 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 12:19:51 +0200 Subject: [PATCH 077/154] Fix manipulate and tests --- src/eko/io/manipulate.py | 38 +++--- tests/eko/io/test_init.py | 226 +------------------------------ tests/eko/io/test_manipulate.py | 228 ++++++++++++++++++++++++++++++++ 3 files changed, 248 insertions(+), 244 deletions(-) create mode 100644 tests/eko/io/test_manipulate.py diff --git a/src/eko/io/manipulate.py b/src/eko/io/manipulate.py index f06e90e32..542e04705 100644 --- a/src/eko/io/manipulate.py +++ b/src/eko/io/manipulate.py @@ -9,7 +9,7 @@ from .. import basis_rotation as br from .. import interpolation from ..interpolation import XGrid -from .struct import EKO +from .struct import EKO, Operator logger = logging.getLogger(__name__) @@ -85,13 +85,13 @@ def xgrid_reshape( # construct matrices newtarget, targetrot = rotation( targetgrid, - eko.rotations.targetgrid, + eko.bases.targetgrid, check, lambda new, old: crot(new, old, interpdeg), ) newinput, inputrot = rotation( inputgrid, - eko.rotations.inputgrid, + eko.bases.inputgrid, check, lambda new, old: crot(new, old, interpdeg, swap=True), ) @@ -102,9 +102,9 @@ def xgrid_reshape( return # if no rotation is done, the grids are not modified if targetrot is not None: - eko.rotations.targetgrid = newtarget + eko.bases.targetgrid = newtarget if inputrot is not None: - eko.rotations.inputgrid = newinput + eko.bases.inputgrid = newinput # build new grid for ep, elem in eko.items(): @@ -127,11 +127,13 @@ def xgrid_reshape( operands.append(inputrot) operands_errs.append(inputrot) - elem.operator = np.einsum(contraction, *operands, 
optimize="optimal") + new_operator = np.einsum(contraction, *operands, optimize="optimal") if elem.error is not None: - elem.error = np.einsum(contraction, *operands_errs, optimize="optimal") + new_error = np.einsum(contraction, *operands_errs, optimize="optimal") + else: + new_error = None - eko[ep] = elem + eko[ep] = Operator(operator=new_operator, error=new_error) eko.update() @@ -171,12 +173,12 @@ def flavor_reshape( raise ValueError("Nor inputpids nor targetpids was given") # now check to the current status if targetpids is not None and np.allclose( - targetpids, np.eye(len(eko.rotations.targetpids)) + targetpids, np.eye(len(eko.bases.targetpids)) ): targetpids = None warnings.warn("The new targetpids is close to current basis") if inputpids is not None and np.allclose( - inputpids, np.eye(len(eko.rotations.inputpids)) + inputpids, np.eye(len(eko.bases.inputpids)) ): inputpids = None warnings.warn("The new inputpids is close to current basis") @@ -222,17 +224,15 @@ def flavor_reshape( if errs is not None else None ) - elem.operator = ops - elem.error = errs - eko[q2] = elem + eko[q2] = Operator(operator=ops, error=errs) # drop PIDs - keeping them int nevertheless # there is no meaningful way to set them in general, after rotation if inputpids is not None: - eko.rotations.inputpids = np.array([0] * len(eko.rotations.inputpids)) + eko.bases.inputpids = np.array([0] * len(eko.bases.inputpids)) if targetpids is not None: - eko.rotations.targetpids = np.array([0] * len(eko.rotations.targetpids)) + eko.bases.targetpids = np.array([0] * len(eko.bases.targetpids)) if update: eko.update() @@ -262,9 +262,9 @@ def to_evol(eko: EKO, source: bool = True, target: bool = False): flavor_reshape(eko, inputpids=inputpids, targetpids=targetpids, update=False) # assign pids if source: - eko.rotations.inputpids = inputpids + eko.bases.inputpids = inputpids if target: - eko.rotations.targetpids = targetpids + eko.bases.targetpids = targetpids eko.update() @@ -293,8 +293,8 @@ def 
to_uni_evol(eko: EKO, source: bool = True, target: bool = False): flavor_reshape(eko, inputpids=inputpids, targetpids=targetpids, update=False) # assign pids if source: - eko.rotations.inputpids = inputpids + eko.bases.inputpids = inputpids if target: - eko.rotations.targetpids = targetpids + eko.bases.targetpids = targetpids eko.update() diff --git a/tests/eko/io/test_init.py b/tests/eko/io/test_init.py index a4b9cfa74..e5ca0e139 100644 --- a/tests/eko/io/test_init.py +++ b/tests/eko/io/test_init.py @@ -1,23 +1,9 @@ import pathlib -import numpy as np import pytest -import eko from eko import EKO -from eko import basis_rotation as br -from eko import interpolation -from eko.io import legacy, manipulate, runcards -from ekobox.mock import eko_identity -from tests.conftest import EKOFactory - - -def chk_keys(a, b): - """Check all keys are preserved""" - assert sorted(a.keys()) == sorted(b.keys()) - for key, value in a.items(): - if isinstance(value, dict): - assert sorted(value.keys()) == sorted(b[key].keys()) +from eko.io import legacy class TestLegacy: @@ -27,213 +13,3 @@ def test_load_tar(self, out_v0, tmp_path: pathlib.Path): legacy.load_tar(out_v0, oppath) with EKO.read(oppath) as eko: assert eko.metadata.data_version == 0 - - -class TestManipulate: - def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): - # create object - muout = 10.0 - mu2out = muout**2 - epout = (mu2out, 5) - xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) - eko_factory.operator.mugrid = [(muout, 5)] - eko_factory.operator.xgrid = xg - o1 = eko_factory.get() - lpids = 2 - o1[epout] = eko.io.Operator( - operator=eko_identity([1, lpids, len(xg), lpids, len(xg)])[0] - ) - xgp = interpolation.XGrid(np.geomspace(1e-5, 1.0, 11)) - # only target - otpath = tmp_path / "ot.tar" - o1.deepcopy(otpath) - with EKO.edit(otpath) as ot: - manipulate.xgrid_reshape(ot, xgp) - chk_keys(o1.raw, ot.raw) - assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) - ottpath 
= tmp_path / "ott.tar" - o1.deepcopy(ottpath) - with EKO.edit(ottpath) as ott: - with pytest.warns(Warning): - manipulate.xgrid_reshape(ott, xg) - chk_keys(o1.raw, ott.raw) - np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) - - # only input - oipath = tmp_path / "oi.tar" - o1.deepcopy(oipath) - with EKO.edit(oipath) as oi: - manipulate.xgrid_reshape(oi, inputgrid=xgp) - assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xgp)) - chk_keys(o1.raw, oi.raw) - oiipath = tmp_path / "oii.tar" - o1.deepcopy(oiipath) - with EKO.edit(oiipath) as oii: - with pytest.warns(Warning): - manipulate.xgrid_reshape(oii, inputgrid=xg) - chk_keys(o1.raw, oii.raw) - np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) - - # both - oitpath = tmp_path / "oit.tar" - o1.deepcopy(oitpath) - with EKO.edit(oitpath) as oit: - manipulate.xgrid_reshape(oit, xgp, xgp) - chk_keys(o1.raw, oit.raw) - op = eko_identity([1, 2, len(xgp), 2, len(xgp)]) - np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) - - # error - with pytest.raises(ValueError): - manipulate.xgrid_reshape(o1) - - def test_reshape_io(self, eko_factory: EKOFactory, tmp_path): - eko_factory.path = tmp_path / "eko.tar" - eko_factory.operator.configs.interpolation_polynomial_degree = 1 - # create object - o1 = eko_factory.get() - lpids = len(o1.rotations.pids) - path_copy = tmp_path / "eko_copy.tar" - o1.deepcopy(path_copy) - newxgrid = interpolation.XGrid([0.1, 1.0]) - inputpids = np.eye(lpids) - inputpids[:2, :2] = np.array([[1, -1], [1, 1]]) - with EKO.edit(path_copy) as o2: - manipulate.xgrid_reshape(o2, newxgrid, newxgrid) - manipulate.flavor_reshape(o2, inputpids=inputpids) - # reload - with EKO.read(path_copy) as o3: - chk_keys(o1.raw, o3.raw) - np.testing.assert_allclose(o3.rotations.inputgrid.raw, newxgrid.raw) - np.testing.assert_allclose(o3.rotations.targetgrid.raw, newxgrid.raw) - # since we use a general rotation, the inputpids are erased, - # leaving just as many 
zeros as PIDs, as placeholders for missing - # values - np.testing.assert_allclose( - o3.rotations.inputpids, [0] * len(o3.rotations.pids) - ) - # these has to be unchanged - np.testing.assert_allclose(o3.rotations.targetpids, o3.rotations.pids) - - def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): - # create object - xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) - muout = 10.0 - mu2out = muout**2 - epout = (mu2out, 5) - eko_factory.operator.xgrid = xg - eko_factory.operator.mugrid = [(muout, 5)] - o1 = eko_factory.get() - lpids = len(o1.rotations.pids) - lx = len(xg) - o1[epout] = eko.io.Operator( - operator=eko_identity([1, lpids, lx, lpids, lx])[0], - error=None, - ) - # only target - target_r = np.eye(lpids) - target_r[:2, :2] = np.array([[1, -1], [1, 1]]) - tpath = tmp_path / "ot.tar" - ttpath = tmp_path / "ott.tar" - o1.deepcopy(tpath) - with EKO.edit(tpath) as ot: - manipulate.flavor_reshape(ot, target_r) - chk_keys(o1.raw, ot.raw) - assert ot[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) - ot.deepcopy(ttpath) - with EKO.edit(ttpath) as ott: - manipulate.flavor_reshape(ott, np.linalg.inv(target_r)) - np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) - with pytest.warns(Warning): - manipulate.flavor_reshape(ott, np.eye(lpids)) - chk_keys(o1.raw, ott.raw) - np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) - - # only input - input_r = np.eye(lpids) - input_r[:2, :2] = np.array([[1, -1], [1, 1]]) - ipath = tmp_path / "oi.tar" - iipath = tmp_path / "oii.tar" - o1.deepcopy(ipath) - with EKO.edit(ipath) as oi: - manipulate.flavor_reshape(oi, inputpids=input_r) - chk_keys(o1.raw, oi.raw) - assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) - oi.deepcopy(iipath) - with EKO.edit(iipath) as oii: - manipulate.flavor_reshape(oii, inputpids=np.linalg.inv(input_r)) - np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) - with pytest.warns(Warning): - 
manipulate.flavor_reshape(oii, inputpids=np.eye(lpids)) - chk_keys(o1.raw, oii.raw) - np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) - - # both - itpath = tmp_path / "oit.tar" - o1.deepcopy(itpath) - with EKO.edit(itpath) as oit: - manipulate.flavor_reshape(oit, target_r, input_r) - chk_keys(o1.raw, oit.raw) - op = eko_identity([1, lpids, len(xg), lpids, len(xg)]).copy() - np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) - # error - fpath = tmp_path / "fail.tar" - o1.deepcopy(fpath) - with pytest.raises(ValueError): - with EKO.edit(fpath) as of: - manipulate.flavor_reshape(of) - - def test_to_evol(self, eko_factory: EKOFactory, tmp_path): - xgrid = interpolation.XGrid([0.5, 1.0]) - mu_out = 2.0 - mu2_out = mu_out**2 - nfout = 4 - epout = (mu2_out, nfout) - eko_factory.operator.mu0 = float(np.sqrt(1.0)) - eko_factory.operator.mugrid = [(mu_out, nfout)] - eko_factory.operator.xgrid = xgrid - eko_factory.operator.configs.interpolation_polynomial_degree = 1 - eko_factory.operator.configs.interpolation_is_log = False - eko_factory.operator.configs.ev_op_max_order = (2, 0) - eko_factory.operator.configs.ev_op_iterations = 1 - eko_factory.operator.configs.inversion_method = runcards.InversionMethod.EXACT - o00 = eko_factory.get() - o01_path = tmp_path / "o01.tar" - o00.deepcopy(o01_path) - with EKO.edit(o01_path) as o01: - manipulate.to_evol(o01) - o10_path = tmp_path / "o10.tar" - o00.deepcopy(o10_path) - with EKO.edit(o10_path) as o10: - manipulate.to_evol(o10, False, True) - o11_path = tmp_path / "o11.tar" - o00.deepcopy(o11_path) - with EKO.edit(o11_path) as o11: - manipulate.to_evol(o11, True, True) - chk_keys(o00.raw, o11.raw) - - with EKO.edit(o01_path) as o01: - with EKO.edit(o10_path) as o10: - with EKO.read(o11_path) as o11: - # check the input rotated one - np.testing.assert_allclose( - o01.rotations.inputpids, br.rotate_flavor_to_evolution - ) - np.testing.assert_allclose( - o01.rotations.targetpids, br.flavor_basis_pids 
- ) - # rotate also target - manipulate.to_evol(o01, False, True) - np.testing.assert_allclose(o01[epout].operator, o11[epout].operator) - chk_keys(o00.raw, o01.raw) - # check the target rotated one - np.testing.assert_allclose( - o10.rotations.inputpids, br.flavor_basis_pids - ) - np.testing.assert_allclose( - o10.rotations.targetpids, br.rotate_flavor_to_evolution - ) - # rotate also input - manipulate.to_evol(o10) - np.testing.assert_allclose(o10[epout].operator, o11[epout].operator) - chk_keys(o00.raw, o10.raw) diff --git a/tests/eko/io/test_manipulate.py b/tests/eko/io/test_manipulate.py new file mode 100644 index 000000000..ee83b276a --- /dev/null +++ b/tests/eko/io/test_manipulate.py @@ -0,0 +1,228 @@ +import pathlib + +import numpy as np +import pytest + +import eko +from eko import EKO +from eko import basis_rotation as br +from eko import interpolation +from eko.io import manipulate, runcards +from ekobox.mock import eko_identity +from tests.conftest import EKOFactory + + +def chk_keys(a, b): + """Check all keys are preserved""" + assert sorted(a.keys()) == sorted(b.keys()) + for key, value in a.items(): + if isinstance(value, dict): + assert sorted(value.keys()) == sorted(b[key].keys()) + + +class TestManipulate: + def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): + # create object + muout = 10.0 + mu2out = muout**2 + epout = (mu2out, 5) + xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) + eko_factory.operator.mugrid = [(muout, 5)] + eko_factory.operator.xgrid = xg + o1 = eko_factory.get() + lpids = 2 + o1[epout] = eko.io.Operator( + operator=eko_identity([1, lpids, len(xg), lpids, len(xg)])[0] + ) + xgp = interpolation.XGrid(np.geomspace(1e-5, 1.0, 11)) + # only target + otpath = tmp_path / "ot.tar" + o1.deepcopy(otpath) + with EKO.edit(otpath) as ot: + manipulate.xgrid_reshape(ot, xgp) + chk_keys(o1.raw, ot.raw) + assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) + ottpath = tmp_path / "ott.tar" + 
o1.deepcopy(ottpath) + with EKO.edit(ottpath) as ott: + with pytest.warns(Warning): + manipulate.xgrid_reshape(ott, xg) + chk_keys(o1.raw, ott.raw) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) + + # only input + oipath = tmp_path / "oi.tar" + o1.deepcopy(oipath) + with EKO.edit(oipath) as oi: + manipulate.xgrid_reshape(oi, inputgrid=xgp) + assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xgp)) + chk_keys(o1.raw, oi.raw) + oiipath = tmp_path / "oii.tar" + o1.deepcopy(oiipath) + with EKO.edit(oiipath) as oii: + with pytest.warns(Warning): + manipulate.xgrid_reshape(oii, inputgrid=xg) + chk_keys(o1.raw, oii.raw) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) + + # both + oitpath = tmp_path / "oit.tar" + o1.deepcopy(oitpath) + with EKO.edit(oitpath) as oit: + manipulate.xgrid_reshape(oit, xgp, xgp) + chk_keys(o1.raw, oit.raw) + op = eko_identity([1, 2, len(xgp), 2, len(xgp)]) + np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) + + # error + with pytest.raises(ValueError): + manipulate.xgrid_reshape(o1) + + def test_reshape_io(self, eko_factory: EKOFactory, tmp_path): + eko_factory.path = tmp_path / "eko.tar" + eko_factory.operator.configs.interpolation_polynomial_degree = 1 + # create object + o1 = eko_factory.get() + lpids = len(o1.bases.pids) + path_copy = tmp_path / "eko_copy.tar" + o1.deepcopy(path_copy) + newxgrid = interpolation.XGrid([0.1, 1.0]) + inputpids = np.eye(lpids) + inputpids[:2, :2] = np.array([[1, -1], [1, 1]]) + with EKO.edit(path_copy) as o2: + manipulate.xgrid_reshape(o2, newxgrid, newxgrid) + manipulate.flavor_reshape(o2, inputpids=inputpids) + # reload + with EKO.read(path_copy) as o3: + chk_keys(o1.raw, o3.raw) + np.testing.assert_allclose(o3.bases.inputgrid.raw, newxgrid.raw) + np.testing.assert_allclose(o3.bases.targetgrid.raw, newxgrid.raw) + # since we use a general rotation, the inputpids are erased, + # leaving just as many zeros as PIDs, as placeholders for 
missing + # values + np.testing.assert_allclose(o3.bases.inputpids, [0] * len(o3.bases.pids)) + # these has to be unchanged + np.testing.assert_allclose(o3.bases.targetpids, o3.bases.pids) + + def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): + # create object + xg = interpolation.XGrid(np.geomspace(1e-5, 1.0, 21)) + muout = 10.0 + mu2out = muout**2 + epout = (mu2out, 5) + eko_factory.operator.xgrid = xg + eko_factory.operator.mugrid = [(muout, 5)] + o1 = eko_factory.get() + lpids = len(o1.bases.pids) + lx = len(xg) + o1[epout] = eko.io.Operator( + operator=eko_identity([1, lpids, lx, lpids, lx])[0], + error=None, + ) + # only target + target_r = np.eye(lpids) + target_r[:2, :2] = np.array([[1, -1], [1, 1]]) + tpath = tmp_path / "ot.tar" + ttpath = tmp_path / "ott.tar" + o1.deepcopy(tpath) + with EKO.edit(tpath) as ot: + manipulate.flavor_reshape(ot, target_r) + chk_keys(o1.raw, ot.raw) + assert ot[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) + ot.deepcopy(ttpath) + with EKO.edit(ttpath) as ott: + manipulate.flavor_reshape(ott, np.linalg.inv(target_r)) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) + with pytest.warns(Warning): + manipulate.flavor_reshape(ott, np.eye(lpids)) + chk_keys(o1.raw, ott.raw) + np.testing.assert_allclose(ott[epout].operator, o1[epout].operator) + + # only input + input_r = np.eye(lpids) + input_r[:2, :2] = np.array([[1, -1], [1, 1]]) + ipath = tmp_path / "oi.tar" + iipath = tmp_path / "oii.tar" + o1.deepcopy(ipath) + with EKO.edit(ipath) as oi: + manipulate.flavor_reshape(oi, inputpids=input_r) + chk_keys(o1.raw, oi.raw) + assert oi[epout].operator.shape == (lpids, len(xg), lpids, len(xg)) + oi.deepcopy(iipath) + with EKO.edit(iipath) as oii: + manipulate.flavor_reshape(oii, inputpids=np.linalg.inv(input_r)) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) + with pytest.warns(Warning): + manipulate.flavor_reshape(oii, inputpids=np.eye(lpids)) + 
chk_keys(o1.raw, oii.raw) + np.testing.assert_allclose(oii[epout].operator, o1[epout].operator) + + # both + itpath = tmp_path / "oit.tar" + o1.deepcopy(itpath) + with EKO.edit(itpath) as oit: + manipulate.flavor_reshape(oit, target_r, input_r) + chk_keys(o1.raw, oit.raw) + op = eko_identity([1, lpids, len(xg), lpids, len(xg)]).copy() + np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) + # error + fpath = tmp_path / "fail.tar" + o1.deepcopy(fpath) + with pytest.raises(ValueError): + with EKO.edit(fpath) as of: + manipulate.flavor_reshape(of) + + def test_to_evol(self, eko_factory: EKOFactory, tmp_path): + xgrid = interpolation.XGrid([0.5, 1.0]) + mu_out = 2.0 + mu2_out = mu_out**2 + nfout = 4 + epout = (mu2_out, nfout) + eko_factory.operator.mu0 = float(np.sqrt(1.0)) + eko_factory.operator.mugrid = [(mu_out, nfout)] + eko_factory.operator.xgrid = xgrid + eko_factory.operator.configs.interpolation_polynomial_degree = 1 + eko_factory.operator.configs.interpolation_is_log = False + eko_factory.operator.configs.ev_op_max_order = (2, 0) + eko_factory.operator.configs.ev_op_iterations = 1 + eko_factory.operator.configs.inversion_method = runcards.InversionMethod.EXACT + o00 = eko_factory.get() + o01_path = tmp_path / "o01.tar" + o00.deepcopy(o01_path) + with EKO.edit(o01_path) as o01: + manipulate.to_evol(o01) + o10_path = tmp_path / "o10.tar" + o00.deepcopy(o10_path) + with EKO.edit(o10_path) as o10: + manipulate.to_evol(o10, False, True) + o11_path = tmp_path / "o11.tar" + o00.deepcopy(o11_path) + with EKO.edit(o11_path) as o11: + manipulate.to_evol(o11, True, True) + chk_keys(o00.raw, o11.raw) + + with EKO.edit(o01_path) as o01: + with EKO.edit(o10_path) as o10: + with EKO.read(o11_path) as o11: + # check the input rotated one + np.testing.assert_allclose( + o01.bases.inputpids, br.rotate_flavor_to_evolution + ) + np.testing.assert_allclose( + o01.bases.targetpids, br.flavor_basis_pids + ) + # rotate also target + manipulate.to_evol(o01, False, True) 
+ np.testing.assert_allclose(o01[epout].operator, o11[epout].operator) + chk_keys(o00.raw, o01.raw) + # check the target rotated one + np.testing.assert_allclose( + o10.bases.inputpids, br.flavor_basis_pids + ) + np.testing.assert_allclose( + o10.bases.targetpids, br.rotate_flavor_to_evolution + ) + # rotate also input + manipulate.to_evol(o10) + np.testing.assert_allclose(o10[epout].operator, o11[epout].operator) + chk_keys(o00.raw, o10.raw) From fe6899503f8d6cf4eb5a05f9ed0893a81081afdd Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:28:44 +0200 Subject: [PATCH 078/154] Sync operators at load time --- src/eko/io/struct.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 56a31c1e8..c7f705b14 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -216,7 +216,6 @@ def operator(self, ep: EPoint): def __iter__(self): """Iterate over keys (i.e. evolution points).""" - self.operators.sync() for target in self.operators: yield target.ep @@ -237,13 +236,14 @@ def items(self): immediately after """ - self.operators.sync() for target in self.operators: # recast to evolution point ep = target.ep # auto-load - yield ep, self[ep] + op = self[ep] + assert op is not None + yield ep, op # auto-unload del self[ep] @@ -377,6 +377,7 @@ def open(cls, path: os.PathLike, mode="r"): metadata=metadata, access=access, ) + opened.operators.sync() else: opened = Builder(path=tmpdir, access=access) From 233878eb8c016ed45ef23fb7f7db565932af5099 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:29:15 +0200 Subject: [PATCH 079/154] Fix approx and its user (ekos product) --- src/eko/io/struct.py | 6 +++--- src/ekobox/utils.py | 19 +++++++++++++------ tests/ekobox/test_utils.py | 7 ++----- 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index c7f705b14..82452c407 100644 --- a/src/eko/io/struct.py +++ 
b/src/eko/io/struct.py @@ -275,9 +275,9 @@ def approx( mu2s = np.array([mu2 for mu2, _ in eps]) close = eps[np.isclose(ep[0], mu2s, rtol=rtol, atol=atol)] - if close.size == 1: - return close[0] - if close.size == 0: + if len(close) == 1: + return tuple(close[0]) + if len(close) == 0: return None raise ValueError(f"Multiple values of Q2 have been found close to {ep}") diff --git a/src/ekobox/utils.py b/src/ekobox/utils.py index b7fdd995b..095e605e9 100644 --- a/src/ekobox/utils.py +++ b/src/ekobox/utils.py @@ -36,14 +36,21 @@ def ekos_product( # TODO: add a control on the theory (but before we need to implement # another kind of output which includes the theory and operator runcards) - q2match = eko_ini.approx(eko_fin.operator_card.mu0**2, rtol=rtol, atol=atol) - if q2match is None: + ep_match = eko_ini.approx( + (eko_fin.operator_card.mu0**2, eko_fin.theory_card.heavy.num_flavs_init), + rtol=rtol, + atol=atol, + ) + if ep_match is None: raise ValueError( "Initial Q2 of final eko operator does not match any final Q2 in" " the initial eko operator" ) - ope1 = eko_ini[q2match].operator.copy() - ope1_error = eko_ini[q2match].error + ope1 = eko_ini[ep_match] + assert ope1 is not None + + ope1_value = ope1.operator.copy() + ope1_error = ope1.error if ope1_error is not None: ope1_error = ope1_error.copy() @@ -57,10 +64,10 @@ def ekos_product( if q2 in eko_ini: continue - op = np.einsum(CONTRACTION, ope1, op2.operator) + op = np.einsum(CONTRACTION, ope1_value, op2.operator) if ope1_error is not None and op2.error is not None: - error = np.einsum(CONTRACTION, ope1, op2.error) + np.einsum( + error = np.einsum(CONTRACTION, ope1_value, op2.error) + np.einsum( CONTRACTION, ope1_error, op2.operator ) else: diff --git a/tests/ekobox/test_utils.py b/tests/ekobox/test_utils.py index d4b896927..51ef5e285 100644 --- a/tests/ekobox/test_utils.py +++ b/tests/ekobox/test_utils.py @@ -16,6 +16,7 @@ def test_ekos_product(tmp_path): theory = cards.example.theory() theory.order = (1, 0) + 
theory.heavy.num_flavs_init = 5 op1 = cards.example.operator() op1.mu0 = mu01 @@ -35,8 +36,7 @@ def test_ekos_product(tmp_path): op_err = copy.deepcopy(op2) op_err.mu0 = mu01 - mu2first = mugrid2[0][0] ** 2 - mu2last = mugrid2[-1][0] ** 2 + mu2first = (mugrid2[0][0] ** 2, mugrid2[0][1]) ini_path = tmp_path / "ini.tar" eko.solve(theory, op1, path=ini_path) @@ -52,7 +52,6 @@ def test_ekos_product(tmp_path): _ = utils.ekos_product(eko_ini, eko_fin_err) # product is copied res_path = tmp_path / "eko_res.tar" - eko_fin[mu2last].error = None # drop one set of errors utils.ekos_product(eko_ini, eko_fin, path=res_path) with EKO.read(res_path) as eko_res: @@ -70,5 +69,3 @@ def test_ekos_product(tmp_path): eko_res[mu2first].operator, eko_ini[mu2first].operator ) utils.ekos_product(eko_ini, eko_fin) - - assert eko_res[mu2last].error is None From 788368c443914020dfca59ea1829c09039f4cbe6 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:43:12 +0200 Subject: [PATCH 080/154] Drop non-operator setting, fix checks against bare cache --- tests/eko/io/test_struct.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index 5a3bb069c..c871802f2 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -6,10 +6,9 @@ import pytest import yaml -from eko import EKO -from eko import basis_rotation as br -from eko import interpolation +from eko import EKO, interpolation from eko.io import struct +from eko.io.items import Target from tests.conftest import EKOFactory @@ -67,13 +66,10 @@ def test_ops(self, eko_factory: EKOFactory): eko = eko_factory.get() v = np.random.rand(2, 2) opv = struct.Operator(operator=v) - # try setting not an operator - with pytest.raises(ValueError): - eko[ep] = "bla" # approx eko[ep] = opv assert eko.approx((2 * mu2, nf)) is None - assert eko.approx((mu2 + 1.0, nf), atol=2) == mu2 + assert eko.approx((mu2 + 1.0, nf), atol=2)[0] == 
mu2 eko[(mu2 + 1.0, nf)] = opv with pytest.raises(ValueError): eko.approx((mu2 + 0.5, nf), atol=2) @@ -136,14 +132,14 @@ def test_items(self, eko_factory: EKOFactory): ep = next(iter(eko)) # unload - eko.operators[ep] = None + eko.operators.cache[Target.from_ep(ep)] = None # test autoloading assert isinstance(eko[ep], struct.Operator) - assert isinstance(eko.operators[ep], struct.Operator) + assert isinstance(eko.operators[Target.from_ep(ep)], struct.Operator) del eko[ep] - assert eko.operators[ep] is None + assert eko.operators.cache[Target.from_ep(ep)] is None def test_iter(self, eko_factory: EKOFactory): """Test managed iteration.""" From f71cece89d69bf761b1df1cb01222c9bf17775a8 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:47:06 +0200 Subject: [PATCH 081/154] Other checks moved down to bare cache, again --- tests/eko/io/test_struct.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index c871802f2..e7ebacfa4 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -149,7 +149,7 @@ def test_iter(self, eko_factory: EKOFactory): epprev = None for ep, op in eko.items(): if epprev is not None: - assert eko.operators[epprev] is None + assert eko.operators.cache[Target.from_ep(epprev)] is None assert isinstance(op, struct.Operator) epprev = ep @@ -161,4 +161,4 @@ def test_context_operator(self, eko_factory: EKOFactory): with eko.operator(ep) as op: assert isinstance(op, struct.Operator) - assert eko.operators[ep] is None + assert eko.operators.cache[Target.from_ep(ep)] is None From d09c14de7afc97fb0a17dd4abb9971cb671c4f03 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 12:53:23 +0200 Subject: [PATCH 082/154] Lift flavor tensor blow-up --- src/eko/evolution_operator/physical.py | 52 -------------------------- src/eko/member.py | 47 +++++++++++++++++++++++ 2 files changed, 47 insertions(+), 52 deletions(-) diff --git 
a/src/eko/evolution_operator/physical.py b/src/eko/evolution_operator/physical.py index 10164fd68..c963e2a26 100644 --- a/src/eko/evolution_operator/physical.py +++ b/src/eko/evolution_operator/physical.py @@ -1,10 +1,6 @@ """Contains the :class:`PhysicalOperator` class.""" - -import numpy as np - from .. import basis_rotation as br from .. import member -from . import flavors class PhysicalOperator(member.OperatorBase): @@ -109,51 +105,3 @@ def ad_to_evol_map(cls, op_members, nf, q2_final, intrinsic_range, qed=False): m[f"{hq}-.{hq}-"] = op_id.copy() # map key to MemberName return cls.promote_names(m, q2_final) - - def to_flavor_basis_tensor(self, qed=False): - """ - Convert the computations into an rank 4 tensor over flavor operator space and momentum fraction operator space. - - Parameters - ---------- - qed : bool - activate qed - - Returns - ------- - tensor : numpy.ndarray - EKO - """ - nf_in, nf_out = flavors.get_range(self.op_members.keys(), qed) - len_pids = len(br.flavor_basis_pids) - len_xgrid = list(self.op_members.values())[0].value.shape[0] - # dimension will be pids^2 * xgrid^2 - value_tensor = np.zeros((len_pids, len_xgrid, len_pids, len_xgrid)) - error_tensor = value_tensor.copy() - for name, op in self.op_members.items(): - if not qed: - in_pids = flavors.pids_from_intrinsic_evol(name.input, nf_in, False) - out_pids = flavors.pids_from_intrinsic_evol(name.target, nf_out, True) - else: - in_pids = flavors.pids_from_intrinsic_unified_evol( - name.input, nf_in, False - ) - out_pids = flavors.pids_from_intrinsic_unified_evol( - name.target, nf_out, True - ) - for out_idx, out_weight in enumerate(out_pids): - for in_idx, in_weight in enumerate(in_pids): - # keep the outer index to the left as we're multiplying from the right - value_tensor[ - out_idx, # output pid (position) - :, # output momentum fraction - in_idx, # input pid (position) - :, # input momentum fraction - ] += out_weight * (op.value * in_weight) - error_tensor[ - out_idx, # output pid 
(position) - :, # output momentum fraction - in_idx, # input pid (position) - :, # input momentum fraction - ] += out_weight * (op.error * in_weight) - return value_tensor, error_tensor diff --git a/src/eko/member.py b/src/eko/member.py index ad3fb2e19..f03b0e9b4 100644 --- a/src/eko/member.py +++ b/src/eko/member.py @@ -5,6 +5,9 @@ import numpy as np +from . import basis_rotation as br +from .evolution_operator import flavors + class OpMember: """ @@ -263,6 +266,50 @@ def operator_multiply(left, right, operation): new_oms[new_key] += operation(l_op, r_op) return new_oms + def to_flavor_basis_tensor(self, qed: bool = False): + """Convert the computations into an rank 4 tensor. + + A sparse tensor defined with dot-notation (e.g. ``S.g``) is converted + to a plain rank-4 array over flavor operator space and momentum + fraction operator space. + + If `qed` is passed, the unified intrinsic basis is used. + + """ + nf_in, nf_out = flavors.get_range(self.op_members.keys(), qed) + len_pids = len(br.flavor_basis_pids) + len_xgrid = list(self.op_members.values())[0].value.shape[0] + # dimension will be pids^2 * xgrid^2 + value_tensor = np.zeros((len_pids, len_xgrid, len_pids, len_xgrid)) + error_tensor = value_tensor.copy() + for name, op in self.op_members.items(): + if not qed: + in_pids = flavors.pids_from_intrinsic_evol(name.input, nf_in, False) + out_pids = flavors.pids_from_intrinsic_evol(name.target, nf_out, True) + else: + in_pids = flavors.pids_from_intrinsic_unified_evol( + name.input, nf_in, False + ) + out_pids = flavors.pids_from_intrinsic_unified_evol( + name.target, nf_out, True + ) + for out_idx, out_weight in enumerate(out_pids): + for in_idx, in_weight in enumerate(in_pids): + # keep the outer index to the left as we're multiplying from the right + value_tensor[ + out_idx, # output pid (position) + :, # output momentum fraction + in_idx, # input pid (position) + :, # input momentum fraction + ] += out_weight * (op.value * in_weight) + error_tensor[ + 
out_idx, # output pid (position) + :, # output momentum fraction + in_idx, # input pid (position) + :, # input momentum fraction + ] += out_weight * (op.error * in_weight) + return value_tensor, error_tensor + class ScalarOperator(OperatorBase): """Operator above space of real numbers.""" From dc7e4786ea04a366fcfe25be58f79900fd52dfbf Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 13:36:20 +0200 Subject: [PATCH 083/154] Temporarily disable pylint on open, as in the legacy runner --- src/eko/runner/managed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 01712adaa..5082e11a5 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -21,7 +21,7 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): """Solve DGLAP equations in terms of evolution kernel operators (EKO).""" with EKO.create(path) as builder: - eko = builder.load_cards(theory, operator).build() + eko = builder.load_cards(theory, operator).build() # pylint: disable=E1101 atlas = commons.atlas(eko.theory_card, eko.operator_card) From f8e92284f3865ce7bec98277575871ff0b9d7a40 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 14:13:03 +0200 Subject: [PATCH 084/154] Pin nf0=4 in LHA FFNS --- benchmarks/lha_paper_bench.py | 1 + 1 file changed, 1 insertion(+) diff --git a/benchmarks/lha_paper_bench.py b/benchmarks/lha_paper_bench.py index 71ddf3550..9f54d1616 100644 --- a/benchmarks/lha_paper_bench.py +++ b/benchmarks/lha_paper_bench.py @@ -204,6 +204,7 @@ def __init__(self): "FNS": "FFNS", # ignored by eko, but needed by LHA_utils "NfFF": 4, "nfref": 4, + "nf0": 4, "kcThr": 0.0, "kbThr": np.inf, "ktThr": np.inf, From d60b2b03b47f7875e74c1f1497937fa9f9d70f96 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 14:44:43 +0200 Subject: [PATCH 085/154] Update tests/eko/test_matchings.py Co-authored-by: Felix Hekhorn --- tests/eko/test_matchings.py 
| 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/eko/test_matchings.py b/tests/eko/test_matchings.py index 0e6a77174..4236d6cf8 100644 --- a/tests/eko/test_matchings.py +++ b/tests/eko/test_matchings.py @@ -1,6 +1,4 @@ -""" - Tests for the threshold class -""" +"""Tests for the threshold class""" from dataclasses import astuple import numpy as np From 8c7baaa9448f48aaec9d3d37ee652e1b9aaba9c7 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 14:45:45 +0200 Subject: [PATCH 086/154] Update tests/eko/test_matchings.py Co-authored-by: Felix Hekhorn --- tests/eko/test_matchings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/eko/test_matchings.py b/tests/eko/test_matchings.py index 4236d6cf8..fd1ca34b5 100644 --- a/tests/eko/test_matchings.py +++ b/tests/eko/test_matchings.py @@ -20,7 +20,7 @@ def test_str(self): p = Segment(0, 1, 3) s = str(p) assert s.index("0") > 0 - assert s.index("3") > 0 + assert s.index("1") > 0 assert s.index("3") > 0 From 75120f3e294cbd75a612e03251bbfa7cc4157255 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 14:48:54 +0200 Subject: [PATCH 087/154] Improve docstring --- src/eko/matchings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index d03a5143a..566faa4a2 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -40,7 +40,8 @@ def __str__(self): class Matching: """Matching between two different segments. - The meaning of the flavor index is the PID of the corresponding heavy quark. + The meaning of the flavor index `hq` is the PID of the corresponding heavy + quark. 
""" From 44db62e9c36af500904f048a23bf8128b0265c26 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 15:26:15 +0200 Subject: [PATCH 088/154] Fix a few strong coupling benchmarks and evol-to-unity --- benchmarks/eko/benchmark_evol_to_unity.py | 10 +++++----- benchmarks/eko/benchmark_strong_coupling.py | 22 ++++++++++----------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/benchmarks/eko/benchmark_evol_to_unity.py b/benchmarks/eko/benchmark_evol_to_unity.py index 0824a39d3..41bc4b4d0 100644 --- a/benchmarks/eko/benchmark_evol_to_unity.py +++ b/benchmarks/eko/benchmark_evol_to_unity.py @@ -8,6 +8,7 @@ from eko.interpolation import XGrid from eko.io import types from eko.io.runcards import OperatorCard, TheoryCard +from eko.matchings import Segment from eko.quantities.couplings import CouplingsInfo from eko.runner.legacy import Runner @@ -53,11 +54,10 @@ def test_operator_grid( path=tmp_path / "eko.tar", ).op_grid - q20 = 30 - q21 = 50 - nf = 4 - o = Operator(g.config, g.managers, nf, q20, q21) - o_back = Operator(g.config, g.managers, nf, q21, q20) + seg = Segment(30, 50, 4) + seg_back = Segment(50, 30, 4) + o = Operator(g.config, g.managers, seg) + o_back = Operator(g.config, g.managers, seg_back) o.compute() o_back.compute() diff --git a/benchmarks/eko/benchmark_strong_coupling.py b/benchmarks/eko/benchmark_strong_coupling.py index ed186ca67..3f004db74 100644 --- a/benchmarks/eko/benchmark_strong_coupling.py +++ b/benchmarks/eko/benchmark_strong_coupling.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from eko import thresholds +from eko import matchings from eko.beta import beta_qcd from eko.couplings import Couplings from eko.io.runcards import TheoryCard @@ -138,13 +138,13 @@ def benchmark_APFEL_ffns(self): ), } # collect my values - threshold_holder = thresholds.ThresholdsAtlas.ffns(nf) + threshold_holder = matchings.Atlas.ffns(nf, 0.0) for order in [1, 2, 3]: as_FFNS = Couplings( couplings=ref_couplings(coupling_ref, 
scale_ref), order=(order, 0), method=CouplingEvolutionMethod.EXPANDED, - masses=threshold_holder.area_walls[1:-1], + masses=threshold_holder.walls[1:-1], hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) @@ -211,7 +211,7 @@ def benchmark_pegasus_ffns(self): ), } # collect my values - threshold_holder = thresholds.ThresholdsAtlas.ffns(nf) + threshold_holder = matchings.Atlas.ffns(nf, 0.0) couplings = ref_couplings(coupling_ref, scale_ref) couplings.max_num_flavs = 4 for order in [1, 2, 3, 4]: @@ -219,7 +219,7 @@ def benchmark_pegasus_ffns(self): couplings=couplings, order=(order, 0), method=CouplingEvolutionMethod.EXACT, - masses=threshold_holder.area_walls[1:-1], + masses=threshold_holder.walls[1:-1], hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) @@ -584,12 +584,12 @@ def benchmark_lhapdf_ffns_lo(self): scale_ref = 91.0 nf = 4 # collect my values - threshold_holder = thresholds.ThresholdsAtlas.ffns(nf) + threshold_holder = matchings.Atlas.ffns(nf, 0.0) as_FFNS_LO = Couplings( couplings=ref_couplings(coupling_ref, scale_ref), order=(1, 0), method=CouplingEvolutionMethod.EXACT, - masses=threshold_holder.area_walls[1:-1], + masses=threshold_holder.walls[1:-1], hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) @@ -630,7 +630,7 @@ def benchmark_apfel_exact(self): coupling_ref = np.array([0.118, 0.007496]) scale_ref = 90 # collect my values - threshold_holder = thresholds.ThresholdsAtlas.ffns(3) + threshold_holder = matchings.Atlas.ffns(3, 0.0) # LHAPDF cache apfel_vals_dict = { 1: np.array( @@ -663,7 +663,7 @@ def benchmark_apfel_exact(self): couplings=ref_couplings(coupling_ref, scale_ref), order=(order, 0), method=CouplingEvolutionMethod.EXACT, - masses=threshold_holder.area_walls[1:-1], + masses=threshold_holder.walls[1:-1], hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) @@ -697,7 +697,7 @@ def benchmark_lhapdf_exact(self): coupling_ref = np.array([0.118, 0.007496]) scale_ref = 90 
# collect my values - threshold_holder = thresholds.ThresholdsAtlas.ffns(3) + threshold_holder = matchings.Atlas.ffns(3, 0.0) # LHAPDF cache lhapdf_vals_dict = { 1: np.array( @@ -738,7 +738,7 @@ def benchmark_lhapdf_exact(self): couplings=ref_couplings(coupling_ref, scale_ref), order=(order, 0), method=CouplingEvolutionMethod.EXACT, - masses=threshold_holder.area_walls[1:-1], + masses=threshold_holder.walls[1:-1], hqm_scheme=QuarkMassScheme.POLE, thresholds_ratios=[1.0, 1.0, 1.0], ) From 323fec95644a744976f0f47671a100d72cbed645 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 15:52:58 +0200 Subject: [PATCH 089/154] Fix one more strong coupling test --- benchmarks/eko/benchmark_strong_coupling.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/benchmarks/eko/benchmark_strong_coupling.py b/benchmarks/eko/benchmark_strong_coupling.py index 3f004db74..d13a264bd 100644 --- a/benchmarks/eko/benchmark_strong_coupling.py +++ b/benchmarks/eko/benchmark_strong_coupling.py @@ -861,20 +861,18 @@ def benchmark_APFEL_fact_to_ren_lha_settings(self, theory_card: TheoryCard): ) for mu2 in mu2s: my_val = sc.a(mu2 * xif2)[0] - path = sc.thresholds.path(mu2 * xif2) my_val_4 = sc.a(mu2 * xif2, nf_to=4)[0] - path_4 = sc.thresholds.path(mu2 * xif2, 4) + path_4 = sc.atlas.path((mu2 * xif2, 4)) my_val_3 = sc.a(mu2 * xif2, nf_to=3)[0] - path_3 = sc.thresholds.path(mu2 * xif2, 3) + path_3 = sc.atlas.path((mu2 * xif2, 3)) # path_4 it's not matched assert len(path_4) == 1 # path_3 is the same as path backward in nf and in q2. 
assert len(path_3) == 2 - assert len(path) == 2 assert path_3[1].nf < path_3[0].nf - assert path_3[1].q2_from < path_3[0].q2_from + assert path_3[1].origin < path_3[0].origin apfel_val_ref = 0.03478112968976964 if use_APFEL: From cd296a332f5c8f68b7c8bd4bc81f1e2d64d93ce0 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 15:57:06 +0200 Subject: [PATCH 090/154] Provide missing argument in msbar evolution --- benchmarks/eko/benchmark_msbar_evolution.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/benchmarks/eko/benchmark_msbar_evolution.py b/benchmarks/eko/benchmark_msbar_evolution.py index 26b8873af..6286e15ad 100644 --- a/benchmarks/eko/benchmark_msbar_evolution.py +++ b/benchmarks/eko/benchmark_msbar_evolution.py @@ -112,8 +112,8 @@ def benchmark_APFEL_msbar_evolution( order=theory_card.order, evmeth=couplevmeth, matching=np.power( - list(iter(theory_card.heavy.matching_ratios)), 2.0 - ), + theory_card.heavy.matching_ratios, 2.0 + ).tolist(), xif2=theory_card.xif**2, ).tolist(), thresholds_ratios=np.power( @@ -131,6 +131,9 @@ def benchmark_APFEL_msbar_evolution( qmasses[n].value ** 2, qmasses[n].scale ** 2, strong_coupling=as_VFNS, + thresholds_ratios=np.power( + theory_card.heavy.matching_ratios, 2.0 + ).tolist(), xif2=1.0, q2_to=Q2, nf_ref=n + 3, From 36a8251789b8c9e8d8af7f2c220486ad4cb35d90 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 16:15:31 +0200 Subject: [PATCH 091/154] Fix parts assignment --- src/eko/runner/managed.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 5082e11a5..2a91289b4 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -33,9 +33,10 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): eko.parts[recipe] = parts.evolve(eko, recipe) for recipe in eko.recipes_matching: assert isinstance(recipe, Matching) - eko.parts[recipe] = parts.match(eko, recipe) + 
eko.parts_matching[recipe] = parts.match(eko, recipe) for ep in operator.evolgrid: headers = recipes.elements(ep, atlas) + __import__("pdb").set_trace() parts_ = operators.retrieve(headers, eko.parts, eko.parts_matching) eko.operators[Target.from_ep(ep)] = operators.join(parts_) From 8f701c9d615df8078674468c16fc108d8afa1d07 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 16:30:53 +0200 Subject: [PATCH 092/154] Test elements creation --- tests/eko/runner/test_recipes.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/eko/runner/test_recipes.py b/tests/eko/runner/test_recipes.py index 581ea3a8d..c79f675ad 100644 --- a/tests/eko/runner/test_recipes.py +++ b/tests/eko/runner/test_recipes.py @@ -3,7 +3,11 @@ import pytest from eko import EKO +from eko.io.items import Evolution, Matching from eko.io.runcards import OperatorCard, TheoryCard +from eko.matchings import Atlas +from eko.quantities.heavy_quarks import MatchingScales +from eko.runner.recipes import elements @pytest.fixture @@ -15,5 +19,26 @@ def neweko(theory_card: TheoryCard, operator_card: OperatorCard, tmp_path: Path) path.unlink(missing_ok=True) +def test_elements(): + scales = MatchingScales([10.0, 20.0, 30.0]) + + atlas = Atlas(scales, (50.0, 5)) + onestep = elements((60.0, 5), atlas) + assert len(onestep) == 1 + assert isinstance(onestep[0], Evolution) + assert not onestep[0].cliff + + backandforth = elements((60.0, 6), atlas) + assert len(backandforth) == 3 + assert isinstance(backandforth[0], Evolution) + assert backandforth[0].cliff + assert isinstance(backandforth[1], Matching) + assert not backandforth[1].inverse + + down = elements((5.0, 3), atlas) + assert all([isinstance(el, Evolution) for i, el in enumerate(down) if i % 2 == 0]) + assert all([isinstance(el, Matching) for i, el in enumerate(down) if i % 2 == 1]) + + def test_create(neweko: EKO): pass From 2a67d18e9ff871e77769eb76d8a4f65d0056339a Mon Sep 17 00:00:00 2001 From: Alessandro 
Candido Date: Thu, 4 May 2023 16:40:49 +0200 Subject: [PATCH 093/154] Test recipes creation, deduplicate them --- src/eko/runner/recipes.py | 2 +- tests/eko/runner/test_recipes.py | 43 ++++++++++++++++---------------- 2 files changed, 22 insertions(+), 23 deletions(-) diff --git a/src/eko/runner/recipes.py b/src/eko/runner/recipes.py index 83810bb5c..7a09554cb 100644 --- a/src/eko/runner/recipes.py +++ b/src/eko/runner/recipes.py @@ -32,4 +32,4 @@ def create(evolgrid: List[EPoint], atlas: Atlas) -> List[Recipe]: for ep in evolgrid: recipes.extend(elements(ep, atlas)) - return recipes + return list(set(recipes)) diff --git a/tests/eko/runner/test_recipes.py b/tests/eko/runner/test_recipes.py index c79f675ad..59b1a8855 100644 --- a/tests/eko/runner/test_recipes.py +++ b/tests/eko/runner/test_recipes.py @@ -1,44 +1,43 @@ -from pathlib import Path +from typing import List -import pytest - -from eko import EKO from eko.io.items import Evolution, Matching -from eko.io.runcards import OperatorCard, TheoryCard +from eko.io.types import EvolutionPoint from eko.matchings import Atlas from eko.quantities.heavy_quarks import MatchingScales -from eko.runner.recipes import elements - +from eko.runner.recipes import create, elements -@pytest.fixture -def neweko(theory_card: TheoryCard, operator_card: OperatorCard, tmp_path: Path): - path = tmp_path / "eko.tar" - with EKO.create(path) as builder: - yield builder.load_cards(theory_card, operator_card).build() - - path.unlink(missing_ok=True) +SCALES = MatchingScales([10.0, 20.0, 30.0]) +ATLAS = Atlas(SCALES, (50.0, 5)) def test_elements(): - scales = MatchingScales([10.0, 20.0, 30.0]) - - atlas = Atlas(scales, (50.0, 5)) - onestep = elements((60.0, 5), atlas) + onestep = elements((60.0, 5), ATLAS) assert len(onestep) == 1 assert isinstance(onestep[0], Evolution) assert not onestep[0].cliff - backandforth = elements((60.0, 6), atlas) + backandforth = elements((60.0, 6), ATLAS) assert len(backandforth) == 3 assert 
isinstance(backandforth[0], Evolution) assert backandforth[0].cliff assert isinstance(backandforth[1], Matching) assert not backandforth[1].inverse - down = elements((5.0, 3), atlas) + down = elements((5.0, 3), ATLAS) assert all([isinstance(el, Evolution) for i, el in enumerate(down) if i % 2 == 0]) assert all([isinstance(el, Matching) for i, el in enumerate(down) if i % 2 == 1]) -def test_create(neweko: EKO): - pass +def test_create(): + evolgrid: List[EvolutionPoint] = [(60.0, 5)] + + recs = create(evolgrid, ATLAS) + assert len(recs) == 1 + + evolgrid.append((60.0, 6)) + recs = create(evolgrid, ATLAS) + assert len(recs) == 1 + 3 + + evolgrid.append((70.0, 6)) + recs = create(evolgrid, ATLAS) + assert len(recs) == 1 + 3 + 1 From 23de89ab356e5961a88c35fd0bf9d2cda7c91924 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 16:46:29 +0200 Subject: [PATCH 094/154] Fix sorting in eko_product --- tests/ekobox/test_utils.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/ekobox/test_utils.py b/tests/ekobox/test_utils.py index 51ef5e285..557db679c 100644 --- a/tests/ekobox/test_utils.py +++ b/tests/ekobox/test_utils.py @@ -56,7 +56,9 @@ def test_ekos_product(tmp_path): with EKO.read(res_path) as eko_res: assert eko_res.operator_card.mu20 == eko_ini.operator_card.mu20 - np.testing.assert_allclose(eko_res.mu2grid[1:], eko_fin.mu2grid) + np.testing.assert_allclose( + sorted(eko_res.mu2grid)[1:], sorted(eko_fin.mu2grid) + ) np.testing.assert_allclose( eko_ini[mu2first].operator, eko_res[mu2first].operator ) @@ -64,7 +66,9 @@ def test_ekos_product(tmp_path): # product overwrites initial utils.ekos_product(eko_ini, eko_fin) - np.testing.assert_allclose(eko_ini.mu2grid[1:], eko_fin.mu2grid) + np.testing.assert_allclose( + sorted(eko_ini.mu2grid)[1:], sorted(eko_fin.mu2grid) + ) np.testing.assert_allclose( eko_res[mu2first].operator, eko_ini[mu2first].operator ) From cf30aafca86a1bb851c3c02d00881bd92b584b6e Mon Sep 17 00:00:00 2001 
From: Felix Hekhorn Date: Thu, 4 May 2023 17:14:03 +0200 Subject: [PATCH 095/154] Fix test eko/couplings --- tests/eko/test_couplings.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/eko/test_couplings.py b/tests/eko/test_couplings.py index c78a51914..6fd901a4d 100644 --- a/tests/eko/test_couplings.py +++ b/tests/eko/test_couplings.py @@ -226,7 +226,10 @@ def test_exact(self): for thresh_setup in thresh_setups: for qcd in range(1, 4 + 1): for qed in range(2 + 1): - for qedref in [muref, nan]: # testing both running and fixed alpha + for em_running in [ + True, + False, + ]: # testing both running and fixed alpha pto = (qcd, qed) couplings = CouplingsInfo.from_dict( dict( @@ -235,6 +238,7 @@ def test_exact(self): scale=muref, num_flavs_ref=None, max_num_flavs=6, + em_running=em_running, ) ) sc_expanded = Couplings( @@ -273,7 +277,7 @@ def test_exact(self): sc_exact.a(q2)[1], rtol=precisions[1], ) - if qedref is nan or qed == 0: + if not em_running or qed == 0: np.testing.assert_allclose( sc_expanded.a(q2)[1], alpharef[1] / (4 * np.pi), From 8b917b6972551b84a040bc181fa52b2baf2da168 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 4 May 2023 17:47:32 +0200 Subject: [PATCH 096/154] Simplify ekore/harmonics/cache --- src/ekore/harmonics/cache.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/ekore/harmonics/cache.py b/src/ekore/harmonics/cache.py index fb297fe4a..6b22d61d1 100644 --- a/src/ekore/harmonics/cache.py +++ b/src/ekore/harmonics/cache.py @@ -127,10 +127,8 @@ def get( cache = update(w1.S1, S1mh, cache, (n - 1) / 2) s = recursive_harmonic_sum(cache[S1mh], (n - 1) / 2, 1, 1) elif key == Sm1: - cache = update(w1.S1, S1, cache, n) - cache = update(w1.S1, S1mh, cache, (n - 1) / 2) - cache = update(w1.S1, S1h, cache, n / 2) - s = w1.Sm1(n, cache[S1], cache[S1mh], cache[S1h], is_singlet) + cache = update_Sm1(cache, n, is_singlet) + s = cache[key] elif key == S1p2: cache = update(w1.S1, S1, 
cache, n) s = recursive_harmonic_sum(cache[S1], n, 2, 1) @@ -145,10 +143,8 @@ def get( cache = update(w2.S2, S2mh, cache, (n - 1) / 2) s = recursive_harmonic_sum(cache[S2mh], (n - 1) / 2, 1, 2) elif key == Sm2: - cache = update(w2.S2, S2, cache, n) - cache = update(w2.S2, S2mh, cache, (n - 1) / 2) - cache = update(w2.S2, S2h, cache, n / 2) - s = w2.Sm2(n, cache[S2], cache[S2mh], cache[S2h], is_singlet) + cache = update_Sm2(cache, n, is_singlet) + s = cache[key] # weight 3 elif key == S3: s = w3.S3(n) From 9f51fd3dee242f1109717af1df16dbea090af460 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 18:18:19 +0200 Subject: [PATCH 097/154] Fix regression due to different EKO iteration --- src/eko/runner/managed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 2a91289b4..304fc9f4b 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -25,7 +25,7 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): atlas = commons.atlas(eko.theory_card, eko.operator_card) - recs = recipes.create(eko.evolgrid, atlas) + recs = recipes.create(eko.operator_card.evolgrid, atlas) eko.load_recipes(recs) for recipe in eko.recipes: From 1879f2baaaa161d07b80a0987228a2344e392fd1 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 18:18:54 +0200 Subject: [PATCH 098/154] Implement join operation, and trivially test it --- src/eko/runner/operators.py | 31 +++++++++++++++++++++++- tests/eko/runner/conftest.py | 38 ++++++++++++++++++++++++++++++ tests/eko/runner/test_operators.py | 26 ++++++++++++++++++++ 3 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 tests/eko/runner/conftest.py create mode 100644 tests/eko/runner/test_operators.py diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index 4f84a0884..4f0e7ae4e 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -1,7 +1,9 @@ """Combine 
parts into operators.""" +from functools import reduce from typing import List import numpy as np +import numpy.typing as npt from ..io.inventory import Inventory from ..io.items import Operator, Recipe @@ -14,6 +16,33 @@ def retrieve( return [] +def dot4(op1: npt.NDArray, op2: npt.NDArray) -> npt.NDArray: + """Dot product between rank 4 objects. + + The product is performed considering them as matrices indexed by pairs, so + linearizing the indices in pairs. + + """ + return np.einsum("aibj,bjck->aick", op1, op2) + + +def dotop(op1: Operator, op2: Operator) -> Operator: + """Dot product between two operators. + + Essentially a wrapper of :func:`dot4`, applying linear error propagation, + if applicable. + + """ + val = dot4(op1.operator, op2.operator) + + if op1.error is not None and op2.error is not None: + err = dot4(op1.operator, op2.error) + dot4(op1.error, op2.operator) + else: + err = None + + return Operator(val, err) + + def join(elements: List[Operator]) -> Operator: """Join the elements into the final operator.""" - return Operator(np.array([])) + return reduce(dotop, elements) diff --git a/tests/eko/runner/conftest.py b/tests/eko/runner/conftest.py new file mode 100644 index 000000000..a1516b2a7 --- /dev/null +++ b/tests/eko/runner/conftest.py @@ -0,0 +1,38 @@ +from pathlib import Path + +import numpy as np +import pytest + +from eko import EKO +from eko.io.items import Operator +from eko.io.runcards import OperatorCard, TheoryCard +from eko.runner import commons, recipes + + +@pytest.fixture +def neweko(theory_card: TheoryCard, operator_card: OperatorCard, tmp_path: Path): + path = tmp_path / "eko.tar" + with EKO.create(path) as builder: + yield builder.load_cards(theory_card, operator_card).build() + + path.unlink(missing_ok=True) + + +@pytest.fixture +def identity(neweko: EKO): + xs = len(neweko.xgrid.raw) + flavs = len(neweko.bases.pids) + return Operator(operator=np.eye(xs * flavs).reshape((xs, flavs, xs, flavs))) + + +@pytest.fixture +def 
ekoparts(neweko: EKO, identity: Operator): + atlas = commons.atlas(neweko.theory_card, neweko.operator_card) + neweko.load_recipes(recipes.create(neweko.operator_card.evolgrid, atlas)) + + for rec in neweko.recipes: + neweko.parts[rec] = identity + for rec in neweko.recipes_matching: + neweko.parts_matching[rec] = identity + + return neweko diff --git a/tests/eko/runner/test_operators.py b/tests/eko/runner/test_operators.py new file mode 100644 index 000000000..a7a1a6783 --- /dev/null +++ b/tests/eko/runner/test_operators.py @@ -0,0 +1,26 @@ +import numpy as np + +from eko.io.items import Operator +from eko.io.struct import EKO +from eko.runner.operators import join + + +def test_retrieve(ekoparts: EKO): + # __import__("pdb").set_trace() + pass + + +def test_join(identity: Operator): + """Just check a trivial property: product of identities is identity. + + In principle the join operation should respect all the properties of the + matrix product, but there not so many sensible rank-4 operators. 
+ + """ + linear_size = np.product(identity.operator.shape[:2]) + for n in range(1, 8, 3): + res = join([identity for _ in range(n)]) + assert res.error is None + np.testing.assert_allclose( + res.operator.reshape(linear_size, linear_size), np.eye(linear_size) + ) From 3609d6847577ddbae5bba9829195b707152acfb8 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 18:31:12 +0200 Subject: [PATCH 099/154] Implement parts retrieval and trivial test --- src/eko/runner/operators.py | 11 ++++++++--- tests/eko/runner/test_operators.py | 16 +++++++++++++--- tests/eko/runner/test_parts.py | 5 +++++ 3 files changed, 26 insertions(+), 6 deletions(-) create mode 100644 tests/eko/runner/test_parts.py diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index 4f0e7ae4e..23ed79c63 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -6,14 +6,19 @@ import numpy.typing as npt from ..io.inventory import Inventory -from ..io.items import Operator, Recipe +from ..io.items import Evolution, Operator, Recipe def retrieve( - elements: List[Recipe], parts: Inventory, parts_matching: Inventory + headers: List[Recipe], parts: Inventory, parts_matching: Inventory ) -> List[Operator]: """Retrieve parts to be joined.""" - return [] + elements = [] + for head in headers: + inv = parts if isinstance(head, Evolution) else parts_matching + elements.append(inv[head]) + + return elements def dot4(op1: npt.NDArray, op2: npt.NDArray) -> npt.NDArray: diff --git a/tests/eko/runner/test_operators.py b/tests/eko/runner/test_operators.py index a7a1a6783..8721fca99 100644 --- a/tests/eko/runner/test_operators.py +++ b/tests/eko/runner/test_operators.py @@ -2,12 +2,22 @@ from eko.io.items import Operator from eko.io.struct import EKO -from eko.runner.operators import join +from eko.runner.operators import join, retrieve def test_retrieve(ekoparts: EKO): - # __import__("pdb").set_trace() - pass + evhead, evop = next(iter(ekoparts.parts.cache.items())) 
+ matchhead, matchop = next(iter(ekoparts.parts_matching.cache.items())) + + els = retrieve([evhead] * 5, ekoparts.parts, ekoparts.parts_matching) + assert len(els) == 5 + assert all(isinstance(el, Operator) for el in els) + + els = retrieve( + [evhead, matchhead, matchhead], ekoparts.parts, ekoparts.parts_matching + ) + assert len(els) == 3 + assert all(isinstance(el, Operator) for el in els) def test_join(identity: Operator): diff --git a/tests/eko/runner/test_parts.py b/tests/eko/runner/test_parts.py new file mode 100644 index 000000000..0f3b7a87a --- /dev/null +++ b/tests/eko/runner/test_parts.py @@ -0,0 +1,5 @@ +from eko import EKO + + +def test_evolve(eko_factory: EKO): + pass From 7688d6835918df5787182abbcc354c30188e7b42 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 18:53:10 +0200 Subject: [PATCH 100/154] Fix parts configs --- src/eko/runner/managed.py | 1 - src/eko/runner/parts.py | 37 +++---------------------------------- 2 files changed, 3 insertions(+), 35 deletions(-) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 304fc9f4b..1a867c819 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -37,6 +37,5 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): for ep in operator.evolgrid: headers = recipes.elements(ep, atlas) - __import__("pdb").set_trace() parts_ = operators.retrieve(headers, eko.parts, eko.parts_matching) eko.operators[Target.from_ep(ep)] = operators.join(parts_) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 871000a63..9f3d1b8cc 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -73,17 +73,6 @@ def evolve_configs(eko: EKO) -> dict: Legacy interface, make use of a dedicated object. 
""" - # self.config: - # - order - # - n_integration_cores - # - xif2 - # - ev_op_iterations - # - debug_skip_non_singlet - # - debug_skip_singlet - # - method - # - ev_op_max_order - # - polarized - # - time_like tcard = eko.theory_card ocard = eko.operator_card return dict( @@ -97,6 +86,8 @@ def evolve_configs(eko: EKO) -> dict: debug_skip_singlet=ocard.debug.skip_singlet, debug_skip_non_singlet=ocard.debug.skip_non_singlet, n_integration_cores=ocard.configs.n_integration_cores, + ModSV=ocard.configs.scvar_method, + n3lo_ad_variation=tcard.n3lo_ad_variation, ) @@ -123,32 +114,10 @@ def matching_configs(eko: EKO) -> dict: Legacy interface, make use of a dedicated object. """ - # self.config: - # - order - # - n_integration_cores - # - xif2 - # - ev_op_iterations - # - debug_skip_non_singlet - # - debug_skip_singlet - # - method - # - ev_op_max_order - # - polarized - # - time_like - # - backward_inversion - # - intrinsic_range tcard = eko.theory_card ocard = eko.operator_card return dict( - order=tcard.order, - xif2=tcard.xif**2, - method=ocard.configs.evolution_method, - ev_op_iterations=ocard.configs.ev_op_iterations, - ev_op_max_order=ocard.configs.ev_op_max_order, - polarized=ocard.configs.polarized, - time_like=ocard.configs.time_like, - debug_skip_singlet=ocard.debug.skip_singlet, - debug_skip_non_singlet=ocard.debug.skip_non_singlet, - n_integration_cores=ocard.configs.n_integration_cores, + **evolve_configs(eko), backward_inversion=ocard.configs.inversion_method, intrinsic_range=tcard.heavy.intrinsic_flavors, ) From 9788f1d65362b1e0c562fa8840eccf6d2d233f4e Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 20:02:59 +0200 Subject: [PATCH 101/154] Fix matching conditions blow-up to flavor tensors in new runner --- src/eko/runner/parts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 9f3d1b8cc..28db95882 100644 --- a/src/eko/runner/parts.py +++ 
b/src/eko/runner/parts.py @@ -140,7 +140,7 @@ def match(eko: EKO, recipe: Matching) -> Operator: binfo = blowup_info(eko) nf_match = op.nf - 1 if recipe.inverse else op.nf res, err = matching_condition.MatchingCondition.split_ad_to_evol_map( - op, nf_match, recipe.scale, **binfo + op.op_members, nf_match, recipe.scale, **binfo ).to_flavor_basis_tensor(qed=binfo["qed"]) return Operator(res, err) From b08addd841b37622a13a57f72ea37a6c19d56980 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 20:05:08 +0200 Subject: [PATCH 102/154] Switch temporarily to the jets runner --- src/eko/runner/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/eko/runner/__init__.py b/src/eko/runner/__init__.py index feba847eb..80481bd53 100644 --- a/src/eko/runner/__init__.py +++ b/src/eko/runner/__init__.py @@ -41,6 +41,7 @@ def solve( legacy.Runner(theory_card, operators_card, path).compute() +# TODO: drop this, and directly re-export manaaged.solve def solve_jets( theory_card: Union[RawCard, TheoryCard], operators_card: Union[RawCard, OperatorCard], @@ -51,4 +52,5 @@ def solve_jets( managed.solve(new_theory, new_operator, Path(path)) -# solve = solve_jets +# TODO: drop this before merging #242 +solve = solve_jets From 80a1764ab21f8a39e2cce36dd311d8f191799184 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 20:41:41 +0200 Subject: [PATCH 103/154] Use values instead of variants for enums --- src/eko/runner/parts.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 28db95882..1d9b6d5d8 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -75,10 +75,11 @@ def evolve_configs(eko: EKO) -> dict: """ tcard = eko.theory_card ocard = eko.operator_card + scvar = ocard.configs.scvar_method return dict( order=tcard.order, xif2=tcard.xif**2, - method=ocard.configs.evolution_method, + method=ocard.configs.evolution_method.value, 
ev_op_iterations=ocard.configs.ev_op_iterations, ev_op_max_order=ocard.configs.ev_op_max_order, polarized=ocard.configs.polarized, @@ -86,7 +87,7 @@ def evolve_configs(eko: EKO) -> dict: debug_skip_singlet=ocard.debug.skip_singlet, debug_skip_non_singlet=ocard.debug.skip_non_singlet, n_integration_cores=ocard.configs.n_integration_cores, - ModSV=ocard.configs.scvar_method, + ModSV=scvar.value if scvar is not None else None, n3lo_ad_variation=tcard.n3lo_ad_variation, ) @@ -116,9 +117,10 @@ def matching_configs(eko: EKO) -> dict: """ tcard = eko.theory_card ocard = eko.operator_card + inversion = ocard.configs.inversion_method return dict( **evolve_configs(eko), - backward_inversion=ocard.configs.inversion_method, + backward_inversion=inversion.value if inversion is not None else None, intrinsic_range=tcard.heavy.intrinsic_flavors, ) From 729934897be7d7a9adf9351cb82155225dc9308d Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 4 May 2023 20:48:48 +0200 Subject: [PATCH 104/154] Revert already propagated enums --- src/eko/runner/parts.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index 1d9b6d5d8..cab44e609 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -75,7 +75,6 @@ def evolve_configs(eko: EKO) -> dict: """ tcard = eko.theory_card ocard = eko.operator_card - scvar = ocard.configs.scvar_method return dict( order=tcard.order, xif2=tcard.xif**2, @@ -87,7 +86,7 @@ def evolve_configs(eko: EKO) -> dict: debug_skip_singlet=ocard.debug.skip_singlet, debug_skip_non_singlet=ocard.debug.skip_non_singlet, n_integration_cores=ocard.configs.n_integration_cores, - ModSV=scvar.value if scvar is not None else None, + ModSV=ocard.configs.scvar_method, n3lo_ad_variation=tcard.n3lo_ad_variation, ) @@ -117,10 +116,9 @@ def matching_configs(eko: EKO) -> dict: """ tcard = eko.theory_card ocard = eko.operator_card - inversion = ocard.configs.inversion_method return dict( 
**evolve_configs(eko), - backward_inversion=inversion.value if inversion is not None else None, + backward_inversion=ocard.configs.inversion_method, intrinsic_range=tcard.heavy.intrinsic_flavors, ) From 208a99b545c19c4d6506660c4c2e6ea26372a520 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 2023 15:50:50 +0200 Subject: [PATCH 105/154] Fix nf0 determination in runcards upgrade --- benchmarks/lha_paper_bench.py | 2 -- src/eko/io/runcards.py | 5 +++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/benchmarks/lha_paper_bench.py b/benchmarks/lha_paper_bench.py index 9f54d1616..494376afa 100644 --- a/benchmarks/lha_paper_bench.py +++ b/benchmarks/lha_paper_bench.py @@ -1,7 +1,6 @@ """ Benchmark to :cite:`Giele:2002hx` (LO + NLO) and :cite:`Dittmar:2005ed` (NNLO). """ -import argparse import os from math import nan @@ -204,7 +203,6 @@ def __init__(self): "FNS": "FFNS", # ignored by eko, but needed by LHA_utils "NfFF": 4, "nfref": 4, - "nf0": 4, "kcThr": 0.0, "kbThr": np.inf, "ktThr": np.inf, diff --git a/src/eko/io/runcards.py b/src/eko/io/runcards.py index 27b6c54ce..83c1dae95 100644 --- a/src/eko/io/runcards.py +++ b/src/eko/io/runcards.py @@ -174,7 +174,7 @@ def new_theory(self): new["order"] = [old["PTO"] + 1, old["QED"]] alphaem = self.fallback(old.get("alphaqed"), old.get("alphaem"), default=0.0) ms = self.heavies("m%s", old) - mus = self.heavies("Qm%s", old) + ks = self.heavies("k%sThr", old) new["couplings"] = dict( alphas=old["alphas"], alphaem=alphaem, @@ -183,7 +183,7 @@ def new_theory(self): max_num_flavs=old["MaxNfAs"], ) new["heavy"] = { - "num_flavs_init": nf_default(old["Qref"] ** 2.0, default_atlas(ms, mus)) + "num_flavs_init": nf_default(old["Qref"] ** 2.0, default_atlas(ms, ks)) if old["nf0"] is None else old["nf0"], "num_flavs_max_pdf": old["MaxNfPdf"], @@ -198,6 +198,7 @@ def new_theory(self): if old["HQ"] == "POLE": new["heavy"]["masses"] = [[m, nan] for m in ms] elif old["HQ"] == "MSBAR": + mus = 
self.heavies("Qm%s", old) new["heavy"]["masses"] = [[m, mu] for m, mu in zip(ms, mus)] else: raise ValueError(f"Unknown mass scheme '{old['HQ']}'") From cd6fb55c3ca48ca0d019e40411b299d09a48957e Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 5 May 2023 15:54:27 +0200 Subject: [PATCH 106/154] Minor improvements on eko/io/items docs --- src/eko/io/items.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/eko/io/items.py b/src/eko/io/items.py index 4b6e4be64..b466dba6c 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -23,7 +23,8 @@ class Header: class Evolution(Header): """Information to compute an evolution operator. - Describe evolution with a fixed number of light flavors. + It describes the evolution with a fixed number of light flavors between two + scales. """ @@ -43,7 +44,7 @@ class Evolution(Header): Intermediate ones always have final scales :attr:`mu2` corresponding to matching scales, and initial scales :attr:`mu20` corresponding to either - matching scales or the global initial scale of the EKO. + matching scales or the global initial scale of the |EKO|. Note ---- @@ -58,15 +59,15 @@ class Evolution(Header): | nf = 6 -------------------------------------------------------- - where each lane corresponds to DGLAP evolution with the relative number of - running flavors, and the vertical bridges are the perturbative matchings + where each lane corresponds to |DGLAP| evolution with the relative number + of running flavors, and the vertical bridges are the perturbative matchings between two different "adjacent" schemes. 
""" @classmethod def from_atlas(cls, segment: matchings.Segment, cliff: bool = False): - """Create instance from analogous Atlas object.""" + """Create instance from analogous :class:`eko.matchings.Atlas` object.""" return cls(**asdict(segment), cliff=cliff) @property @@ -89,7 +90,7 @@ class Matching(Header): @classmethod def from_atlas(cls, matching: matchings.Matching, inverse: bool = False): - """Create instance from analogous Atlas object.""" + """Create instance from analogous :class:`eko.matchings.Atlas` object.""" return cls(**asdict(matching), inverse=inverse) @property @@ -123,8 +124,8 @@ def ep(self) -> EPoint: class Operator: """Operator representation. - To be used to hold the result of a computed 4-dim operator (from a given - scale to another given one). + To be used to hold the result of a computed 4-dim operator (either a raw + evolution operator or a matching condition). Note ---- From 443bb6cad7c95d129124edc3e6ad9c7a4c73827a Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 2023 16:11:57 +0200 Subject: [PATCH 107/154] Fix misuse of area walls in place of ratios, in couplings --- src/eko/beta.py | 3 +-- src/eko/couplings.py | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/eko/beta.py b/src/eko/beta.py index 7eacf0d3a..c869e9930 100644 --- a/src/eko/beta.py +++ b/src/eko/beta.py @@ -6,8 +6,7 @@ import numba as nb -from eko.constants import zeta3 - +from .constants import zeta3 from . 
import constants diff --git a/src/eko/couplings.py b/src/eko/couplings.py index dc8279b90..7038ea0a5 100644 --- a/src/eko/couplings.py +++ b/src/eko/couplings.py @@ -453,6 +453,7 @@ def assert_positive(name, var): # create new threshold object self.a_ref = np.array(couplings.values) / 4.0 / np.pi # convert to a_s and a_em matching_scales = (np.array(masses) * np.array(thresholds_ratios)).tolist() + self.thresholds_ratios = list(thresholds_ratios) self.atlas = matchings.Atlas(matching_scales, (couplings.scale**2, nf_ref)) self.hqm_scheme = scheme_name logger.info( @@ -744,7 +745,7 @@ def a( # - if there is yet a step to go if k < len(path) - 1: # q2_to is the threshold value - L = np.log(self.atlas.walls[seg.nf - shift + 1]) + L = np.log(self.thresholds_ratios[seg.nf - shift]) m_coeffs = ( compute_matching_coeffs_down(self.hqm_scheme, seg.nf - 1) if is_downward From f1f8aa84460af5bd513923035acc0f53e9a84b3e Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 5 May 2023 16:17:19 +0200 Subject: [PATCH 108/154] Fix isort --- src/eko/beta.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/beta.py b/src/eko/beta.py index c869e9930..8ce6658f1 100644 --- a/src/eko/beta.py +++ b/src/eko/beta.py @@ -6,8 +6,8 @@ import numba as nb -from .constants import zeta3 from . 
import constants +from .constants import zeta3 @nb.njit(cache=True) From b4a550e36e3bc749fbc348dfdb1489329c009c79 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 5 May 2023 18:05:35 +0200 Subject: [PATCH 109/154] Fix operator order --- src/eko/runner/operators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index 23ed79c63..9396aa291 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -50,4 +50,4 @@ def dotop(op1: Operator, op2: Operator) -> Operator: def join(elements: List[Operator]) -> Operator: """Join the elements into the final operator.""" - return reduce(dotop, elements) + return reduce(dotop, reversed(elements)) From a46e9969acd41831d72c795da4f0f08e90d30b99 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 2023 18:33:50 +0200 Subject: [PATCH 110/154] Hardcode intrinsic range, effectively deprecating it --- src/eko/runner/legacy.py | 1 + src/eko/runner/managed.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/eko/runner/legacy.py b/src/eko/runner/legacy.py index ca8c54f15..c964c3415 100644 --- a/src/eko/runner/legacy.py +++ b/src/eko/runner/legacy.py @@ -44,6 +44,7 @@ def __init__( """ new_theory, new_operator = runcards.update(theory_card, operators_card) + new_theory.heavy.intrinsic_flavors = [4, 5, 6] # Store inputs self.path = path diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 1a867c819..eeb4fc778 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -20,6 +20,8 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): """Solve DGLAP equations in terms of evolution kernel operators (EKO).""" + theory.heavy.intrinsic_flavors = [4, 5, 6] + with EKO.create(path) as builder: eko = builder.load_cards(theory, operator).build() # pylint: disable=E1101 From a5a87dd678891136d22fab182d3a4b2808f6d022 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 
2023 20:12:56 +0200 Subject: [PATCH 111/154] Add note about reverting the elements in vfns --- src/eko/runner/operators.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index 9396aa291..ca8eec9ff 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -49,5 +49,17 @@ def dotop(op1: Operator, op2: Operator) -> Operator: def join(elements: List[Operator]) -> Operator: - """Join the elements into the final operator.""" + """Join the elements into the final operator. + + Note + ---- + Since the matrices composing the path have to be multiplied from the + destination to the origin, the input order, coming from path (which is + instead ``origin -> target``), is being reversed. + + .. todo:: + + consider if reversing the path... + + """ return reduce(dotop, reversed(elements)) From ab0dbfc6a0fa95c5d42ac822a611dfe9f68adcdd Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 2023 20:13:52 +0200 Subject: [PATCH 112/154] Actually empty the cache during parts computation --- src/eko/runner/managed.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index eeb4fc778..520dfbf4d 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -33,9 +33,13 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): for recipe in eko.recipes: assert isinstance(recipe, Evolution) eko.parts[recipe] = parts.evolve(eko, recipe) + # flush the memory + del eko.parts[recipe] for recipe in eko.recipes_matching: assert isinstance(recipe, Matching) eko.parts_matching[recipe] = parts.match(eko, recipe) + # flush the memory + del eko.parts_matching[recipe] for ep in operator.evolgrid: headers = recipes.elements(ep, atlas) From 6cb3cce04263542adbaf8ce7f03c63d48e5cbb7b Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 5 May 2023 20:18:57 +0200 Subject: [PATCH 113/154] Actually 
empty the cache during parts joining --- src/eko/io/inventory.py | 5 +++++ src/eko/io/struct.py | 8 ++++++++ src/eko/runner/managed.py | 7 ++++++- 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 433736039..5116448a1 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -201,3 +201,8 @@ def sync(self): def __invert__(self): """Alias for :meth:`sync`.""" self.sync() + + def empty(self): + """Empty the in-memory cache.""" + for header in self.cache: + del self[header] diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index 82452c407..d0f253c93 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -200,6 +200,14 @@ def __delitem__(self, ep: EPoint): """Drop operator from memory.""" del self.operators[Target.from_ep(ep)] + def __delattr__(self, name: str): + """Empty an inventory cache.""" + attr = getattr(self, name) + if isinstance(attr, Inventory): + attr.empty() + else: + super().__delattr__(name) + @contextlib.contextmanager def operator(self, ep: EPoint): """Retrieve an operator and discard it afterwards. 
diff --git a/src/eko/runner/managed.py b/src/eko/runner/managed.py index 520dfbf4d..c353decde 100644 --- a/src/eko/runner/managed.py +++ b/src/eko/runner/managed.py @@ -44,4 +44,9 @@ def solve(theory: TheoryCard, operator: OperatorCard, path: Path): for ep in operator.evolgrid: headers = recipes.elements(ep, atlas) parts_ = operators.retrieve(headers, eko.parts, eko.parts_matching) - eko.operators[Target.from_ep(ep)] = operators.join(parts_) + target = Target.from_ep(ep) + eko.operators[target] = operators.join(parts_) + # flush the memory + del eko.parts + del eko.parts_matching + del eko.operators[target] From 5dadb36271ef275e5d2fd3e03c1545297ce89e78 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 12:45:54 +0200 Subject: [PATCH 114/154] Add suitable absolute values to linear error propagation in operators product --- src/eko/runner/operators.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/src/eko/runner/operators.py b/src/eko/runner/operators.py index ca8eec9ff..5a0a219c7 100644 --- a/src/eko/runner/operators.py +++ b/src/eko/runner/operators.py @@ -32,16 +32,44 @@ def dot4(op1: npt.NDArray, op2: npt.NDArray) -> npt.NDArray: def dotop(op1: Operator, op2: Operator) -> Operator: - """Dot product between two operators. + r"""Dot product between two operators. Essentially a wrapper of :func:`dot4`, applying linear error propagation, if applicable. + Note + ---- + Linear error propagation requires matrices to be positive before taking the product. + + Indeed, for a simple product of two variables :math:`a \cdot b`, the error is + propagated in the following way: + + .. 
math:: + + \max_{\sgn_{da}, \sgn{db}} (a + da)(b + db) - ab = + \max_{\sgn_{da}, \sgn{db}} da \cdot b + a \cdot db + \mathcal{O}(d^2) = + |da \cdot b| + |a \cdot db| + \mathcal{O}(d^2) = + |da | \cdot |b| + |a| \cdot |db| + \mathcal{O}(d^2) + + Where the second step is a consequence of being able to vary the two + variations independently, and last just a trivial property of the product. + + But in a matrix multiplication, each element of the two matrices has an + independent variation associated. Thus: + + .. math:: + + \max_{\sgn_{da_i}, \sgn{db_i}} (a_i + da_i)(b_i + db_i) - a_i b_i = + \max_{\sgn_{da_i}, \sgn{db_i}} da_i \cdot b_i + a_i \cdot db_i + \mathcal{O}(d^2) = + |da_i| \cdot |b_i| + |a_i| \cdot |db_i| + \mathcal{O}(d^2) + """ val = dot4(op1.operator, op2.operator) if op1.error is not None and op2.error is not None: - err = dot4(op1.operator, op2.error) + dot4(op1.error, op2.operator) + err = dot4(np.abs(op1.operator), np.abs(op2.error)) + dot4( + np.abs(op1.error), np.abs(op2.operator) + ) else: err = None From 29c6e6f7b7cfa94deefc60ca1a2ff8fba1d3eb36 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 14:10:21 +0200 Subject: [PATCH 115/154] Fix regression in MSbar masses evolution --- src/eko/msbar_masses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/msbar_masses.py b/src/eko/msbar_masses.py index 3e24feebb..3c6704388 100644 --- a/src/eko/msbar_masses.py +++ b/src/eko/msbar_masses.py @@ -387,7 +387,7 @@ def sc(thr_masses): order=order, method=evmeth, masses=thr_masses, - thresholds_ratios=matching * xif2, + thresholds_ratios=(np.array(matching) * xif2).tolist(), hqm_scheme=QuarkMassScheme.MSBAR, ) From 915510086af6d1a6444ea0d538eb9a447bad4ffb Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 14:12:35 +0200 Subject: [PATCH 116/154] Update src/eko/couplings.py Co-authored-by: Giacomo Magni <39065935+giacomomagni@users.noreply.github.com> --- src/eko/couplings.py | 1 - 1 file 
changed, 1 deletion(-) diff --git a/src/eko/couplings.py b/src/eko/couplings.py index 7038ea0a5..9b9de888e 100644 --- a/src/eko/couplings.py +++ b/src/eko/couplings.py @@ -744,7 +744,6 @@ def a( # apply matching conditions: see hep-ph/9706430 # - if there is yet a step to go if k < len(path) - 1: - # q2_to is the threshold value L = np.log(self.thresholds_ratios[seg.nf - shift]) m_coeffs = ( compute_matching_coeffs_down(self.hqm_scheme, seg.nf - 1) From caeeacf7f4a35b5819cf980e6b457ace0b037889 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 14:12:51 +0200 Subject: [PATCH 117/154] Update src/eko/matchings.py Co-authored-by: Giacomo Magni <39065935+giacomomagni@users.noreply.github.com> --- src/eko/matchings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index 566faa4a2..49f32990b 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -194,7 +194,7 @@ def is_downward_path(path: Path) -> bool: - in the number of active flavors when the path list contains more than one :class:`PathSegment`, note this can be different from each - :attr:`PathSegment.is_downward_q2` + :attr:`PathSegment.is_downward` - in :math:`Q^2` when just one single :class:`PathSegment` is given """ From ed7128f0d0d24bfb69f9ba513fa7cc736f4b272c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 14:15:08 +0200 Subject: [PATCH 118/154] Update matchings nomenclature in strings --- src/eko/matchings.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index 49f32990b..bf90e778b 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -30,7 +30,7 @@ def is_downward(self) -> bool: def __str__(self): """Textual representation, mainly for logging purpose.""" - return f"PathSegment({self.origin} -> {self.target}, nf={self.nf})" + return f"Segment({self.origin} -> {self.target}, nf={self.nf})" Path = List[Segment] @@ 
-188,14 +188,14 @@ def nf_default(mu2: SquaredScale, atlas: Atlas) -> FlavorsNumber: def is_downward_path(path: Path) -> bool: - """Determine if a path is downward. + r"""Determine if a path is downward. Criterias are: - in the number of active flavors when the path list contains more than one - :class:`PathSegment`, note this can be different from each - :attr:`PathSegment.is_downward` - - in :math:`Q^2` when just one single :class:`PathSegment` is given + :class:`Segment`, note this can be different from each + :attr:`Segment.is_downward` + - in :math:`\mu^2`, when just one single :class:`Segment` is given """ if len(path) == 1: From d919b8902b681b14afda577ef64ab461e4f6d8a0 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 15:57:25 +0200 Subject: [PATCH 119/154] Add note about flavor rotation --- src/eko/runner/parts.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index cab44e609..dbb0882ea 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -124,7 +124,18 @@ def matching_configs(eko: EKO) -> dict: def match(eko: EKO, recipe: Matching) -> Operator: - """Compute matching in isolation.""" + """Compute matching in isolation. + + Note + ---- + Compared to the old prescription, a dedicated rotation to a different + intrinsic basis is not needed any longer. + + All the operators are blown up to flavor basis, and they are saved and + joined in that unique basis. So, the only rotation used is towards that + basis, and encoded in the blowing up prescription. 
+ + """ kthr = eko.theory_card.heavy.squared_ratios[recipe.hq - 4] op = ome.OperatorMatrixElement( matching_configs(eko), From 9b43116795ba534b51ad65a944028bec49fd2801 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Mon, 8 May 2023 18:19:25 +0200 Subject: [PATCH 120/154] Fix matched path construction --- src/eko/matchings.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/eko/matchings.py b/src/eko/matchings.py index bf90e778b..20d552726 100644 --- a/src/eko/matchings.py +++ b/src/eko/matchings.py @@ -160,11 +160,12 @@ def matched_path(self, target: EPoint) -> MatchedPath: prev = path[0] matched: MatchedPath = [prev] - for seq in path[1:]: - matching = Matching(prev.target, max(prev.nf, seq.nf)) + for seg in path[1:]: + matching = Matching(prev.target, max(prev.nf, seg.nf)) matched.append(matching) - matched.append(seq) + matched.append(seg) + prev = seg return matched From 945555536cb6ac9339d2269022005f7871b7e0bc Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 11:23:44 +0200 Subject: [PATCH 121/154] Add unit tests for matchings --- tests/eko/test_matchings.py | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/tests/eko/test_matchings.py b/tests/eko/test_matchings.py index fd1ca34b5..36a540f3f 100644 --- a/tests/eko/test_matchings.py +++ b/tests/eko/test_matchings.py @@ -3,7 +3,14 @@ import numpy as np -from eko.matchings import Atlas, Segment, flavor_shift, is_downward_path, nf_default +from eko.matchings import ( + Atlas, + Matching, + Segment, + flavor_shift, + is_downward_path, + nf_default, +) from eko.quantities.heavy_quarks import MatchingScales @@ -36,6 +43,12 @@ def test_init(self): tc2 = Atlas(MatchingScales([0, 2, 3]), ZERO) assert tc2.walls == [0, 0, 2, 3, np.inf] + def test_normalize(self): + tc = Atlas(MatchingScales([1, 2, 3]), (1.5, None)) + assert tc.origin[1] == 4 + assert tc.normalize((2.5, None))[1] == 5 + assert tc.normalize((2.5, 3))[1] 
== 3 + def test_nfref(self): # weird but fine tc = Atlas(MatchingScales([1, 2, 3]), (1.5, 3)) @@ -114,6 +127,26 @@ def test_path_3thr_weird(self): assert p8[0].origin == mu2_from assert p8[3].target == mu2_to + def test_matched_path(self): + for kc in [0.3, 0.9, 1.5]: + for kb in [0.75, 1.25]: + tc = Atlas(MatchingScales([1 * kc, 2 * kb, 300]), (0.5, 3)) + p = tc.matched_path((200, 5)) + assert len(p) == 5 + assert isinstance(p[0], Segment) + assert isinstance(p[1], Matching) + assert p[1].scale == p[0].target + assert p[1].scale == p[2].origin + assert p[1].hq == p[0].nf + 1 + assert p[1].hq == p[2].nf + assert isinstance(p[2], Segment) + assert isinstance(p[3], Matching) + assert p[3].scale == p[2].target + assert p[3].scale == p[4].origin + assert p[3].hq == p[2].nf + 1 + assert p[3].hq == p[4].nf + assert isinstance(p[4], Segment) + def test_nf(): nf4 = Atlas.ffns(4, 0.0) From 65a61f87d61d1c3a1f72cf164e35a6130eb2a7b4 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 11:29:20 +0200 Subject: [PATCH 122/154] Improve unit test ekore/harmonics/log --- tests/ekore/harmonics/test_log_functions.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/ekore/harmonics/test_log_functions.py b/tests/ekore/harmonics/test_log_functions.py index a6f0dc0fe..3f8bf0035 100644 --- a/tests/ekore/harmonics/test_log_functions.py +++ b/tests/ekore/harmonics/test_log_functions.py @@ -7,10 +7,10 @@ def test_lm1pm2(): - def mellin_lm1pm2(x, k): + def mellin_lm1pm2(x, k, N): return x ** (N - 1) * (1 - x) ** 2 * np.log(1 - x) ** k - Ns = 100 * np.random.rand(3) + Ns = 1.0 + 100 * np.random.rand(3) for N in Ns: sx = hsx(N, 4) @@ -22,16 +22,16 @@ def mellin_lm1pm2(x, k): } for k, ref in ref_values.items(): - test_value = quad(mellin_lm1pm2, 0, 1, args=(k))[0] + test_value = quad(mellin_lm1pm2, 0, 1, args=(k, N))[0] np.testing.assert_allclose(test_value, ref) def test_lm1pm1(): # test mellin transformation with some random N values - def 
mellin_lm1pm1(x, k): + def mellin_lm1pm1(x, k, N): return x ** (N - 1) * (1 - x) * np.log(1 - x) ** k - Ns = 100 * np.random.rand(3) + Ns = 1.0 + 100 * np.random.rand(3) for N in Ns: sx = hsx(N, 4) @@ -43,16 +43,16 @@ def mellin_lm1pm1(x, k): } for k in [1, 2, 3, 4]: - test_value = quad(mellin_lm1pm1, 0, 1, args=(k))[0] + test_value = quad(mellin_lm1pm1, 0, 1, args=(k, N))[0] np.testing.assert_allclose(test_value, ref_values[k]) def test_lm1p(): # test mellin transformation with some random N values - def mellin_lm1p(x, k): + def mellin_lm1p(x, k, N): return x ** (N - 1) * np.log(1 - x) ** k - Ns = 100 * np.random.rand(3) + Ns = 1.0 + 100 * np.random.rand(3) for N in Ns: sx = hsx(N, 5) @@ -65,5 +65,5 @@ def mellin_lm1p(x, k): } for k in [1, 3, 4, 5]: - test_value = quad(mellin_lm1p, 0, 1, args=(k))[0] + test_value = quad(mellin_lm1p, 0, 1, args=(k, N))[0] np.testing.assert_allclose(test_value, ref_values[k]) From bfbd49d01108daa7495e15134a495d189c02a77d Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 11:43:00 +0200 Subject: [PATCH 123/154] Add test_to_uni_evol --- src/eko/io/manipulate.py | 4 +-- tests/eko/io/test_manipulate.py | 46 +++++++++++++++++++++------------ 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/src/eko/io/manipulate.py b/src/eko/io/manipulate.py index 542e04705..e9cc89538 100644 --- a/src/eko/io/manipulate.py +++ b/src/eko/io/manipulate.py @@ -241,7 +241,7 @@ def flavor_reshape( def to_evol(eko: EKO, source: bool = True, target: bool = False): """Rotate the operator into evolution basis. - This also assigns also the pids. The operation is in-place. + This assigns also the pids. The operation is in-place. Parameters ---------- @@ -272,7 +272,7 @@ def to_evol(eko: EKO, source: bool = True, target: bool = False): def to_uni_evol(eko: EKO, source: bool = True, target: bool = False): """Rotate the operator into evolution basis. - This also assigns also the pids. The operation is in-place. 
+ This assigns also the pids. The operation is in-place. Parameters ---------- diff --git a/tests/eko/io/test_manipulate.py b/tests/eko/io/test_manipulate.py index ee83b276a..c79716eb1 100644 --- a/tests/eko/io/test_manipulate.py +++ b/tests/eko/io/test_manipulate.py @@ -173,6 +173,26 @@ def test_flavor_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): manipulate.flavor_reshape(of) def test_to_evol(self, eko_factory: EKOFactory, tmp_path): + self._test_to_all_evol( + eko_factory, + tmp_path, + manipulate.to_evol, + br.rotate_flavor_to_evolution, + br.flavor_basis_pids, + ) + + def test_to_uni_evol(self, eko_factory: EKOFactory, tmp_path): + self._test_to_all_evol( + eko_factory, + tmp_path, + manipulate.to_uni_evol, + br.rotate_flavor_to_unified_evolution, + br.flavor_basis_pids, + ) + + def _test_to_all_evol( + self, eko_factory: EKOFactory, tmp_path, to_evol_fnc, rot_matrix, pids + ): xgrid = interpolation.XGrid([0.5, 1.0]) mu_out = 2.0 mu2_out = mu_out**2 @@ -190,39 +210,31 @@ def test_to_evol(self, eko_factory: EKOFactory, tmp_path): o01_path = tmp_path / "o01.tar" o00.deepcopy(o01_path) with EKO.edit(o01_path) as o01: - manipulate.to_evol(o01) + to_evol_fnc(o01) o10_path = tmp_path / "o10.tar" o00.deepcopy(o10_path) with EKO.edit(o10_path) as o10: - manipulate.to_evol(o10, False, True) + to_evol_fnc(o10, False, True) o11_path = tmp_path / "o11.tar" o00.deepcopy(o11_path) with EKO.edit(o11_path) as o11: - manipulate.to_evol(o11, True, True) + to_evol_fnc(o11, True, True) chk_keys(o00.raw, o11.raw) with EKO.edit(o01_path) as o01: with EKO.edit(o10_path) as o10: with EKO.read(o11_path) as o11: # check the input rotated one - np.testing.assert_allclose( - o01.bases.inputpids, br.rotate_flavor_to_evolution - ) - np.testing.assert_allclose( - o01.bases.targetpids, br.flavor_basis_pids - ) + np.testing.assert_allclose(o01.bases.inputpids, rot_matrix) + np.testing.assert_allclose(o01.bases.targetpids, pids) # rotate also target - manipulate.to_evol(o01, 
False, True) + to_evol_fnc(o01, False, True) np.testing.assert_allclose(o01[epout].operator, o11[epout].operator) chk_keys(o00.raw, o01.raw) # check the target rotated one - np.testing.assert_allclose( - o10.bases.inputpids, br.flavor_basis_pids - ) - np.testing.assert_allclose( - o10.bases.targetpids, br.rotate_flavor_to_evolution - ) + np.testing.assert_allclose(o10.bases.inputpids, pids) + np.testing.assert_allclose(o10.bases.targetpids, rot_matrix) # rotate also input - manipulate.to_evol(o10) + to_evol_fnc(o10) np.testing.assert_allclose(o10[epout].operator, o11[epout].operator) chk_keys(o00.raw, o10.raw) From d71d1fe42943c5a35c46083161ddfe10126ea46b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 11:47:11 +0200 Subject: [PATCH 124/154] Update ekomark/plots --- src/ekomark/plots.py | 54 ++++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/src/ekomark/plots.py b/src/ekomark/plots.py index eee5ef79b..92a155de3 100644 --- a/src/ekomark/plots.py +++ b/src/ekomark/plots.py @@ -141,24 +141,24 @@ def plot_dist(x, y, yerr, yref, title=None, oMx_min=1e-2, oMx_max=0.5): def plot_operator(var_name, op, op_err, log_operator=True, abs_operator=True): - """ - Plot a single operator as heat map. + """Plot a single operator as heat map. 
Parameters ---------- - ret : dict - DGLAP result - var_name : str - operator name - log_operator : bool, optional - plot on logarithmic scale - abs_operator : bool, optional - plot absolute value (instead of true value) + ret : dict + DGLAP result + var_name : str + operator name + log_operator : bool, optional + plot on logarithmic scale + abs_operator : bool, optional + plot absolute value (instead of true value) Returns ------- - fig : matplotlib.pyplot.figure - created figure + fig : matplotlib.pyplot.figure + created figure + """ # get # op = ret["operators"][var_name] @@ -210,25 +210,25 @@ def plot_operator(var_name, op, op_err, log_operator=True, abs_operator=True): def save_operators_to_pdf(path, theory, ops, me: EKO, skip_pdfs, change_lab=False): - """ - Output all operator heatmaps to PDF. + """Output all operator heatmaps to PDF. Parameters ---------- - path : str - path to the plot - theory : dict - theory card - ops : dict - operator card - me : eko.output.Output - DGLAP result - skip_pdfs : list - PDF to skip - change_lab : bool - set whether to rename the labels + path : str + path to the plot + theory : dict + theory card + ops : dict + operator card + me : eko.output.Output + DGLAP result + skip_pdfs : list + PDF to skip + change_lab : bool + set whether to rename the labels + """ - ops_names = list(me.rotations.targetpids) + ops_names = list(me.bases.targetpids) if np.allclose(ops_names, br.rotate_flavor_to_evolution): ops_names = br.evol_basis_pids else: From b94e4a21bdc97422505a323fc491ffdefdba51a7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 11:56:58 +0200 Subject: [PATCH 125/154] Fix ekore/cache and complete its unit test --- src/ekore/harmonics/cache.py | 2 +- tests/ekore/harmonics/test_cache.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/ekore/harmonics/cache.py b/src/ekore/harmonics/cache.py index 6b22d61d1..1bc8f54fe 100644 --- a/src/ekore/harmonics/cache.py +++ 
b/src/ekore/harmonics/cache.py @@ -108,7 +108,7 @@ def get( """ # Maybe improve error - if key < 0 or key > len(cache): + if key < 0 or key >= len(cache): raise RuntimeError # load the thing s = cache[key] diff --git a/tests/ekore/harmonics/test_cache.py b/tests/ekore/harmonics/test_cache.py index ab756da11..30d68d759 100644 --- a/tests/ekore/harmonics/test_cache.py +++ b/tests/ekore/harmonics/test_cache.py @@ -1,4 +1,5 @@ import numpy as np +import pytest from ekore import harmonics as h @@ -12,6 +13,15 @@ def test_reset(): np.sum([isinstance(val, int) for val in h.cache.__dict__.values()]) - 1 ) assert c.shape == (available_items,) + assert c.shape == (h.cache.CACHE_SIZE,) + + +def test_error(): + c = h.cache.reset() + with pytest.raises(RuntimeError): + h.cache.get(-1, c, 1.0) + with pytest.raises(RuntimeError): + h.cache.get(h.cache.CACHE_SIZE, c, 1.0) def test_get(): From e3ca379f890e2109ec2b49f481da64cbe2c7ae0b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 13:08:47 +0200 Subject: [PATCH 126/154] Add more ekore unit tests --- .../unpolarized/space_like/test_init.py | 10 ++++++++++ .../unpolarized/time_like/test_init.py | 6 +++++- .../unpolarized/space_like/test_init.py | 9 +++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 tests/ekore/operator_matrix_elements/unpolarized/space_like/test_init.py diff --git a/tests/ekore/anomalous_dimensions/unpolarized/space_like/test_init.py b/tests/ekore/anomalous_dimensions/unpolarized/space_like/test_init.py index 3e82fbb52..2175eddcd 100644 --- a/tests/ekore/anomalous_dimensions/unpolarized/space_like/test_init.py +++ b/tests/ekore/anomalous_dimensions/unpolarized/space_like/test_init.py @@ -123,6 +123,8 @@ def test_gamma_ns(): ad_us.gamma_ns((4, 0), br.non_singlet_pids_map["ns+"], 1, nf), np.zeros(4), ) + with pytest.raises(NotImplementedError): + ad_us.gamma_ns((2, 0), 10106, 2.0, nf) def test_gamma_ns_qed(): @@ -194,6 +196,14 @@ def test_gamma_ns_qed(): ) +def 
test_errors(): + cache = h.cache.reset() + with pytest.raises(NotImplementedError): + ad_us.choose_ns_ad_as1aem1(10106, 2.0, cache) + with pytest.raises(NotImplementedError): + ad_us.choose_ns_ad_aem2(10106, 2.0, 4, cache) + + def test_dim_singlet(): nf = 3 N = 2 diff --git a/tests/ekore/anomalous_dimensions/unpolarized/time_like/test_init.py b/tests/ekore/anomalous_dimensions/unpolarized/time_like/test_init.py index 7ad42d825..88f83a980 100644 --- a/tests/ekore/anomalous_dimensions/unpolarized/time_like/test_init.py +++ b/tests/ekore/anomalous_dimensions/unpolarized/time_like/test_init.py @@ -1,7 +1,11 @@ +import eko.basis_rotation as br import ekore.anomalous_dimensions.unpolarized.time_like as ad def test_shapes(): for k in range(1, 3 + 1): - assert ad.gamma_ns((k, 0), 10101, 2.0, 5).shape == (k,) + for s in ["+", "-", "V"]: + assert ad.gamma_ns( + (k, 0), br.non_singlet_pids_map[f"ns{s}"], 2.0, 5 + ).shape == (k,) assert ad.gamma_singlet((k, 0), 2.0, 5).shape == (k, 2, 2) diff --git a/tests/ekore/operator_matrix_elements/unpolarized/space_like/test_init.py b/tests/ekore/operator_matrix_elements/unpolarized/space_like/test_init.py new file mode 100644 index 000000000..d1db339bd --- /dev/null +++ b/tests/ekore/operator_matrix_elements/unpolarized/space_like/test_init.py @@ -0,0 +1,9 @@ +r"""Test |OME|.""" + +import ekore.operator_matrix_elements.unpolarized.space_like as ome + + +def test_shapes(): + for k in range(1, 3 + 1): + assert ome.A_singlet((k, 0), complex(0, 1), 4, 0.0, False).shape == (k, 3, 3) + assert ome.A_non_singlet((k, 0), complex(0, 1), 4, 0.0).shape == (k, 2, 2) From e5714664d68d029d1ff811b0462697f78e0fd9da Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 14:51:43 +0200 Subject: [PATCH 127/154] Fix import lint warning and shift bases tests --- src/eko/couplings.py | 1 - .../operator_matrix_element.py | 1 - src/eko/io/items.py | 2 +- src/eko/kernels/singlet_qed.py | 1 - .../polarized/space_like/as2.py | 3 +- 
.../unpolarized/space_like/aem1.py | 1 - src/ekore/harmonics/log_functions.py | 2 - tests/eko/io/test_bases.py | 36 ++++++++++++++++++ tests/eko/io/test_runcards.py | 37 ------------------- 9 files changed, 39 insertions(+), 45 deletions(-) diff --git a/src/eko/couplings.py b/src/eko/couplings.py index 9b9de888e..b0d39a070 100644 --- a/src/eko/couplings.py +++ b/src/eko/couplings.py @@ -8,7 +8,6 @@ """ import logging -import warnings from typing import Iterable, List import numba as nb diff --git a/src/eko/evolution_operator/operator_matrix_element.py b/src/eko/evolution_operator/operator_matrix_element.py index 65fa7ceb2..a16b3f25e 100644 --- a/src/eko/evolution_operator/operator_matrix_element.py +++ b/src/eko/evolution_operator/operator_matrix_element.py @@ -10,7 +10,6 @@ import ekore.operator_matrix_elements.polarized.space_like as ome_ps import ekore.operator_matrix_elements.unpolarized.space_like as ome_us import ekore.operator_matrix_elements.unpolarized.time_like as ome_ut -from ekore import harmonics from .. import basis_rotation as br from .. import scale_variations as sv diff --git a/src/eko/io/items.py b/src/eko/io/items.py index b466dba6c..1846602c9 100644 --- a/src/eko/io/items.py +++ b/src/eko/io/items.py @@ -5,8 +5,8 @@ import lz4.frame import numpy as np -import numpy.lib.npyio as npyio import numpy.typing as npt +from numpy.lib import npyio from .. import matchings from . import exceptions diff --git a/src/eko/kernels/singlet_qed.py b/src/eko/kernels/singlet_qed.py index e85ad5112..d6e57c537 100644 --- a/src/eko/kernels/singlet_qed.py +++ b/src/eko/kernels/singlet_qed.py @@ -5,7 +5,6 @@ from ekore import anomalous_dimensions as ad from .. import beta -from . 
import utils @nb.njit(cache=True) diff --git a/src/ekore/anomalous_dimensions/polarized/space_like/as2.py b/src/ekore/anomalous_dimensions/polarized/space_like/as2.py index 95134d7ae..e8ac4f221 100644 --- a/src/ekore/anomalous_dimensions/polarized/space_like/as2.py +++ b/src/ekore/anomalous_dimensions/polarized/space_like/as2.py @@ -9,7 +9,8 @@ # Non Singlet sector is swapped from ...unpolarized.space_like.as2 import gamma_nsm as gamma_nsp -from ...unpolarized.space_like.as2 import gamma_nsp as gamma_nsm + +# from ...unpolarized.space_like.as2 import gamma_nsp as gamma_nsm @nb.njit(cache=True) diff --git a/src/ekore/anomalous_dimensions/unpolarized/space_like/aem1.py b/src/ekore/anomalous_dimensions/unpolarized/space_like/aem1.py index 33cfc751f..9163d722c 100644 --- a/src/ekore/anomalous_dimensions/unpolarized/space_like/aem1.py +++ b/src/ekore/anomalous_dimensions/unpolarized/space_like/aem1.py @@ -5,7 +5,6 @@ from eko import constants -from ....harmonics import cache as c from . import as1 diff --git a/src/ekore/harmonics/log_functions.py b/src/ekore/harmonics/log_functions.py index 6f3202a52..c02aa4a2a 100644 --- a/src/ekore/harmonics/log_functions.py +++ b/src/ekore/harmonics/log_functions.py @@ -8,8 +8,6 @@ """ import numba as nb -from eko.constants import zeta3 - @nb.njit(cache=True) def lm11m1(n, S1): diff --git a/tests/eko/io/test_bases.py b/tests/eko/io/test_bases.py index 0794358ba..a0165243f 100644 --- a/tests/eko/io/test_bases.py +++ b/tests/eko/io/test_bases.py @@ -1,3 +1,5 @@ +from dataclasses import fields + import numpy as np from eko import basis_rotation as br @@ -6,6 +8,40 @@ class TestBases: + XGRID_TEST = [1e-3, 1e-2, 1e-1, 1.0] + + def test_serialization(self): + rot = Bases(interpolation.XGrid(self.XGRID_TEST)) + + d = rot.raw + rot1 = rot.from_dict(d) + + for f in fields(Bases): + assert getattr(rot, f.name) == getattr(rot1, f.name) + + assert d["targetgrid"] is None + assert "_targetgrid" not in d + + def test_pids(self): + rot = 
Bases(interpolation.XGrid(self.XGRID_TEST)) + + # no check on correctness of value set + rot.inputpids = [0, 1] + # but the internal grid is unmodified + assert len(rot.pids) == 14 + # and fallback implemented for unset external bases + assert np.all(rot.targetpids == rot.pids) + + def test_grids(self): + rot = Bases(interpolation.XGrid(self.XGRID_TEST)) + + # no check on correctness of value set + rot.inputgrid = interpolation.XGrid([0.1, 1]) + # but the internal grid is unmodified + assert len(rot.xgrid) == len(self.XGRID_TEST) + # and fallback implemented for unset external grids + assert np.all(rot.targetgrid == rot.xgrid) + def test_fallback(self): xg = interpolation.XGrid([0.1, 1.0]) r = Bases(xgrid=xg) diff --git a/tests/eko/io/test_runcards.py b/tests/eko/io/test_runcards.py index 995f2b63b..493ff5357 100644 --- a/tests/eko/io/test_runcards.py +++ b/tests/eko/io/test_runcards.py @@ -1,5 +1,4 @@ import copy -from dataclasses import fields from io import StringIO import numpy as np @@ -13,42 +12,6 @@ from ekomark.data.operators import default_card as operator_card -class TestBases: - XGRID_TEST = [1e-3, 1e-2, 1e-1, 1.0] - - def test_serialization(self): - rot = Bases(interpolation.XGrid(self.XGRID_TEST)) - - d = rot.raw - rot1 = rot.from_dict(d) - - for f in fields(Bases): - assert getattr(rot, f.name) == getattr(rot1, f.name) - - assert d["targetgrid"] is None - assert "_targetgrid" not in d - - def test_pids(self): - rot = Bases(interpolation.XGrid(self.XGRID_TEST)) - - # no check on correctness of value set - rot.inputpids = [0, 1] - # but the internal grid is unmodified - assert len(rot.pids) == 14 - # and fallback implemented for unset external bases - assert np.all(rot.targetpids == rot.pids) - - def test_grids(self): - rot = Bases(interpolation.XGrid(self.XGRID_TEST)) - - # no check on correctness of value set - rot.inputgrid = interpolation.XGrid([0.1, 1]) - # but the internal grid is unmodified - assert len(rot.xgrid) == len(self.XGRID_TEST) - # and 
fallback implemented for unset external grids - assert np.all(rot.targetgrid == rot.xgrid) - - def test_flavored_mu2grid(): mugrid = list(range(5, 40, 5)) masses = [10, 20, 30] From ac22b78df83ffe8a174c3c6c67e5964f24d24d9b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 15:31:23 +0200 Subject: [PATCH 128/154] Complete eko/io/runcards unit tests --- src/eko/msbar_masses.py | 8 ++-- tests/eko/io/test_runcards.py | 82 ++++++++++++++++++++++++++++++++++- 2 files changed, 84 insertions(+), 6 deletions(-) diff --git a/src/eko/msbar_masses.py b/src/eko/msbar_masses.py index 3c6704388..356a81019 100644 --- a/src/eko/msbar_masses.py +++ b/src/eko/msbar_masses.py @@ -417,12 +417,12 @@ def sc(thr_masses): if q_idx + 4 == nf_ref and q2m_ref > mu2_ref: raise ValueError( f"In MSbar scheme, Qm{hq} should be lower than Qref, " - "if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" + f"if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" ) if q_idx + 4 == nf_ref + 1 and q2m_ref < mu2_ref: raise ValueError( f"In MSbar scheme, Qm{hq} should be greater than Qref, " - "if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" + f"if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" ) # check that for higher patches you do forward running @@ -430,7 +430,7 @@ def sc(thr_masses): if q_idx + 3 >= nf_ref and q2m_ref >= m2_ref: raise ValueError( f"In MSbar scheme, Qm{hq} should be lower than m{hq} " - "if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" + f"if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" ) # check that for lower patches you do backward running @@ -439,7 +439,7 @@ def sc(thr_masses): if q2m_ref < m2_ref: raise ValueError( f"In MSbar scheme, Qm{hq} should be greater than m{hq}" - "if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" + f"if alpha_s is given with nfref={nf_ref} at scale Qref={mu2_ref}" ) nf_target += 1 shift = 1 diff --git a/tests/eko/io/test_runcards.py 
b/tests/eko/io/test_runcards.py index 493ff5357..cec067819 100644 --- a/tests/eko/io/test_runcards.py +++ b/tests/eko/io/test_runcards.py @@ -6,7 +6,6 @@ import yaml from banana.data.theories import default_card as theory_card -from eko import interpolation from eko.io import runcards as rc from eko.io.bases import Bases from ekomark.data.operators import default_card as operator_card @@ -26,12 +25,51 @@ def test_flavored_mu2grid(): stream.seek(0) deser = yaml.safe_load(stream) assert len(flavored) == len(deser) - # after deserialization on is now list instead of tuple + # after deserialization one is now list instead of tuple for t, l in zip(flavored, deser): assert len(t) == len(l) assert t == tuple(l) +def test_runcards_opcard(): + # check conversion + tc = copy.deepcopy(theory_card) + oc = copy.deepcopy(operator_card) + tc["Q0"] = 2.0 + # mugrid + oc["mugrid"] = [2.0, 10.0] + _nt, no = rc.update(tc, oc) + assert isinstance(no, rc.OperatorCard) + assert len(no.evolgrid) == len(oc["mugrid"]) + assert len(no.mu2grid) == len(no.evolgrid) + assert no.evolgrid[0][-1] == 4 + assert no.evolgrid[1][-1] == 5 + np.testing.assert_allclose(no.mu0, tc["Q0"]) + np.testing.assert_allclose(no.mu20, tc["Q0"] ** 2.0) + assert len(no.pids) == 14 + del oc["mugrid"] + # or mu2grid + oc["mu2grid"] = [9.0, 30.0, 32.0] + _nt, no = rc.update(tc, oc) + assert isinstance(no, rc.OperatorCard) + assert len(no.evolgrid) == len(oc["mu2grid"]) + assert len(no.mu2grid) == len(no.evolgrid) + assert no.evolgrid[0][-1] == 4 + assert no.evolgrid[1][-1] == 5 + assert no.evolgrid[2][-1] == 5 + del oc["mu2grid"] + # or Q2grid + oc["Q2grid"] = [15.0, 130.0, 140.0, 1e5] + _nt, no = rc.update(tc, oc) + assert isinstance(no, rc.OperatorCard) + assert len(no.evolgrid) == len(oc["Q2grid"]) + assert len(no.mu2grid) == len(no.evolgrid) + assert no.evolgrid[0][-1] == 4 + assert no.evolgrid[1][-1] == 5 + assert no.evolgrid[2][-1] == 5 + assert no.evolgrid[3][-1] == 6 + + def test_runcards_ekomark(): # check 
conversion tc = copy.deepcopy(theory_card) @@ -43,3 +81,43 @@ def test_runcards_ekomark(): nnt, nno = rc.update(nt, no) assert nnt == nt assert nno == no + + +def test_runcards_quarkmass(): + tc = copy.deepcopy(theory_card) + tc["nfref"] = 5 + tc["IC"] = 1 + oc = copy.deepcopy(operator_card) + nt, no = rc.update(tc, oc) + assert nt.heavy.intrinsic_flavors == [4] + for _, scale in nt.heavy.masses: + assert np.isnan(scale) + m2s = rc.masses(nt, no.configs.evolution_method) + raw = rc.Legacy.heavies("m%s", tc) + raw2 = np.power(raw, 2.0) + np.testing.assert_allclose(m2s, raw2) + tc["HQ"] = "MSBAR" + tc["Qmc"] = raw[0] * 1.1 + tc["Qmb"] = raw[1] * 1.1 + tc["Qmt"] = raw[2] * 0.9 + nt, no = rc.update(tc, oc) + for _, scale in nt.heavy.masses: + assert not np.isnan(scale) + m2s = rc.masses(nt, no.configs.evolution_method) + for m1, m2 in zip(m2s, raw2): + assert not np.isclose(m1, m2) + tc["HQ"] = "Blub" + with pytest.raises(ValueError, match="mass scheme"): + _nt, _no = rc.update(tc, oc) + nt.heavy.masses_scheme = "Bla" + with pytest.raises(ValueError, match="mass scheme"): + _ms = rc.masses(nt, no.configs.evolution_method) + + +def test_legacy_fallback(): + assert rc.Legacy.fallback(1, 2, 3) == 1 + assert rc.Legacy.fallback(None, 2, 3) == 2 + assert rc.Legacy.fallback(None, 2, 3, default=7) == 2 + assert rc.Legacy.fallback(None, None, 3) == 3 + assert rc.Legacy.fallback(None, None, None) is None + assert rc.Legacy.fallback(None, None, None, default=7) == 7 From b91938c8875b8a94888f11cf7a31ef9e18ab3246 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 15:34:07 +0200 Subject: [PATCH 129/154] Revert commented import --- src/ekore/anomalous_dimensions/polarized/space_like/as2.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/ekore/anomalous_dimensions/polarized/space_like/as2.py b/src/ekore/anomalous_dimensions/polarized/space_like/as2.py index e8ac4f221..95134d7ae 100644 --- 
a/src/ekore/anomalous_dimensions/polarized/space_like/as2.py +++ b/src/ekore/anomalous_dimensions/polarized/space_like/as2.py @@ -9,8 +9,7 @@ # Non Singlet sector is swapped from ...unpolarized.space_like.as2 import gamma_nsm as gamma_nsp - -# from ...unpolarized.space_like.as2 import gamma_nsp as gamma_nsm +from ...unpolarized.space_like.as2 import gamma_nsp as gamma_nsm @nb.njit(cache=True) From 3bd28a75cc0cce876aee7487d0b0578ab03ddc29 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 15:38:28 +0200 Subject: [PATCH 130/154] Use a_em in unit test --- tests/eko/test_couplings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/eko/test_couplings.py b/tests/eko/test_couplings.py index 6fd901a4d..03e542bed 100644 --- a/tests/eko/test_couplings.py +++ b/tests/eko/test_couplings.py @@ -279,12 +279,12 @@ def test_exact(self): ) if not em_running or qed == 0: np.testing.assert_allclose( - sc_expanded.a(q2)[1], + sc_expanded.a_em(q2), alpharef[1] / (4 * np.pi), rtol=1e-10, ) np.testing.assert_allclose( - sc_exact.a(q2)[1], + sc_exact.a_em(q2), alpharef[1] / (4 * np.pi), rtol=1e-10, ) From 9b3a775b9fe4ad8db2ccc98c29ed8d77cb3feed7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 15:50:02 +0200 Subject: [PATCH 131/154] Add unit test for eko/io/access --- tests/eko/io/test_access.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 tests/eko/io/test_access.py diff --git a/tests/eko/io/test_access.py b/tests/eko/io/test_access.py new file mode 100644 index 000000000..86d24bf1f --- /dev/null +++ b/tests/eko/io/test_access.py @@ -0,0 +1,31 @@ +import pytest + +from eko.io import access + + +def test_writeable(tmp_path): + c = access.AccessConfigs(tmp_path, False, True) + assert c.read + c.assert_open() + assert c.write + c.assert_writeable() + + +def test_readable(tmp_path): + c = access.AccessConfigs(tmp_path, True, True) + assert c.read + c.assert_open() + assert not c.write + 
with pytest.raises(access.ReadOnlyOperator): + c.assert_writeable() + + +def test_closed(tmp_path): + for ro in [True, False]: + c = access.AccessConfigs(tmp_path, ro, False) + assert not c.read + with pytest.raises(access.ClosedOperator): + c.assert_open() + assert not c.write + with pytest.raises(access.ClosedOperator): + c.assert_writeable() From 84f23243a879b5f4da98f8cd748fe66bf2c08170 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 9 May 2023 15:37:12 +0200 Subject: [PATCH 132/154] Restore old runner as default --- src/eko/runner/__init__.py | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/src/eko/runner/__init__.py b/src/eko/runner/__init__.py index 80481bd53..a487cdbaf 100644 --- a/src/eko/runner/__init__.py +++ b/src/eko/runner/__init__.py @@ -1,12 +1,10 @@ """Manage steps to DGLAP solution, and operator creation.""" import os -from pathlib import Path from typing import Union -from ..io import runcards from ..io.runcards import OperatorCard, TheoryCard from ..io.types import RawCard -from . import legacy, managed +from . 
import legacy # TODO: drop this altogether, replacing just with managed.solve @@ -39,18 +37,3 @@ def solve( """ # TODO: drop this legacy.Runner(theory_card, operators_card, path).compute() - - -# TODO: drop this, and directly re-export manaaged.solve -def solve_jets( - theory_card: Union[RawCard, TheoryCard], - operators_card: Union[RawCard, OperatorCard], - path: os.PathLike, -): - """Implement solve for new output.""" - new_theory, new_operator = runcards.update(theory_card, operators_card) - managed.solve(new_theory, new_operator, Path(path)) - - -# TODO: drop this before merging #242 -solve = solve_jets From d9847096b3d3a227ae5973bcf89faaa441831c04 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Tue, 9 May 2023 16:07:43 +0200 Subject: [PATCH 133/154] Avoid importing from top-level Since it might cause cyclic imports, we should always import from siblings, never from parents I forgot, my bad... --- src/eko/runner/parts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eko/runner/parts.py b/src/eko/runner/parts.py index dbb0882ea..e385b8f49 100644 --- a/src/eko/runner/parts.py +++ b/src/eko/runner/parts.py @@ -12,11 +12,11 @@ """ import numpy as np -from .. import EKO from .. import evolution_operator as evop from ..evolution_operator import matching_condition from ..evolution_operator import operator_matrix_element as ome from ..evolution_operator import physical +from ..io import EKO from ..io.items import Evolution, Matching, Operator from ..quantities.heavy_quarks import QuarkMassScheme from . 
import commons From a69176514185a5fc83653611a6a6946c6191095f Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 16:51:03 +0200 Subject: [PATCH 134/154] Add unit test for eko/io/inventory --- src/eko/io/inventory.py | 13 +++-- tests/eko/io/test_inventory.py | 93 ++++++++++++++++++++++++++++++++++ 2 files changed, 101 insertions(+), 5 deletions(-) create mode 100644 tests/eko/io/test_inventory.py diff --git a/src/eko/io/inventory.py b/src/eko/io/inventory.py index 5116448a1..efdd2e9b0 100644 --- a/src/eko/io/inventory.py +++ b/src/eko/io/inventory.py @@ -102,10 +102,9 @@ def __getitem__(self, header: H) -> Optional[Operator]: # in case of contentless, check header availability instead if self.contentless: - if self.lookup(stem, header=True): - self.cache[header] = None - return None - raise LookupError(f"Impossible to retrieve {header} from {self}.") + self.lookup(stem, header=True) + self.cache[header] = None + return None # for contentful inventories, check operator availability oppath = self.lookup(stem) @@ -117,7 +116,7 @@ def __getitem__(self, header: H) -> Optional[Operator]: return op def __setitem__(self, header: H, operator: Optional[Operator]): - """Set operator for given :math:`mu^2`. + """Set operator for given header. Header and operator are automatically dumped on disk. @@ -179,6 +178,10 @@ def __iter__(self): """ yield from self.cache + def __len__(self): + """Return the number of elements in the cache.""" + return len(self.cache) + def sync(self): """Sync the headers in the cache with the content on disk. 
diff --git a/tests/eko/io/test_inventory.py b/tests/eko/io/test_inventory.py new file mode 100644 index 000000000..da0c1e6ee --- /dev/null +++ b/tests/eko/io/test_inventory.py @@ -0,0 +1,93 @@ +from dataclasses import dataclass + +import numpy as np +import pytest + +from eko.io import access, inventory, items + + +@dataclass(frozen=True) +class FakeH(items.Header): + blub: int + + +def test_contentless(tmp_path): + acw = inventory.AccessConfigs(tmp_path, False, True) + iw = inventory.Inventory(tmp_path, acw, FakeH, contentless=True, name="Bla") + assert "Bla" in str(iw) + one = FakeH(1) + assert one not in iw + assert len(list(tmp_path.glob("*"))) == 0 + # set an element + iw[one] = "blub" + assert one in iw + assert iw[one] is None + ls = list(tmp_path.glob("*")) + assert len(iw) == 1 + assert len(ls) == 1 + assert ls[0].suffix == inventory.HEADER_EXT + # let's make a second read-only instance + acr = inventory.AccessConfigs(tmp_path, True, True) + ir = inventory.Inventory(tmp_path, acr, FakeH, contentless=True, name="noihaha") + assert "Bla" not in str(ir) + assert one not in ir + assert ir[one] is None + # deletion is empty for contentless + del iw[one] + assert one in iw + assert iw[one] is None + assert len(list(tmp_path.glob("*"))) == 1 + iw.empty() + assert len(list(tmp_path.glob("*"))) == 1 + # non-existant + two = FakeH(2) + assert two not in iw + with pytest.raises(inventory.LookupError): + iw[two] + + +def test_contentfull(tmp_path): + def o(): + return items.Operator(np.random.rand(2, 2, 2, 2)) + + acw = inventory.AccessConfigs(tmp_path, False, True) + iw = inventory.Inventory(tmp_path, acw, FakeH, name="Bla") + one = FakeH(1) + assert one not in iw + assert len(list(tmp_path.glob("*"))) == 0 + # set an element + o_one = o() + iw[one] = o_one + assert one in iw + assert iw[one] is not None + assert len(iw) == 1 + assert len(list(tmp_path.glob("*" + inventory.HEADER_EXT))) == 1 + assert len(list(tmp_path.glob("*" + inventory.COMPRESSED_EXT))) == 1 + # 
let's make a second read-only instance + acr = inventory.AccessConfigs(tmp_path, True, True) + ir = inventory.Inventory(tmp_path, acr, FakeH, name="noihaha") + assert "Bla" not in str(ir) + assert one not in ir + # if we sync we know the header is there + ~ir + assert one in ir + assert len(ir) == 1 + # do an actual read to load + np.testing.assert_allclose(ir[one].operator, o_one.operator) + # but we can not write + with pytest.raises(access.ReadOnlyOperator): + ir[one] = o() + # let's overwrite the op in the writable, so they diverge + o_one_p = o() + iw[one] = o_one_p + assert len(iw) == 1 + np.testing.assert_allclose(iw.cache[one].operator, o_one_p.operator) + np.testing.assert_allclose(ir.cache[one].operator, o_one.operator) + # let's actually read again + ~ir + np.testing.assert_allclose(ir[one].operator, o_one_p.operator) + # non-existant + two = FakeH(2) + assert two not in ir + with pytest.raises(inventory.LookupError): + ir[two] From e1f9908cf4f439a87ef4b87cdb9f0822c2a7d5b7 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 17:44:00 +0200 Subject: [PATCH 135/154] Drop rng from ekore/h/test_log --- tests/ekore/harmonics/test_log_functions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/ekore/harmonics/test_log_functions.py b/tests/ekore/harmonics/test_log_functions.py index 3f8bf0035..205bc6445 100644 --- a/tests/ekore/harmonics/test_log_functions.py +++ b/tests/ekore/harmonics/test_log_functions.py @@ -10,7 +10,7 @@ def test_lm1pm2(): def mellin_lm1pm2(x, k, N): return x ** (N - 1) * (1 - x) ** 2 * np.log(1 - x) ** k - Ns = 1.0 + 100 * np.random.rand(3) + Ns = [1.0, 1.5, 2.0, 2.34, 56.789] for N in Ns: sx = hsx(N, 4) @@ -31,7 +31,7 @@ def test_lm1pm1(): def mellin_lm1pm1(x, k, N): return x ** (N - 1) * (1 - x) * np.log(1 - x) ** k - Ns = 1.0 + 100 * np.random.rand(3) + Ns = [1.0, 1.5, 2.0, 2.34, 56.789] for N in Ns: sx = hsx(N, 4) @@ -52,7 +52,7 @@ def test_lm1p(): def mellin_lm1p(x, k, N): return x ** (N - 
1) * np.log(1 - x) ** k - Ns = 1.0 + 100 * np.random.rand(3) + Ns = [1.0, 1.5, 2.0, 2.34, 56.789] for N in Ns: sx = hsx(N, 5) From 4d493a613cab847ac3b1d27075f6181cb39f5aa3 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 17:49:11 +0200 Subject: [PATCH 136/154] Add test for file conflict to eko/io/inventory --- tests/eko/io/test_inventory.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/eko/io/test_inventory.py b/tests/eko/io/test_inventory.py index da0c1e6ee..38c5ae7b5 100644 --- a/tests/eko/io/test_inventory.py +++ b/tests/eko/io/test_inventory.py @@ -91,3 +91,14 @@ def o(): assert two not in ir with pytest.raises(inventory.LookupError): ir[two] + + +def test_fs(tmp_path): + acw = inventory.AccessConfigs(tmp_path, False, True) + iw = inventory.Inventory(tmp_path, acw, FakeH, contentless=True, name="Bla") + aa = "aa" + inventory.HEADER_EXT + ab = "ab" + inventory.HEADER_EXT + (tmp_path / aa).write_text("aa") + (tmp_path / ab).write_text("ab") + with pytest.raises(inventory.LookupError, match="many"): + iw.lookup("a", True) From 0e25356119cd05fffaf64d6f9542c4f8242b7f4b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 18:07:47 +0200 Subject: [PATCH 137/154] Run new solve in unit tests --- tests/eko/runner/__init__.py | 24 ++++++++++++++++++++++++ tests/eko/runner/test_legacy.py | 27 ++------------------------- tests/eko/runner/test_managed.py | 31 +++++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 25 deletions(-) create mode 100644 tests/eko/runner/test_managed.py diff --git a/tests/eko/runner/__init__.py b/tests/eko/runner/__init__.py index e69de29bb..c8ec5b854 100644 --- a/tests/eko/runner/__init__.py +++ b/tests/eko/runner/__init__.py @@ -0,0 +1,24 @@ +import numpy as np + + +def check_shapes(o, txs, ixs, theory_card, operators_card): + tpids = len(o.bases.targetpids) + ipids = len(o.bases.inputpids) + op_shape = (tpids, len(txs), ipids, len(ixs)) + + # check output = input + 
np.testing.assert_allclose(o.xgrid.raw, operators_card.xgrid.raw) + # targetgrid and inputgrid in the opcard are now ignored, we are testing this + np.testing.assert_allclose( + o.bases.targetgrid.raw, + txs.raw, + ) + np.testing.assert_allclose(o.bases.inputgrid.raw, ixs.raw) + np.testing.assert_allclose(o.mu20, operators_card.mu20) + # check available operators + ~o.operators + assert len(o.mu2grid) == len(operators_card.mu2grid) + assert list(o.mu2grid) == operators_card.mu2grid + for _, ops in o.items(): + assert ops.operator.shape == op_shape + assert ops.error.shape == op_shape diff --git a/tests/eko/runner/test_legacy.py b/tests/eko/runner/test_legacy.py index 1d299a6f0..60395acc5 100644 --- a/tests/eko/runner/test_legacy.py +++ b/tests/eko/runner/test_legacy.py @@ -9,6 +9,8 @@ from eko.io.runcards import TheoryCard from eko.quantities.heavy_quarks import QuarkMassScheme +from . import check_shapes + def test_raw(theory_card, operator_card, tmp_path): """we don't check the content here, but only the shape""" @@ -44,38 +46,13 @@ class FakeEM(enum.Enum): check_shapes(eko_, eko_.xgrid, eko_.xgrid, theory_card, operator_card) -def check_shapes(o, txs, ixs, theory_card, operators_card): - tpids = len(o.bases.targetpids) - ipids = len(o.bases.inputpids) - op_shape = (tpids, len(txs), ipids, len(ixs)) - - # check output = input - np.testing.assert_allclose(o.xgrid.raw, operators_card.xgrid.raw) - # targetgrid and inputgrid in the opcard are now ignored, we are testing this - np.testing.assert_allclose( - o.bases.targetgrid.raw, - txs.raw, - ) - np.testing.assert_allclose(o.bases.inputgrid.raw, ixs.raw) - np.testing.assert_allclose(o.mu20, operators_card.mu20) - # check available operators - ~o.operators - assert len(o.mu2grid) == len(operators_card.mu2grid) - assert list(o.mu2grid) == operators_card.mu2grid - for _, ops in o.items(): - assert ops.operator.shape == op_shape - assert ops.error.shape == op_shape - - def test_vfns(theory_ffns, operator_card, tmp_path): 
- # change targetpids path = tmp_path / "eko.tar" tc: TheoryCard = theory_ffns(3) oc = copy.deepcopy(operator_card) tc.heavy.matching_ratios.c = 1.0 tc.heavy.matching_ratios.b = 1.0 tc.order = (2, 0) - oc.debug.skip_non_singlet = False r = eko.runner.legacy.Runner(tc, oc, path=path) r.compute() with EKO.read(path) as eko_: diff --git a/tests/eko/runner/test_managed.py b/tests/eko/runner/test_managed.py new file mode 100644 index 000000000..e75fc8676 --- /dev/null +++ b/tests/eko/runner/test_managed.py @@ -0,0 +1,31 @@ +import copy + +import numpy as np + +from eko import EKO +from eko.io.runcards import TheoryCard +from eko.runner.managed import solve + +from . import check_shapes + + +def test_raw(theory_card, operator_card, tmp_path): + """we don't check the content here, but only the shape""" + path = tmp_path / "eko.tar" + tc = theory_card + oc = operator_card + solve(tc, oc, path=path) + with EKO.read(path) as eko_: + check_shapes(eko_, eko_.xgrid, eko_.xgrid, tc, oc) + + +def test_vfns(theory_ffns, operator_card, tmp_path): + path = tmp_path / "eko.tar" + tc: TheoryCard = theory_ffns(3) + oc = copy.deepcopy(operator_card) + tc.heavy.matching_ratios.c = 1.0 + tc.heavy.matching_ratios.b = 1.0 + tc.order = (2, 0) + solve(tc, oc, path=path) + with EKO.read(path) as eko_: + check_shapes(eko_, eko_.xgrid, eko_.xgrid, tc, oc) From 0bd92660657a7a28f94db0a6815ec5f2d9eafa45 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 18:23:49 +0200 Subject: [PATCH 138/154] Add unit tests for eko/io/items --- tests/eko/io/test_items.py | 53 +++++++++++++++++++++++++++++++++----- 1 file changed, 47 insertions(+), 6 deletions(-) diff --git a/tests/eko/io/test_items.py b/tests/eko/io/test_items.py index d2bba3919..762b10e11 100644 --- a/tests/eko/io/test_items.py +++ b/tests/eko/io/test_items.py @@ -4,29 +4,59 @@ import numpy as np import pytest -from eko.io import struct +from eko.io.items import Evolution, Matching, Operator, Target +from eko.matchings import Atlas 
+from eko.quantities.heavy_quarks import MatchingScales + + +def test_evolution(): + tc = Atlas(MatchingScales([1.5, 5.0, 170.0]), (1.65, 4)) + p = tc.matched_path((30, 5)) + he = Evolution.from_atlas(p[0], True) + assert he.origin == 1.65 + assert he.target == 5 + assert he.nf == 4 + assert he.cliff + assert he.as_atlas == p[0] + + +def test_matching(): + tc = Atlas(MatchingScales([1.5, 5.0, 170.0]), (1.65, 4)) + p = tc.matched_path((30, 5)) + hm = Matching.from_atlas(p[1]) + assert hm.hq == 5 + assert hm.scale == 5.0 + assert hm.as_atlas == p[1] + + +def test_target(): + ep = (10.0, 4) + ht = Target.from_ep(ep) + assert ht.scale == 10.0 + assert ht.nf == 4 + assert ht.ep == ep class TestOperator: def test_value_only(self): v = np.random.rand(2, 2) - opv = struct.Operator(operator=v) + opv = Operator(operator=v) assert opv.error is None stream = io.BytesIO() opv.save(stream) stream.seek(0) - opv_ = struct.Operator.load(stream) + opv_ = Operator.load(stream) np.testing.assert_allclose(opv.operator, opv_.operator) np.testing.assert_allclose(v, opv_.operator) assert opv_.error is None def test_value_and_error(self): v, e = np.random.rand(2, 2, 2) - opve = struct.Operator(operator=v, error=e) + opve = Operator(operator=v, error=e) stream = io.BytesIO() opve.save(stream) stream.seek(0) - opve_ = struct.Operator.load(stream) + opve_ = Operator.load(stream) np.testing.assert_allclose(opve.operator, opve_.operator) np.testing.assert_allclose(v, opve_.operator) np.testing.assert_allclose(opve.error, opve_.error) @@ -35,4 +65,15 @@ def test_value_and_error(self): def test_load_error_is_not_lz4(self, monkeypatch): stream = io.BytesIO() with pytest.raises(RuntimeError, match="LZ4"): - struct.Operator.load(stream) + Operator.load(stream) + + def test_load_error(self, monkeypatch): + # TODO see the other TODO at Operator.load + v, e = np.random.rand(2, 2, 2) + opve = Operator(operator=v, error=e) + stream = io.BytesIO() + opve.save(stream) + stream.seek(0) + 
monkeypatch.setattr(np, "load", lambda _: None) + with pytest.raises(ValueError): + Operator.load(stream) From 9c73942f699c598ebaa85a37240f7a7ecc608f49 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Tue, 9 May 2023 18:34:05 +0200 Subject: [PATCH 139/154] Drop dead code from eko/qnt/cpl --- src/eko/quantities/couplings.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/eko/quantities/couplings.py b/src/eko/quantities/couplings.py index 12511d960..449abab38 100644 --- a/src/eko/quantities/couplings.py +++ b/src/eko/quantities/couplings.py @@ -31,19 +31,6 @@ class CouplingsInfo(DictLike): """ em_running: bool = False - @property - def alphas_ref(self): - r""":math:`\alpha_s` reference point.""" - return CouplingRef.typed(self.alphas, self.scale) - - @property - def alphaem_ref(self): - r""":math:`\alpha` reference point.""" - if self.em_running: - raise ValueError("Non-running electromagnetic coupling.") - - return CouplingRef.typed(self.alphaem, self.scale) - @property def values(self): """Collect only couplings values.""" From ac405b21cb3773ac1948589b4404496218de6e48 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 10 May 2023 17:18:37 +0200 Subject: [PATCH 140/154] Fix alphas tutorial --- doc/source/overview/tutorials/alpha_s.ipynb | 32 +++++++++------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/doc/source/overview/tutorials/alpha_s.ipynb b/doc/source/overview/tutorials/alpha_s.ipynb index 4752b596d..a9d75c0ca 100644 --- a/doc/source/overview/tutorials/alpha_s.ipynb +++ b/doc/source/overview/tutorials/alpha_s.ipynb @@ -18,13 +18,10 @@ "\n", "In `eko` the running of $\\alpha_s$ is managed by an independent class `eko.couplings.Couplings`.\n", "\n", - "To instantiate this object you need to specify at least the boundary conditions on $\\alpha_s(\\mu_{R,0})$ (`alpha_s_ref`,`scale_ref`),\n", - "and the masses of the heavy quarks with the relative thresholds ratios (`heavy_quark_masses`, `thresholds_ratios`). 
\n", - "Note that all scales have to be given squared. \n", + "To instantiate this object you need to specify at least the boundary conditions on $\\alpha_s(\\mu_{R,0})$ (`alphas`,`scale`), the masses of the heavy quarks with the relative thresholds ratios (`heavy_quark_masses`, `thresholds_ratios`), and the (QCD,QED) perturbative order (`order`).\n", "\n", - "You can also pass some optional arguments such as the (QCD,QED) perturbative order (`order`), \n", - "see [here](https://eko.readthedocs.io/en/latest/modules/eko/eko.html#eko.couplings.Couplings)\n", - "for the complete list." + "See [here](https://eko.readthedocs.io/en/latest/modules/eko/eko.html#eko.couplings.Couplings)\n", + "for detailed API." ] }, { @@ -36,17 +33,14 @@ }, "outputs": [], "source": [ - "import math\n", "import numpy as np\n", "from eko.couplings import Couplings\n", - "from eko.io import types as ekotypes\n", + "from eko.quantities.couplings import CouplingsInfo, CouplingEvolutionMethod\n", "from eko.quantities.heavy_quarks import MatchingScales, QuarkMassScheme\n", "\n", "# set the (alpha_s, alpha_em) reference values\n", - "alphas_ref = ekotypes.FloatRef(value=0.118, scale=91.0)\n", - "alphaem_ref = ekotypes.FloatRef(value=0.007496252, scale=math.nan)\n", - "couplings_ref = ekotypes.CouplingsRef(\n", - " alphas=alphas_ref, alphaem=alphaem_ref, num_flavs_ref=None, max_num_flavs=5\n", + "couplings_ref = CouplingsInfo(\n", + " alphas=0.118, alphaem=0.007496252, scale=91.0, num_flavs_ref=None, max_num_flavs=5\n", ")\n", "\n", "# set heavy quark masses and their threshold ratios\n", @@ -59,7 +53,7 @@ "sc = Couplings(\n", " couplings_ref,\n", " order,\n", - " ekotypes.CouplingEvolutionMethod.EXACT,\n", + " CouplingEvolutionMethod.EXACT,\n", " heavy_quark_masses,\n", " QuarkMassScheme.POLE,\n", " thresholds_ratios,\n", @@ -81,13 +75,15 @@ "cell_type": "code", "execution_count": 2, "id": "8b908bb3-cbcc-4d7f-b631-27f1f37464f8", - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [ { 
"name": "stdout", "output_type": "stream", "text": [ - "The value of alpha_s at Q^2=100 GeV^2 is: 0.17805952806666522\n" + "The value of alpha_s at Q^2=100 GeV^2 is: 0.17804304498411003\n" ] } ], @@ -113,9 +109,9 @@ "hash": "0a84ba3ac8703c04e87bc503a7d00188dfd591ad56130da93c406115a1e4a408" }, "kernelspec": { - "display_name": "eko-VHUucTmo-py3.10", + "display_name": "eko-KkPVjVhh-py3.10", "language": "python", - "name": "eko-vhuuctmo-py3.10" + "name": "eko-kkpvjvhh-py3.10" }, "language_info": { "codemirror_mode": { @@ -127,7 +123,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.7" + "version": "3.10.9" } }, "nbformat": 4, From 7abef2118ee281a34426ffa700c02676df296137 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Wed, 10 May 2023 17:59:47 +0200 Subject: [PATCH 141/154] Fix tutorials --- doc/source/overview/tutorials/dglap.ipynb | 45 +++++---- doc/source/overview/tutorials/output.ipynb | 26 ++--- doc/source/overview/tutorials/pdf.ipynb | 105 ++++++++++++--------- 3 files changed, 98 insertions(+), 78 deletions(-) diff --git a/doc/source/overview/tutorials/dglap.ipynb b/doc/source/overview/tutorials/dglap.ipynb index d7f75847e..0e271a00c 100644 --- a/doc/source/overview/tutorials/dglap.ipynb +++ b/doc/source/overview/tutorials/dglap.ipynb @@ -34,8 +34,8 @@ } ], "source": [ - "import eko\n", "import pathlib\n", + "import eko\n", "\n", "eko.version.__version__" ] @@ -68,7 +68,7 @@ "th_card = example.theory()\n", "op_card = example.operator()\n", "# here we replace the grid with a very minimal one, to speed up the example\n", - "op_card.rotations.xgrid = [1e-3, 1e-2, 1e-1, 5e-1, 1.]" + "op_card.xgrid = [1e-3, 1e-2, 1e-1, 5e-1, 1.]" ] }, { @@ -89,19 +89,20 @@ "data": { "text/plain": [ "{'order': [1, 0],\n", - " 'couplings': {'alphas': {'value': 0.118, 'scale': 91.2},\n", - " 'alphaem': {'value': 0.007496252, 'scale': nan},\n", + " 'couplings': {'alphas': 0.118,\n", + " 'alphaem': 0.007496252,\n", + " 'scale': 
91.2,\n", " 'max_num_flavs': 6,\n", - " 'num_flavs_ref': None},\n", - " 'num_flavs_init': None,\n", - " 'num_flavs_max_pdf': 6,\n", - " 'intrinsic_flavors': [4],\n", - " 'quark_masses': {'c': {'value': 2.0, 'scale': nan},\n", - " 'b': {'value': 4.5, 'scale': nan},\n", - " 't': {'value': 173.07, 'scale': nan}},\n", - " 'quark_masses_scheme': 'pole',\n", - " 'matching': {'c': 1.0, 'b': 1.0, 't': 1.0},\n", - " 'xif': 1.0}" + " 'num_flavs_ref': 5,\n", + " 'em_running': False},\n", + " 'heavy': {'num_flavs_init': 4,\n", + " 'num_flavs_max_pdf': 6,\n", + " 'intrinsic_flavors': [4],\n", + " 'masses': [[2.0, nan], [4.5, nan], [173.07, nan]],\n", + " 'masses_scheme': 'pole',\n", + " 'matching_ratios': [1.0, 1.0, 1.0]},\n", + " 'xif': 1.0,\n", + " 'n3lo_ad_variation': [0, 0, 0, 0]}" ] }, "execution_count": 3, @@ -123,23 +124,19 @@ "data": { "text/plain": [ "{'mu0': 1.65,\n", + " 'mugrid': [[100.0, 5]],\n", + " 'xgrid': [0.001, 0.01, 0.1, 0.5, 1.0],\n", " 'configs': {'evolution_method': 'iterate-exact',\n", " 'ev_op_max_order': [10, 0],\n", " 'ev_op_iterations': 10,\n", - " 'interpolation_polynomial_degree': 4,\n", - " 'interpolation_is_log': True,\n", " 'scvar_method': None,\n", " 'inversion_method': None,\n", + " 'interpolation_polynomial_degree': 4,\n", + " 'interpolation_is_log': True,\n", + " 'polarized': False,\n", + " 'time_like': False,\n", " 'n_integration_cores': 0},\n", " 'debug': {'skip_singlet': False, 'skip_non_singlet': False},\n", - " 'rotations': {'xgrid': [0.001, 0.01, 0.1, 0.5, 1.0],\n", - " 'pids': [22, -6, -5, -4, -3, -2, -1, 21, 1, 2, 3, 4, 5, 6],\n", - " '_targetgrid': None,\n", - " '_inputgrid': None,\n", - " '_targetpids': None,\n", - " '_inputpids': None},\n", - " '_mugrid': [100.0],\n", - " '_mu2grid': None,\n", " 'eko_version': '0.0.0'}" ] }, diff --git a/doc/source/overview/tutorials/output.ipynb b/doc/source/overview/tutorials/output.ipynb index 5387390ad..47345ac35 100644 --- a/doc/source/overview/tutorials/output.ipynb +++ 
b/doc/source/overview/tutorials/output.ipynb @@ -21,7 +21,9 @@ "cell_type": "code", "execution_count": 1, "id": "51dbf608-ccfb-4fb5-aca3-723a5f280f65", - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "import eko" @@ -39,7 +41,9 @@ "cell_type": "code", "execution_count": 2, "id": "daae3811-e7a6-4a7c-9f55-eef8ea1564b8", - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [ { "name": "stdout", @@ -50,7 +54,7 @@ } ], "source": [ - "with eko.EKO.open(\"./myeko.tar\") as evolution_operator:\n", + "with eko.EKO.read(\"./myeko.tar\") as evolution_operator:\n", " print(type(evolution_operator))" ] }, @@ -72,13 +76,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "TheoryCard(order=(1, 0), couplings=CouplingsRef(alphas=reference_running..ReferenceRunning(value=0.118, scale=91.2), alphaem=reference_running..ReferenceRunning(value=0.007496252, scale=nan), num_flavs_ref=None, max_num_flavs=6), num_flavs_init=None, num_flavs_max_pdf=6, intrinsic_flavors=[4], quark_masses=heavy_quark..HeavyQuarks(c=reference_running..ReferenceRunning(value=2.0, scale=nan), b=reference_running..ReferenceRunning(value=4.5, scale=nan), t=reference_running..ReferenceRunning(value=173.07, scale=nan)), quark_masses_scheme=, matching=heavy_quark..HeavyQuarks(c=1.0, b=1.0, t=1.0), xif=1.0)\n", - "OperatorCard(mu0=1.65, configs=Configs(evolution_method=, ev_op_max_order=(10, 0), ev_op_iterations=10, interpolation_polynomial_degree=4, interpolation_is_log=True, scvar_method=None, inversion_method=None, n_integration_cores=0), debug=Debug(skip_singlet=False, skip_non_singlet=False), rotations=Rotations(xgrid=, pids=array([22, -6, -5, -4, -3, -2, -1, 21, 1, 2, 3, 4, 5, 6]), _targetgrid=None, _inputgrid=None, _targetpids=None, _inputpids=None), _mugrid=array([100.]), _mu2grid=None, eko_version='0.0.0')\n" + "TheoryCard(order=(1, 0), couplings=CouplingsInfo(alphas=0.118, alphaem=0.007496252, scale=91.2, max_num_flavs=6, num_flavs_ref=5, em_running=False), 
heavy=HeavyInfo(num_flavs_init=4, num_flavs_max_pdf=6, intrinsic_flavors=[4, 5, 6], masses=[[2.0, nan], [4.5, nan], [173.07, nan]], masses_scheme=, matching_ratios=[1.0, 1.0, 1.0]), xif=1.0, n3lo_ad_variation=(0, 0, 0, 0))\n", + "OperatorCard(mu0=1.65, mugrid=[(100.0, 5)], xgrid=, configs=Configs(evolution_method=, ev_op_max_order=(10, 0), ev_op_iterations=10, scvar_method=None, inversion_method=None, interpolation_polynomial_degree=4, interpolation_is_log=True, polarized=False, time_like=False, n_integration_cores=0), debug=Debug(skip_singlet=False, skip_non_singlet=False), eko_version='0.0.0')\n" ] } ], "source": [ - "with eko.EKO.open(\"./myeko.tar\") as evolution_operator:\n", + "with eko.EKO.read(\"./myeko.tar\") as evolution_operator:\n", " # obtain theory card\n", " print(evolution_operator.theory_card)\n", " # or operator card\n", @@ -104,13 +108,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "[10000.]\n" + "[(10000.0, 5)]\n" ] } ], "source": [ - "with eko.EKO.open(\"./myeko.tar\") as evolution_operator:\n", - " print(evolution_operator.mu2grid)" + "with eko.EKO.read(\"./myeko.tar\") as evolution_operator:\n", + " print(evolution_operator.evolgrid)" ] }, { @@ -138,8 +142,8 @@ } ], "source": [ - "with eko.EKO.open(\"./myeko.tar\") as evolution_operator:\n", - " with evolution_operator.operator(10000.) 
as op:\n", + "with eko.EKO.read(\"./myeko.tar\") as evolution_operator:\n", + " with evolution_operator.operator((10000.,5)) as op:\n", " print(f\"operator: {op.operator.shape}\")\n", " print(f\"error: {op.error.shape}\")" ] diff --git a/doc/source/overview/tutorials/pdf.ipynb b/doc/source/overview/tutorials/pdf.ipynb index c46408d31..4607ef891 100644 --- a/doc/source/overview/tutorials/pdf.ipynb +++ b/doc/source/overview/tutorials/pdf.ipynb @@ -49,6 +49,7 @@ "metadata": {}, "outputs": [], "source": [ + "import pathlib\n", "import eko\n", "from banana import toy\n", "pdf = toy.mkPDF(\"\", 0)" @@ -70,7 +71,7 @@ "outputs": [], "source": [ "from ekobox.apply import apply_pdf\n", - "with eko.EKO.open(\"./myeko.tar\") as evolution_operator:\n", + "with eko.EKO.read(\"./myeko.tar\") as evolution_operator:\n", " evolved_pdfs = apply_pdf(evolution_operator, pdf)" ] }, @@ -123,7 +124,7 @@ { "data": { "text/plain": [ - "array([ 2.40543555e+04, 3.77471543e+02, 3.33983197e+01, -1.16579864e+01,\n", + "array([ 2.58966998e+04, 4.74768306e+02, 3.57939013e+01, -1.03946292e+01,\n", " 0.00000000e+00])" ] }, @@ -194,8 +195,8 @@ "th_card = example.theory()\n", "op_card = example.operator()\n", "# here we replace the grid with a very minimal one, to speed up the example\n", - "op_card.rotations.xgrid = eko.interpolation.XGrid([1e-3, 1e-2, 1e-1, 5e-1, 1.])\n", - "op_card.mu2grid = [100., 10000.]\n", + "op_card.xgrid = eko.interpolation.XGrid([1e-3, 1e-2, 1e-1, 5e-1, 1.])\n", + "op_card.mugrid = [(10.,5), (100.,5)]\n", "# set QCD LO evolution\n", "th_card.orders = (1,0)" ] @@ -218,6 +219,13 @@ "id": "8c18c327", "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Overwriting old PDF installation\n" + ] + }, { "name": "stdout", "output_type": "stream", @@ -228,13 +236,15 @@ ], "source": [ "from ekobox.evol_pdf import evolve_pdfs\n", + "path = pathlib.Path(\"./myeko2.tar\")\n", + "path.unlink(missing_ok=True)\n", "evolve_pdfs(\n", " [pdf],\n", " 
th_card,\n", " op_card,\n", " install=True,\n", " name=\"Evolved_PDF\",\n", - " store_path=\"myeko2.tar\"\n", + " store_path=path\n", ")" ] }, @@ -285,7 +295,7 @@ "output_type": "stream", "text": [ "has gluon? True\n", - "xg(x=0.01, Q2=89.1) = 4.0502308231988415\n" + "xg(x=0.01, Q2=89.1) = 4.399576390180355\n" ] } ], @@ -337,32 +347,32 @@ } ], "source": [ + "from math import nan\n", "import numpy as np\n", "import lhapdf\n", "from ekobox.cards import example\n", "from eko.interpolation import make_grid\n", + "from eko.quantities.heavy_quarks import QuarkMassRef,HeavyQuarks\n", "\n", "# get the PDF object\n", "ct14llo = lhapdf.mkPDF(\"CT14llo\")\n", "\n", "# setup the operator card\n", "op_card = example.operator()\n", - "op_card.rotations.xgrid = eko.interpolation.XGrid(make_grid(30, 30)) # x grid\n", - "op_card.mu2grid = np.geomspace(10., 1e5, 5).tolist() # Q2 grid\n", + "op_card.xgrid = eko.interpolation.XGrid(make_grid(30, 30)) # x grid\n", + "op_card.mugrid = [(float(q),5) for q in np.geomspace(5., 100, 5)] # Q2 grid\n", "op_card.mu0 = 1.295000 # starting point for the evolution \n", "\n", "# setup the theory card - this can be mostly inferred from the PDF's .info file\n", "\n", "th_card = example.theory()\n", "th_card.orders = (1,0) # QCD LO\n", - "th_card.quark_masses.c.value = 1.3 # charm mass\n", - "th_card.quark_masses.b.value = 4.75 # bottom mass\n", - "th_card.quark_masses.t.value = 172. 
# top mass\n", - "th_card.couplings.alphas.value = 0.130000 # reference value of alpha_s\n", - "th_card.couplings.alphas.scale = 91.1876 # the reference scale at which alpha_s is provided\n", + "th_card.heavy.masses = HeavyQuarks([QuarkMassRef([1.3,nan]), QuarkMassRef([4.75,nan]), QuarkMassRef([172.,nan])]) # quark mass\n", + "th_card.couplings.alphas = 0.130000 # reference value of alpha_s\n", + "th_card.couplings.scale = 91.1876 # the reference scale at which alpha_s is provided\n", "th_card.couplings.num_flavs_ref = 5 # the number of flavors active at the alpha_s reference scale\n", "th_card.couplings.max_num_flavs = 5 # the maximum number of flavors active in the alpha_s evolution\n", - "th_card.num_flavs_init = 3 # the number of flavors active at the reference scale\n", + "th_card.couplings.num_flavs_init = 3 # the number of flavors active at the reference scale\n", "th_card.num_flavs_max_pdf = 5 # the maximum number of flavors active in the pdf evolution." ] }, @@ -380,6 +390,13 @@ "id": "05b81dca", "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Overwriting old PDF installation\n" + ] + }, { "name": "stdout", "output_type": "stream", @@ -390,14 +407,15 @@ ], "source": [ "from ekobox.evol_pdf import evolve_pdfs\n", - "\n", + "path = pathlib.Path(\"./myeko_ct14llo.tar\")\n", + "path.unlink(missing_ok=True)\n", "evolve_pdfs(\n", " [ct14llo],\n", " th_card,\n", " op_card,\n", " install=True,\n", " name=\"my_ct14llo\",\n", - " store_path=\"myeko_ct14llo.tar\"\n", + " store_path=path\n", ")" ] }, @@ -421,32 +439,32 @@ "output_type": "stream", "text": [ "LHAPDF 6.4.0 loading /home/felix/local/share/LHAPDF/my_ct14llo/my_ct14llo_0000.dat\n", - " x Q2 ct14llo my_ct14llo relative_diff\n", - "0 0.000010 10.0 4.560628e+01 4.559560e+01 0.000234\n", - "1 0.000173 10.0 2.185377e+01 2.184884e+01 0.000225\n", - "2 0.003000 10.0 8.734759e+00 8.732856e+00 0.000218\n", - "3 0.051962 10.0 1.873738e+00 1.874169e+00 -0.000230\n", - "4 
0.900000 10.0 4.278472e-05 4.346894e-05 -0.015992\n", - "5 0.000010 100.0 1.288848e+02 1.287965e+02 0.000685\n", - "6 0.000173 100.0 4.661146e+01 4.657867e+01 0.000703\n", - "7 0.003000 100.0 1.323922e+01 1.322967e+01 0.000721\n", - "8 0.051962 100.0 1.978974e+00 1.979055e+00 -0.000041\n", - "9 0.900000 100.0 7.121784e-06 7.245044e-06 -0.017308\n", - "10 0.000010 1000.0 2.272552e+02 2.270758e+02 0.000790\n", - "11 0.000173 1000.0 6.959939e+01 6.954206e+01 0.000824\n", - "12 0.003000 1000.0 1.620144e+01 1.618786e+01 0.000838\n", - "13 0.051962 1000.0 1.924501e+00 1.924600e+00 -0.000051\n", - "14 0.900000 1000.0 2.135864e-06 2.175017e-06 -0.018331\n", - "15 0.000010 10000.0 3.314097e+02 3.311450e+02 0.000799\n", - "16 0.000173 10000.0 9.023010e+01 9.015576e+01 0.000824\n", - "17 0.003000 10000.0 1.825934e+01 1.824477e+01 0.000798\n", - "18 0.051962 10000.0 1.830992e+00 1.831291e+00 -0.000163\n", - "19 0.900000 10000.0 9.288458e-07 9.463689e-07 -0.018866\n", - "20 0.000010 100000.0 4.363142e+02 4.359699e+02 0.000789\n", - "21 0.000173 100000.0 1.085159e+02 1.084286e+02 0.000804\n", - "22 0.003000 100000.0 1.972432e+01 1.970961e+01 0.000746\n", - "23 0.051962 100000.0 1.730267e+00 1.730707e+00 -0.000254\n", - "24 0.900000 100000.0 5.051592e-07 5.156517e-07 -0.020771\n", + " x Q2 ct14llo my_ct14llo relative_diff\n", + "0 0.000010 25.000000 7.635785e+01 7.630461e+01 0.000697\n", + "1 0.000173 25.000000 3.194273e+01 3.192092e+01 0.000683\n", + "2 0.003000 25.000000 1.081843e+01 1.081086e+01 0.000701\n", + "3 0.051962 25.000000 1.958956e+00 1.958632e+00 0.000166\n", + "4 0.900000 25.000000 1.922415e-05 1.955026e-05 -0.016963\n", + "5 0.000010 111.803399 1.333957e+02 1.332985e+02 0.000729\n", + "6 0.000173 111.803399 4.777286e+01 4.773664e+01 0.000758\n", + "7 0.003000 111.803399 1.341028e+01 1.339967e+01 0.000791\n", + "8 0.051962 111.803399 1.978216e+00 1.978130e+00 0.000044\n", + "9 0.900000 111.803399 6.644805e-06 6.753652e-06 -0.016381\n", + "10 0.000010 500.000000 
1.967032e+02 1.965456e+02 0.000801\n", + "11 0.000173 500.000000 6.291393e+01 6.286095e+01 0.000842\n", + "12 0.003000 500.000000 1.542347e+01 1.540996e+01 0.000876\n", + "13 0.051962 500.000000 1.947465e+00 1.947391e+00 0.000038\n", + "14 0.900000 500.000000 2.929060e-06 2.977306e-06 -0.016471\n", + "15 0.000010 2236.067977 2.633266e+02 2.631109e+02 0.000819\n", + "16 0.000173 2236.067977 7.708540e+01 7.701938e+01 0.000856\n", + "17 0.003000 2236.067977 1.700410e+01 1.698928e+01 0.000872\n", + "18 0.051962 2236.067977 1.893923e+00 1.893971e+00 -0.000025\n", + "19 0.900000 2236.067977 1.544450e-06 1.570997e-06 -0.017189\n", + "20 0.000010 10000.000000 3.314097e+02 3.311351e+02 0.000829\n", + "21 0.000173 10000.000000 9.023010e+01 9.015279e+01 0.000857\n", + "22 0.003000 10000.000000 1.825934e+01 1.824402e+01 0.000839\n", + "23 0.051962 10000.000000 1.830992e+00 1.831183e+00 -0.000104\n", + "24 0.900000 10000.000000 9.288458e-07 9.447927e-07 -0.017169\n", "my_ct14llo PDF set, member #0, version 1\n" ] } @@ -461,7 +479,8 @@ "\n", "# collect data\n", "log = {\"x\": [], \"Q2\" : [], \"ct14llo\": [], \"my_ct14llo\": [], \"relative_diff\": []} \n", - "for q2 in np.geomspace(10., 1e5, 5):\n", + "for q in np.geomspace(5., 100, 5):\n", + " q2 = q**2.\n", " for x in np.geomspace(1e-5, 0.9, 5):\n", " value = ct14llo.xfxQ2(pid, x, q2)\n", " my_value = my_ct14llo.xfxQ2(pid, x, q2)\n", From 36d2e1bc859ccadbf446048850b83ccec87daa84 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 10:37:52 +0200 Subject: [PATCH 142/154] Fix eko/qnt/hq and add unit test --- tests/eko/test_quantities.py | 47 ++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 tests/eko/test_quantities.py diff --git a/tests/eko/test_quantities.py b/tests/eko/test_quantities.py new file mode 100644 index 000000000..df5ecdbf2 --- /dev/null +++ b/tests/eko/test_quantities.py @@ -0,0 +1,47 @@ +from math import nan + +import pytest + +from eko.quantities import 
heavy_quarks as hq + + +def test_HeavyQuarks(): + with pytest.raises(ValueError): + hq.MatchingRatios([1, 2, 3, 4]) + l = hq.MatchingRatios([0.5, 2.0, 3.0]) + assert len(l) == 3 + assert l.c == 0.5 + assert l.b == 2.0 + assert l.t == 3.0 + l.c = 0.7 + assert len(l) == 3 + assert l.c == 0.7 + assert l.b == 2.0 + assert l.t == 3.0 + l.b = 2.7 + assert len(l) == 3 + assert l.c == 0.7 + assert l.b == 2.7 + assert l.t == 3.0 + l.t = 3.7 + assert len(l) == 3 + assert l.c == 0.7 + assert l.b == 2.7 + assert l.t == 3.7 + + +def test_HeavyInfo(): + i = hq.HeavyInfo( + num_flavs_init=4, + num_flavs_max_pdf=6, + intrinsic_flavors=[4, 5], + masses=hq.HeavyQuarkMasses( + [ + hq.QuarkMassRef(2.0, nan), + hq.QuarkMassRef(5.0, nan), + hq.QuarkMassRef(100.0, nan), + ] + ), + masses_scheme=hq.QuarkMassScheme.POLE, + matching_ratios=hq.MatchingRatios([1.5, 2.0, 3.0]), + ) From 7e99af498b1e3340f54b0a7ddb8e49931c97a3ab Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 10:39:05 +0200 Subject: [PATCH 143/154] Fix actually eko/qnt/hq and add unit test --- src/eko/quantities/heavy_quarks.py | 18 ------------------ tests/eko/test_quantities.py | 8 +++++--- 2 files changed, 5 insertions(+), 21 deletions(-) diff --git a/src/eko/quantities/heavy_quarks.py b/src/eko/quantities/heavy_quarks.py index ae1ca86c6..de5e3240a 100644 --- a/src/eko/quantities/heavy_quarks.py +++ b/src/eko/quantities/heavy_quarks.py @@ -65,19 +65,6 @@ def t(self, value: T): MatchingScales = HeavyQuarks[MatchingScale] -def scales_from_ratios( - ratios: MatchingRatios, masses: HeavyQuarkMasses -) -> MatchingScales: - """Convert ratios to squared scales. - - .. 
todo:: - - make this a method - - """ - return MatchingScales(*(np.power(ratios, 2.0) * np.power(masses, 2.0)).tolist()) - - # TODO: upgrade the following to StrEnum (requires py>=3.11) with that, it is # possible to replace all non-alias right sides with calls to enum.auto() @@ -120,11 +107,6 @@ class HeavyInfo(DictLike): matching_ratios: MatchingRatios """Matching scale of heavy quark masses""" - @property - def matching_scales(self) -> MatchingScales: - """Compute matching scales.""" - return scales_from_ratios(self.matching_ratios, self.masses) - @property def squared_ratios(self) -> List[float]: """Squared ratios of matching scales.""" diff --git a/tests/eko/test_quantities.py b/tests/eko/test_quantities.py index df5ecdbf2..0c2628b30 100644 --- a/tests/eko/test_quantities.py +++ b/tests/eko/test_quantities.py @@ -1,5 +1,6 @@ from math import nan +import numpy as np import pytest from eko.quantities import heavy_quarks as hq @@ -37,11 +38,12 @@ def test_HeavyInfo(): intrinsic_flavors=[4, 5], masses=hq.HeavyQuarkMasses( [ - hq.QuarkMassRef(2.0, nan), - hq.QuarkMassRef(5.0, nan), - hq.QuarkMassRef(100.0, nan), + hq.QuarkMassRef([2.0, nan]), + hq.QuarkMassRef([5.0, nan]), + hq.QuarkMassRef([100.0, nan]), ] ), masses_scheme=hq.QuarkMassScheme.POLE, matching_ratios=hq.MatchingRatios([1.5, 2.0, 3.0]), ) + np.testing.assert_allclose(i.squared_ratios, [2.25, 4.0, 9.0]) From ef1289d8abbc8f9e7daafab28e8bca050343226c Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 11 May 2023 15:22:35 +0200 Subject: [PATCH 144/154] Poetry update --- poetry.lock | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3249915c4..6c76357d1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -204,14 +204,14 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -781,14 +781,14 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p [[package]] name = "fonttools" -version = "4.39.3" +version = "4.39.4" description = "Tools to manipulate font files" category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "fonttools-4.39.3-py3-none-any.whl", hash = "sha256:64c0c05c337f826183637570ac5ab49ee220eec66cf50248e8df527edfa95aeb"}, - {file = "fonttools-4.39.3.zip", hash = "sha256:9234b9f57b74e31b192c3fc32ef1a40750a8fbc1cd9837a7b7bfc4ca4a5c51d7"}, + {file = "fonttools-4.39.4-py3-none-any.whl", hash = "sha256:106caf6167c4597556b31a8d9175a3fdc0356fdcd70ab19973c3b0d4c893c461"}, + {file = "fonttools-4.39.4.zip", hash = "sha256:dba8d7cdb8e2bac1b3da28c5ed5960de09e59a2fe7e63bb73f5a59e57b0430d2"}, ] [package.extras] @@ -956,14 +956,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.22.0" +version = "6.23.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.22.0-py3-none-any.whl", hash = "sha256:1ae6047c1277508933078163721bbb479c3e7292778a04b4bacf0874550977d6"}, - {file = "ipykernel-6.22.0.tar.gz", hash = "sha256:302558b81f1bc22dc259fb2a0c5c7cf2f4c0bdb21b50484348f7bafe7fb71421"}, + {file = "ipykernel-6.23.0-py3-none-any.whl", hash = "sha256:fc886f1dcdc0ec17f277e4d21fd071c857d381adcb04f3f3735d25325ca323c6"}, + {file = 
"ipykernel-6.23.0.tar.gz", hash = "sha256:bd6f487d9e2744c84f6e667d46462d7647a4c862e70e08282f05a52b9d4b705f"}, ] [package.dependencies] @@ -990,14 +990,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.12.1" +version = "8.12.2" description = "IPython: Productive Interactive Computing" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "ipython-8.12.1-py3-none-any.whl", hash = "sha256:e3015a1a4aa09b3984fb81b9cef4f0772af5a549878b81efb094cda8bb121993"}, - {file = "ipython-8.12.1.tar.gz", hash = "sha256:2442915417763b62181009259782975fa50bb5eedb97ae97fb614204bf6ecc21"}, + {file = "ipython-8.12.2-py3-none-any.whl", hash = "sha256:ea8801f15dfe4ffb76dea1b09b847430ffd70d827b41735c64a0638a04103bfc"}, + {file = "ipython-8.12.2.tar.gz", hash = "sha256:c7b80eb7f5a855a88efc971fda506ff7a91c280b42cdae26643e0f601ea281ea"}, ] [package.dependencies] @@ -1573,14 +1573,14 @@ test = ["flaky", "ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "p [[package]] name = "nbconvert" -version = "7.3.1" +version = "7.4.0" description = "Converting Jupyter Notebooks" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "nbconvert-7.3.1-py3-none-any.whl", hash = "sha256:d2e95904666f1ff77d36105b9de4e0801726f93b862d5b28f69e93d99ad3b19c"}, - {file = "nbconvert-7.3.1.tar.gz", hash = "sha256:78685362b11d2e8058e70196fe83b09abed8df22d3e599cf271f4d39fdc48b9e"}, + {file = "nbconvert-7.4.0-py3-none-any.whl", hash = "sha256:af5064a9db524f9f12f4e8be7f0799524bd5b14c1adea37e34e83c95127cc818"}, + {file = "nbconvert-7.4.0.tar.gz", hash = "sha256:51b6c77b507b177b73f6729dba15676e42c4e92bcb00edc8cc982ee72e7d89d7"}, ] [package.dependencies] @@ -2171,14 +2171,14 @@ plugins = ["importlib-metadata"] [[package]] name = "pylint" -version = "2.17.3" +version = "2.17.4" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" files 
= [ - {file = "pylint-2.17.3-py3-none-any.whl", hash = "sha256:a6cbb4c6e96eab4a3c7de7c6383c512478f58f88d95764507d84c899d656a89a"}, - {file = "pylint-2.17.3.tar.gz", hash = "sha256:761907349e699f8afdcd56c4fe02f3021ab5b3a0fc26d19a9bfdc66c7d0d5cd5"}, + {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, + {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, ] [package.dependencies] @@ -3023,14 +3023,14 @@ files = [ [[package]] name = "urllib3" -version = "2.0.1" +version = "2.0.2" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.1-py3-none-any.whl", hash = "sha256:d75e5ece05ff170e323303fd924edf29e705f5ae057c489f453a686b639bb68a"}, - {file = "urllib3-2.0.1.tar.gz", hash = "sha256:2ce66a68134be469f5df5d46d724237489b3cd85b2bba2223dbbee1746548826"}, + {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, + {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, ] [package.extras] From cb64de4101e2b86660e9788666bb8ccb8d8fddba Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 16:17:46 +0200 Subject: [PATCH 145/154] Add unit test eko/io/metadata --- tests/eko/io/test_metadata.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/eko/io/test_metadata.py diff --git a/tests/eko/io/test_metadata.py b/tests/eko/io/test_metadata.py new file mode 100644 index 000000000..a5651e0c8 --- /dev/null +++ b/tests/eko/io/test_metadata.py @@ -0,0 +1,33 @@ +import logging + +import pytest +import yaml + +from eko.io import bases, metadata, paths + + +def test_metadata(tmp_path, caplog): + m = metadata.Metadata((1.0, 3), bases.Bases([0.1, 
1.0])) + # errors + with caplog.at_level(logging.INFO): + m.update() + assert "no file" in caplog.text + with pytest.raises(RuntimeError): + m.path + # now modify + m.path = tmp_path + m.update() + p = paths.InternalPaths(tmp_path) + assert tmp_path.exists() + assert p.metadata.exists() + assert p.metadata.is_file() + assert "version" in p.metadata.read_text() + # change version + m.version = "0.0.0-a1~really1.0.0" + m.update() + # if I read back the thing should be what I set + mn = metadata.Metadata((1.0, 3), bases.Bases([0.1, 1.0])) + mm = metadata.Metadata.load(tmp_path) + assert m.path == tmp_path + assert mm.version != mn.version + assert mm.version == "0.0.0-a1~really1.0.0" From 605798cda32ff6c0ba9b6f05471b9dbd37be688f Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 16:43:24 +0200 Subject: [PATCH 146/154] Test error propagation in manipulate --- tests/eko/io/test_manipulate.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/eko/io/test_manipulate.py b/tests/eko/io/test_manipulate.py index c79716eb1..dabc35dbc 100644 --- a/tests/eko/io/test_manipulate.py +++ b/tests/eko/io/test_manipulate.py @@ -42,6 +42,7 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): manipulate.xgrid_reshape(ot, xgp) chk_keys(o1.raw, ot.raw) assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) + assert ot[epout].error is None ottpath = tmp_path / "ott.tar" o1.deepcopy(ottpath) with EKO.edit(ottpath) as ott: @@ -74,7 +75,20 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): op = eko_identity([1, 2, len(xgp), 2, len(xgp)]) np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) - # error + # op error handling + o1[epout] = eko.io.Operator( + operator=eko_identity([1, lpids, len(xg), lpids, len(xg)])[0], + error=0.1 * eko_identity([1, lpids, len(xg), lpids, len(xg)])[0], + ) + ot2path = tmp_path / "ot2.tar" + o1.deepcopy(ot2path) + with 
EKO.edit(ot2path) as ot2: + manipulate.xgrid_reshape(ot2, xgp) + chk_keys(o1.raw, ot2.raw) + assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) + assert ot[epout].error is not None + + # Python error with pytest.raises(ValueError): manipulate.xgrid_reshape(o1) From 7133a2abe48b318e0bed379ab24a56bccd239acf Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 17:10:36 +0200 Subject: [PATCH 147/154] Add another test case to test_quad_ker --- tests/eko/evolution_operator/test_init.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/eko/evolution_operator/test_init.py b/tests/eko/evolution_operator/test_init.py index 492b96429..642de5bb7 100644 --- a/tests/eko/evolution_operator/test_init.py +++ b/tests/eko/evolution_operator/test_init.py @@ -60,6 +60,7 @@ def test_quad_ker(monkeypatch): monkeypatch.setattr(s, "dispatcher", lambda *args: np.identity(2)) params = [ ((1, 0), br.non_singlet_pids_map["ns+"], 0, "", 0.0, 0.0), + ((1, 0), br.non_singlet_pids_map["ns+"], 0, "", 0.123, 1.0), ((3, 1), br.non_singlet_pids_map["ns+u"], 0, "", 0.0, 0.0), ((1, 0), 100, 100, "", 0.123, 1.0), ((1, 0), 100, 21, "", 0.0, 0.0), From eec07b52eacb305a5bc0bb125e66fb43e8bb4616 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 17:13:29 +0200 Subject: [PATCH 148/154] Use second ep in test_xgrid_reshape --- tests/eko/io/test_manipulate.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/eko/io/test_manipulate.py b/tests/eko/io/test_manipulate.py index dabc35dbc..3431d93ce 100644 --- a/tests/eko/io/test_manipulate.py +++ b/tests/eko/io/test_manipulate.py @@ -42,7 +42,6 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): manipulate.xgrid_reshape(ot, xgp) chk_keys(o1.raw, ot.raw) assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) - assert ot[epout].error is None ottpath = tmp_path / "ott.tar" o1.deepcopy(ottpath) with EKO.edit(ottpath) as ott: @@ -76,7 +75,8 @@ 
def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): np.testing.assert_allclose(oit[epout].operator, op[0], atol=1e-10) # op error handling - o1[epout] = eko.io.Operator( + ep2 = (25, 5) + o1[ep2] = eko.io.Operator( operator=eko_identity([1, lpids, len(xg), lpids, len(xg)])[0], error=0.1 * eko_identity([1, lpids, len(xg), lpids, len(xg)])[0], ) @@ -85,8 +85,9 @@ def test_xgrid_reshape(self, eko_factory: EKOFactory, tmp_path: pathlib.Path): with EKO.edit(ot2path) as ot2: manipulate.xgrid_reshape(ot2, xgp) chk_keys(o1.raw, ot2.raw) - assert ot[epout].operator.shape == (lpids, len(xgp), lpids, len(xg)) - assert ot[epout].error is not None + assert ot2[ep2].operator.shape == (lpids, len(xgp), lpids, len(xg)) + assert ot2[epout].error is None + assert ot2[ep2].error is not None # Python error with pytest.raises(ValueError): From 5b5ef7205e4584b96454079b2450b6698bcf2fa4 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 17:25:05 +0200 Subject: [PATCH 149/154] Extend slightly unit test of eko/evop/ome --- tests/eko/evolution_operator/test_ome.py | 154 ++++++++++++----------- 1 file changed, 78 insertions(+), 76 deletions(-) diff --git a/tests/eko/evolution_operator/test_ome.py b/tests/eko/evolution_operator/test_ome.py index 23ab65e9f..d7b1aa1ad 100644 --- a/tests/eko/evolution_operator/test_ome.py +++ b/tests/eko/evolution_operator/test_ome.py @@ -101,13 +101,75 @@ def test_quad_ker_errors(): # Test OME integration def test_quad_ker(monkeypatch): monkeypatch.setattr( - mellin, "Talbot_path", lambda *args: 2 + mellin, "Talbot_path", lambda *args: 3.0 ) # N=2 is a safe evaluation point monkeypatch.setattr( mellin, "Talbot_jac", lambda *args: complex(0, np.pi) ) # negate mellin prefactor monkeypatch.setattr(interpolation, "log_evaluate_Nx", lambda *args: 1) monkeypatch.setattr(interpolation, "evaluate_Nx", lambda *args: 1) + for is_log in [True, False]: + for order, p, t in [((3, 0), False, False), ((2, 0), False, True)]: + for 
sv_mode in [sv.Modes.expanded, sv.Modes.exponentiated]: + res_ns = quad_ker( + u=0, + order=order, + mode0=200, + mode1=200, + is_log=is_log, + logx=0.123, + areas=np.zeros(3), + backward_method=None, + a_s=0.0, + nf=3, + L=0.0, + sv_mode=sv_mode, + Lsv=0.0, + is_msbar=False, + is_polarized=p, + is_time_like=t, + ) + np.testing.assert_allclose(res_ns, 1.0) + res_s = quad_ker( + u=0, + order=order, + mode0=100, + mode1=100, + is_log=is_log, + logx=0.123, + areas=np.zeros(3), + backward_method=None, + a_s=0.0, + nf=3, + L=0.0, + sv_mode=sv_mode, + Lsv=0.0, + is_msbar=False, + is_polarized=p, + is_time_like=t, + ) + np.testing.assert_allclose(res_s, 1.0) + res_s = quad_ker( + u=0, + order=order, + mode0=100, + mode1=21, + is_log=is_log, + logx=0.0, + areas=np.zeros(3), + backward_method=None, + a_s=0.0, + nf=3, + L=0.0, + sv_mode=sv_mode, + Lsv=0.0, + is_msbar=False, + is_polarized=p, + is_time_like=t, + ) + np.testing.assert_allclose(res_s, 0.0) + + # test expanded intrisic inverse kernels zeros = np.zeros((2, 2)) monkeypatch.setattr( "ekore.operator_matrix_elements.unpolarized.space_like.A_non_singlet", @@ -118,91 +180,31 @@ def test_quad_ker(monkeypatch): "ekore.operator_matrix_elements.unpolarized.space_like.A_singlet", lambda *args: np.array([zeros, zeros, zeros]), ) - for is_log in [True, False]: - for order, p, t in [((3, 0), False, False), ((2, 0), False, True)]: + labels = [(200, 200), *br.singlet_labels] + for label in labels: + for sv_mode in [sv.Modes.expanded, sv.Modes.exponentiated]: res_ns = quad_ker( u=0, - order=order, - mode0=200, - mode1=200, - is_log=is_log, - logx=0.123, - areas=np.zeros(3), - backward_method=None, - a_s=0.0, - nf=3, - L=0.0, - sv_mode=sv.Modes.expanded, - Lsv=0.0, - is_msbar=False, - is_polarized=p, - is_time_like=t, - ) - np.testing.assert_allclose(res_ns, 1.0) - res_s = quad_ker( - u=0, - order=order, - mode0=100, - mode1=100, - is_log=is_log, + order=(3, 0), + mode0=label[0], + mode1=label[1], + is_log=True, logx=0.123, 
areas=np.zeros(3), - backward_method=None, - a_s=0.0, - nf=3, - L=0.0, - sv_mode=sv.Modes.expanded, - Lsv=0.0, - is_msbar=False, - is_polarized=p, - is_time_like=t, - ) - np.testing.assert_allclose(res_s, 1.0) - res_s = quad_ker( - u=0, - order=order, - mode0=100, - mode1=21, - is_log=is_log, - logx=0.0, - areas=np.zeros(3), - backward_method=None, + backward_method=InversionMethod.EXPANDED, a_s=0.0, nf=3, L=0.0, - sv_mode=sv.Modes.expanded, + sv_mode=sv_mode, Lsv=0.0, is_msbar=False, - is_polarized=p, - is_time_like=t, + is_polarized=False, + is_time_like=False, ) - np.testing.assert_allclose(res_s, 0.0) - - # test expanded intrisic inverse kernels - labels = [(200, 200), *br.singlet_labels] - for label in labels: - res_ns = quad_ker( - u=0, - order=(3, 0), - mode0=label[0], - mode1=label[1], - is_log=True, - logx=0.123, - areas=np.zeros(3), - backward_method=InversionMethod.EXPANDED, - a_s=0.0, - nf=3, - L=0.0, - sv_mode=sv.Modes.expanded, - Lsv=0.0, - is_msbar=False, - is_polarized=False, - is_time_like=False, - ) - if label[-1] == label[-2]: - np.testing.assert_allclose(res_ns, 1.0) - else: - np.testing.assert_allclose(res_ns, 0.0) + if label[-1] == label[-2]: + np.testing.assert_allclose(res_ns, 1.0) + else: + np.testing.assert_allclose(res_ns, 0.0) # test exact intrinsic inverse kernel labels.extend( From 48a02413dcc25aca306cdcaa1708d746f9897866 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Thu, 11 May 2023 17:48:07 +0200 Subject: [PATCH 150/154] Add more unit tests to eko/io/struct --- src/eko/io/struct.py | 6 +----- tests/eko/io/test_struct.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/eko/io/struct.py b/src/eko/io/struct.py index d0f253c93..fecba3c57 100644 --- a/src/eko/io/struct.py +++ b/src/eko/io/struct.py @@ -182,10 +182,6 @@ def load_recipes(self, recipes: List[Recipe]): else: self.recipes_matching[recipe] = None - def __ilshift__(self, recipes: List[Recipe]): - """Shortcut for :meth:`load_recipes`.""" - 
self.load_recipes(recipes) - # operator management # ------------------- def __getitem__(self, ep: EPoint) -> Optional[Operator]: @@ -374,7 +370,7 @@ def open(cls, path: os.PathLike, mode="r"): elif mode in "a": load = True else: - raise ValueError + raise ValueError(f"Unknown file mode: {mode}") tmpdir = pathlib.Path(tempfile.mkdtemp(prefix=TEMP_PREFIX)) if load: diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index e7ebacfa4..09ec54871 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -13,7 +13,7 @@ class TestEKO: - def test_new_error(self, tmp_path: pathlib.Path): + def test_new_error(self, tmp_path: pathlib.Path, theory_card, operator_card): # try to write to a file different from bla no_tar_path = tmp_path / "Blub.bla" no_tar_path.touch() @@ -25,13 +25,19 @@ def test_new_error(self, tmp_path: pathlib.Path): tar.add(no_tar_path) with pytest.raises(FileExistsError, match="Blub.tar"): struct.EKO.create(exists_path) + for args in [(None, None), (theory_card, None), (None, operator_card)]: + with pytest.raises(RuntimeError, match="missing"): + with struct.EKO.create(tmp_path / "Blub2.tar") as builder: + eko = builder.load_cards(*args).build() def test_load_error(self, tmp_path): # try to read from a non-tar path no_tar_path = tmp_path / "Blub.tar" no_tar_path.write_text("Blub", encoding="utf-8") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="tar"): struct.EKO.read(no_tar_path) + with pytest.raises(ValueError, match="file mode"): + struct.EKO.open(no_tar_path, "ΓΌ") def test_properties(self, eko_factory: EKOFactory): mu = 10.0 @@ -55,6 +61,8 @@ def test_properties(self, eko_factory: EKOFactory): stream.seek(0) raw_eko = yaml.safe_load(stream) assert "metadata" in raw_eko + assert "read" in eko.permissions + assert "write" in eko.permissions def test_ops(self, eko_factory: EKOFactory): mu = 10.0 From 8f696ba4405343e0e874f594f78d3931a9fa2c6b Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: 
Thu, 11 May 2023 18:13:17 +0200 Subject: [PATCH 151/154] Add yet more unit tests to eko/io/struct --- tests/eko/io/test_struct.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/eko/io/test_struct.py b/tests/eko/io/test_struct.py index 09ec54871..b7ff9877b 100644 --- a/tests/eko/io/test_struct.py +++ b/tests/eko/io/test_struct.py @@ -48,7 +48,11 @@ def test_properties(self, eko_factory: EKOFactory): assert hasattr(eko.theory_card.heavy, "masses") assert hasattr(eko.operator_card, "debug") np.testing.assert_allclose(eko.mu2grid, [mu**2]) - assert (mu**2, nf) in eko + ep2 = (mu**2, nf) + assert ep2 in eko + assert eko[ep2] is not None + del eko.operators + assert ep2 not in eko.operators.cache default_grid = eko.operator_card.xgrid assert eko.xgrid == default_grid xg = interpolation.XGrid([0.1, 1.0]) @@ -56,6 +60,7 @@ def test_properties(self, eko_factory: EKOFactory): assert eko.xgrid == xg assert "metadata" in eko.raw # check we can dump and reload + eko.assert_permissions(True, True) stream = io.StringIO() yaml.safe_dump(eko.raw, stream) stream.seek(0) @@ -63,6 +68,15 @@ def test_properties(self, eko_factory: EKOFactory): assert "metadata" in raw_eko assert "read" in eko.permissions assert "write" in eko.permissions + np.testing.assert_allclose(eko.mu20, 1.65**2.0) + assert len(eko.evolgrid) == 1 + np.testing.assert_allclose(eko.evolgrid[0][0], mu**2.0) + assert eko.evolgrid[0][1] == nf + # __delattr__ + eko.blub = "bla" + del eko.blub + with pytest.raises(AttributeError, match="blub"): + eko.blub def test_ops(self, eko_factory: EKOFactory): mu = 10.0 From 431a0556980246fdabd0a1c2cc61c749253275ce Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 12 May 2023 11:45:46 +0200 Subject: [PATCH 152/154] Fix eko/kernels/test_ns --- tests/eko/kernels/test_ns.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/eko/kernels/test_ns.py b/tests/eko/kernels/test_ns.py index 7b3c16a3c..013fc5433 100644 --- 
a/tests/eko/kernels/test_ns.py +++ b/tests/eko/kernels/test_ns.py @@ -202,8 +202,9 @@ def test_ode_n3lo(): np.testing.assert_allclose(lhs, rhs) -def test_error(): - with pytest.raises(ValueError): +def test_error(monkeypatch): + monkeypatch.setattr("eko.beta.beta_qcd", lambda *_args: 1.0) + with pytest.raises(NotImplementedError, match="order is not implemented"): ns.dispatcher((5, 0), "iterate-exact", np.random.rand(3) + 0j, 0.2, 0.1, 3, 10) with pytest.raises(NotImplementedError): ad.gamma_ns((2, 0), 10202, 1, 3) From f02b16fee2fbfd696de38e7ed6de1ef3c84a9092 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Fri, 12 May 2023 15:00:13 +0200 Subject: [PATCH 153/154] Drop masses union, reducing pole masses to msbar format --- src/eko/quantities/heavy_quarks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/eko/quantities/heavy_quarks.py b/src/eko/quantities/heavy_quarks.py index de5e3240a..df0285649 100644 --- a/src/eko/quantities/heavy_quarks.py +++ b/src/eko/quantities/heavy_quarks.py @@ -1,7 +1,7 @@ """Heavy quarks related quantities.""" import enum from dataclasses import dataclass -from typing import Generic, List, Sequence, TypeVar, Union +from typing import Generic, List, Sequence, TypeVar import numpy as np @@ -58,7 +58,7 @@ def t(self, value: T): QuarkMass = LinearScale QuarkMassRef = ReferenceRunning[QuarkMass] -HeavyQuarkMasses = HeavyQuarks[Union[QuarkMass, QuarkMassRef]] +HeavyQuarkMasses = HeavyQuarks[QuarkMassRef] MatchingRatio = float MatchingRatios = HeavyQuarks[MatchingRatio] MatchingScale = SquaredScale From d32d95bdcc8197baf02a9a699dd6b74e7842960c Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 12 May 2023 16:16:58 +0200 Subject: [PATCH 154/154] Drop empty file --- tests/eko/runner/test_parts.py | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 tests/eko/runner/test_parts.py diff --git a/tests/eko/runner/test_parts.py b/tests/eko/runner/test_parts.py deleted file mode 100644 index 
0f3b7a87a..000000000 --- a/tests/eko/runner/test_parts.py +++ /dev/null @@ -1,5 +0,0 @@ -from eko import EKO - - -def test_evolve(eko_factory: EKO): - pass