From 328d462780abbb20b92100066c46d135cf14b5da Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Thu, 25 Aug 2022 10:52:16 +0200 Subject: [PATCH 01/80] basic prototype --- .gitignore | 1 + python/amici/__init__.py | 4 + python/amici/__init__.template.py | 7 ++ python/amici/jax.template.py | 71 +++++++++++++++ python/amici/jaxcodeprinter.py | 49 +++++++++++ python/amici/ode_export.py | 105 ++++++++++++++++++---- python/sdist/amici/jax.py | 127 +++++++++++++++++++++++++++ python/sdist/amici/jax.template.py | 1 + python/sdist/amici/jaxcodeprinter.py | 1 + python/tests/test_jax.py | 70 +++++++++++++++ 10 files changed, 420 insertions(+), 16 deletions(-) create mode 100644 python/amici/jax.template.py create mode 100644 python/amici/jaxcodeprinter.py create mode 100644 python/sdist/amici/jax.py create mode 120000 python/sdist/amici/jax.template.py create mode 120000 python/sdist/amici/jaxcodeprinter.py create mode 100644 python/tests/test_jax.py diff --git a/.gitignore b/.gitignore index 6b0a18901b..a9902f30e6 100644 --- a/.gitignore +++ b/.gitignore @@ -137,6 +137,7 @@ tests/test/* */tests/explicit_amici/* */tests/fixed_initial_amici/* */tests/localfunc_amici/* +*/tests/conversion/* tests/cpp/writeResults.h5 tests/cpp/writeResults.h5.bak tests/sbml-test-suite/* diff --git a/python/amici/__init__.py b/python/amici/__init__.py index 46eac0cfb1..004709f0c9 100644 --- a/python/amici/__init__.py +++ b/python/amici/__init__.py @@ -119,6 +119,7 @@ def _imported_from_setup() -> bool: # These modules don't require the swig interface from .sbml_import import SbmlImporter, assignmentRules2observables from .ode_export import ODEModel, ODEExporter + from .jax import JAXModel from typing import Protocol @@ -129,6 +130,9 @@ class ModelModule(Protocol): def getModel(self) -> amici.Model: pass + def get_jax_model(self) -> JAXModel: + pass + class add_path: """Context manager for temporarily changing PYTHONPATH""" diff --git a/python/amici/__init__.template.py b/python/amici/__init__.template.py index 9fbab85003..85c4f9c69b 100644 --- a/python/amici/__init__.template.py +++ b/python/amici/__init__.template.py @@ -1,6 +1,7 @@ """AMICI-generated module for model TPL_MODELNAME""" import amici +from amici.jax import JAXModel from pathlib import Path # Ensure we are binary-compatible, see #556 @@ -15,5 +16,11 @@ ) from TPL_MODELNAME._TPL_MODELNAME import * +from TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME + + +def get_jax_model() -> JAXModel: + return JAXModel_TPL_MODELNAME() + __version__ = 'TPL_PACKAGE_VERSION' diff --git a/python/amici/jax.template.py b/python/amici/jax.template.py new file mode 100644 index 0000000000..4a60258008 --- /dev/null +++ b/python/amici/jax.template.py @@ -0,0 +1,71 @@ +import jax.numpy as jnp + +from amici.jax import JAXModel + + +class JAXModel_TPL_MODEL_NAME(JAXModel): + def __init__(self): + super().__init__() + + def xdot(self, t, x, args): + + p, k, tcl = args + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_TCL_SYMS = tcl + TPL_W_SYMS = self._w(x, p, k, tcl) + +TPL_XDOT_EQ + + return TPL_XDOT_RET + + def _w(self, x, p, k, tcl): + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_TCL_SYMS = tcl + +TPL_W_EQ + + return TPL_W_RET + + def x0(self, p, k): + + TPL_P_SYMS = p + TPL_K_SYMS = k + +TPL_X0_EQ + + return TPL_X0_RET + + def y(self, x, p, k, tcl): + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_W_SYMS = self._w(x, p, k, tcl) + +TPL_Y_EQ + + return TPL_Y_RET + + def sigmay(self, y, p, k): + TPL_Y_SYMS = y + TPL_P_SYMS = p + TPL_K_SYMS = k + 
+TPL_SIGMAY_EQ + + return TPL_SIGMAY_RET + + def Jy(self, y, my, sigmay): + TPL_Y_SYMS = y + TPL_MY_SYMS = my + TPL_SIGMAY_SYMS = sigmay + +TPL_JY_EQ + + return TPL_JY_RET diff --git a/python/amici/jaxcodeprinter.py b/python/amici/jaxcodeprinter.py new file mode 100644 index 0000000000..0f96153423 --- /dev/null +++ b/python/amici/jaxcodeprinter.py @@ -0,0 +1,49 @@ +"""Jax code generation""" +import re +from typing import List, Optional, Union, Iterable + +import sympy as sp +from sympy.printing.numpy import NumPyPrinter + + +class AmiciJaxCodePrinter(NumPyPrinter): + """JAX code printer""" + + def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: + try: + code = super().doprint(expr, assign_to) + code = re.sub(r'numpy\.', r'jnp.', code) + + return code + except TypeError as e: + raise ValueError( + f'Encountered unsupported function in expression "{expr}"' + ) from e + + def _get_sym_lines( + self, + symbols: Union[Iterable[str], sp.Matrix], + equations: sp.Matrix, + indent_level: int + ) -> List[str]: + """ + Generate C++ code for assigning symbolic terms in symbols to C++ array + `variable`. + + :param equations: + vectors of symbolic expressions + + :param symbols: + names of the symbols to assign to + + :param indent_level: + indentation level (number of leading blanks) + + :return: + C++ code as list of lines + """ + indent = ' ' * indent_level + return [ + f'{indent}{s} = {self.doprint(e)}' + for s, e in zip(symbols, equations) + ] diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index ff725f3cae..f5883bb357 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -33,6 +33,7 @@ amiciSwigPath, sbml_import) from .constants import SymbolId from .cxxcodeprinter import AmiciCxxCodePrinter, get_switch_statement +from .jaxcodeprinter import AmiciJaxCodePrinter from .import_utils import (ObservableTransformation, generate_flux_symbol, smart_subs_dict, strip_pysb, symbol_with_assumptions, toposort_symbols) @@ -699,7 +700,10 @@ class ODEModel: whether all observables have a gaussian noise model, i.e. whether res and FIM make sense. 
- :ivar _code_printer: + :ivar _code_printer_jax: + Code printer to generate JAX code + + :ivar _code_printer_cpp: Code printer to generate C++ code :ivar _z2event: @@ -829,9 +833,10 @@ def cached_simplify( self._has_quadratic_nllh: bool = True set_log_level(logger, verbose) - self._code_printer = AmiciCxxCodePrinter() + self._code_printer_cpp = AmiciCxxCodePrinter() + self._code_printer_jax = AmiciJaxCodePrinter() for fun in CUSTOM_FUNCTIONS: - self._code_printer.known_functions[fun['sympy']] = fun['c++'] + self._code_printer_cpp.known_functions[fun['sympy']] = fun['c++'] @log_execution_time('importing SbmlImporter', logger) def import_from_sbml_importer( @@ -1519,7 +1524,7 @@ def _generate_sparse_symbol(self, name: str) -> None: for iy in range(self.num_obs()): symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ - sparse_matrix = self._code_printer.csc_matrix( + sparse_matrix = self._code_printer_cpp.csc_matrix( matrix[iy, :], rownames=rownames, colnames=colnames, identifier=iy) self._colptrs[name].append(symbol_col_ptrs) @@ -1529,7 +1534,7 @@ def _generate_sparse_symbol(self, name: str) -> None: self._syms[name].append(sparse_matrix) else: symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ - sparse_matrix = self._code_printer.csc_matrix( + sparse_matrix = self._code_printer_cpp.csc_matrix( matrix, rownames=rownames, colnames=colnames, pattern_only=name in nobody_functions ) @@ -2497,6 +2502,7 @@ def generate_model_code(self) -> None: _custom_pow_eval_derivative): self._prepare_model_folder() + self._generate_jax_code() self._generate_c_code() self._generate_m_code() @@ -2520,6 +2526,73 @@ def _prepare_model_folder(self) -> None: if os.path.isfile(file_path): os.remove(file_path) + @log_execution_time('generating jax code', logger) + def _generate_jax_code(self) -> None: + + eq_names = {'xdot', 'w', 'x0', 'y', 'sigmay', 'Jy'} + sym_names = {'p', 'k', 'x', 'tcl', 'w', 'my', 'y', 'sigmay'} + + indent = 8 + + def jnp_stack_str(array) -> str: + elems = ', '.join(str(x) for x in array) + + if not elems: + return 'jnp.empty((1,))' + + # scalar + if ',' not in elems: + elems += ', ' + + return f'jnp.stack(({elems}), axis=-1)' + + tpl_data = { + **{ + f'{eq_name.upper()}_EQ': '\n'.join( + self.model._code_printer_jax._get_sym_lines( + (str(strip_pysb(s)) for s in self.model.sym(eq_name)), + self.model.eq(eq_name), + indent + ) + ) + for eq_name in eq_names + }, + **{ + f'{eq_name.upper()}_RET': jnp_stack_str( + strip_pysb(s) for s in self.model.sym(eq_name) + ) if eq_name is not 'Jy' + else '0 + ' + ' + '.join( + str(s) for s in self.model.sym(eq_name) + ) + for eq_name in eq_names + }, + **{ + f'{sym_name.upper()}_SYMS': ', '.join( + (str(strip_pysb(s)) for s in self.model.sym(sym_name)) + ) + if self.model.sym(sym_name) else '_' + for sym_name in sym_names + }, + **{ + 'MODEL_NAME': self.model_name, + 'NTCL': self.model.num_cons_law(), + 'PAR_VALS': jnp_stack_str( + p.get_val() for p in self.model._parameters + ), + 'CONST_VALS': jnp_stack_str( + k.get_val() for k in self.model._constants + ), + } + } + os.makedirs(os.path.join(self.model_path, self.model_name), + exist_ok=True) + + apply_template( + os.path.join(amiciModulePath, 'jax.template.py'), + os.path.join(self.model_path, self.model_name, f'jax.py'), + tpl_data + ) + def _generate_c_code(self) -> None: """ Create C++ code files for the model based on @@ -2947,7 +3020,7 @@ def _get_function_body( f'reinitialization_state_idxs.cend(), {index}) != ' 'reinitialization_state_idxs.cend())', f' {function}[{index}] = ' - 
f'{self.model._code_printer.doprint(formula)};' + f'{self.model._code_printer_cpp.doprint(formula)};' ]) cases[ipar] = expressions lines.extend(get_switch_statement('ip', cases, 1)) @@ -2962,12 +3035,12 @@ def _get_function_body( f'reinitialization_state_idxs.cend(), {index}) != ' 'reinitialization_state_idxs.cend())\n ' f'{function}[{index}] = ' - f'{self.model._code_printer.doprint(formula)};' + f'{self.model._code_printer_cpp.doprint(formula)};' ) elif function in event_functions: cases = { - ie: self.model._code_printer._get_sym_lines_array( + ie: self.model._code_printer_cpp._get_sym_lines_array( equations[ie], function, 0) for ie in range(self.model.num_events()) if not smart_is_zero_matrix(equations[ie]) @@ -2979,7 +3052,7 @@ def _get_function_body( for ie, inner_equations in enumerate(equations): inner_lines = [] inner_cases = { - ipar: self.model._code_printer._get_sym_lines_array( + ipar: self.model._code_printer_cpp._get_sym_lines_array( inner_equations[:, ipar], function, 0) for ipar in range(self.model.num_par()) if not smart_is_zero_matrix(inner_equations[:, ipar]) @@ -2992,7 +3065,7 @@ def _get_function_body( elif function in sensi_functions \ and equations.shape[1] == self.model.num_par(): cases = { - ipar: self.model._code_printer._get_sym_lines_array( + ipar: self.model._code_printer_cpp._get_sym_lines_array( equations[:, ipar], function, 0) for ipar in range(self.model.num_par()) if not smart_is_zero_matrix(equations[:, ipar]) @@ -3001,14 +3074,14 @@ def _get_function_body( elif function in multiobs_functions: if function == 'dJydy': cases = { - iobs: self.model._code_printer._get_sym_lines_array( + iobs: self.model._code_printer_cpp._get_sym_lines_array( equations[iobs], function, 0) for iobs in range(self.model.num_obs()) if not smart_is_zero_matrix(equations[iobs]) } else: cases = { - iobs: self.model._code_printer._get_sym_lines_array( + iobs: self.model._code_printer_cpp._get_sym_lines_array( equations[:, iobs], function, 0) for iobs in range(equations.shape[1]) if not smart_is_zero_matrix(equations[:, iobs]) @@ -3025,11 +3098,11 @@ def _get_function_body( symbols = self.model.sparsesym(function) else: symbols = self.model.sym(function, stripped=True) - lines += self.model._code_printer._get_sym_lines_symbols( + lines += self.model._code_printer_cpp._get_sym_lines_symbols( symbols, equations, function, 4) else: - lines += self.model._code_printer._get_sym_lines_array( + lines += self.model._code_printer_cpp._get_sym_lines_array( equations, function, 4) return [line for line in lines if line] @@ -3105,9 +3178,9 @@ def _write_model_header_cpp(self) -> None: 'NK': str(self.model.num_const()), 'O2MODE': 'amici::SecondOrderMode::none', # using cxxcode ensures proper handling of nan/inf - 'PARAMETERS': self.model._code_printer.doprint( + 'PARAMETERS': self.model._code_printer_cpp.doprint( self.model.val('p'))[1:-1], - 'FIXED_PARAMETERS': self.model._code_printer.doprint( + 'FIXED_PARAMETERS': self.model._code_printer_cpp.doprint( self.model.val('k'))[1:-1], 'PARAMETER_NAMES_INITIALIZER_LIST': self._get_symbol_name_initializer_list('p'), diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py new file mode 100644 index 0000000000..7adbed4148 --- /dev/null +++ b/python/sdist/amici/jax.py @@ -0,0 +1,127 @@ +from abc import abstractmethod +from dataclasses import dataclass + +import diffrax +import jax.numpy as jnp +import numpy as np +import equinox as eqx +import functools as ft + +import amici + + +class JAXModel(object): + @abstractmethod + def xdot(self, t, x, 
args): + ... + + @abstractmethod + def _w(self, x, p, k, tcl): + ... + + @abstractmethod + def x0(self, p, k): + ... + + @abstractmethod + def y(self, x, p, k, tcl): + ... + + @abstractmethod + def sigmay(self, y, p, k): + ... + + @abstractmethod + def Jy(self, y, my, sigmay): + ... + + def get_solver(self): + return JAXSolver(model=self) + + +class JAXSolver(object): + def __init__(self, model: JAXModel): + self.model: JAXModel = model + self.solver: diffrax.AbstractSolver = diffrax.Tsit5() + self.atol: float = 1e-8 + self.rtol: float = 1e-8 + self.sensi_mode: amici.SensitivityMethod = \ + amici.SensitivityMethod.adjoint + + def solve(self, ts, p, k): + y0 = self.model.x0(p, k) + tcl = 0 + sol = diffrax.diffeqsolve( + diffrax.ODETerm(self.model.xdot), + self.solver, + args=(p, k, tcl), + t0=ts[0], + t1=ts[-1], + dt0=ts[1] - ts[0], + y0=y0, + stepsize_controller=diffrax.PIDController( + rtol=self.rtol, + atol=self.atol + ), + saveat=diffrax.SaveAt(ts=ts) + ) + return sol + + def obs(self, sol, p, k, tcl): + return jnp.apply_along_axis( + lambda x: self.model.y(x, p, k, tcl), + axis=1, + arr=sol.ys + )[:, :, 0] + + def sigmay(self, obs, p, k): + return jnp.apply_along_axis( + lambda y: self.model.sigmay(y, p, k), + axis=1, + arr=obs + ) + + def loss(self, obs, sigmay, my): + return -jnp.sum(jnp.stack( + [self.model.Jy(obs[i, :], my[i, :], sigmay[i, :]) + for i in range(my.shape[0])] + )) + + +def runAmiciSimulationJAX(model: JAXModel, + solver: JAXSolver, + edata: amici.ExpData): + ts = jnp.asarray(edata.getTimepoints()) + p = jnp.asarray(edata.parameters) + k = jnp.asarray(edata.fixedParameters) + + tcl = 0 + + sol = solver.solve(ts, p, k) + obs = solver.obs(sol, p, k, tcl) + my = jnp.asarray(edata.getObservedData()).reshape(obs.shape) + sigmay = solver.sigmay(obs, p, k) + loss = solver.loss(obs, sigmay, my) + + return ReturnDataJAX( + x=sol.ys, + y=obs, + sigmay=sigmay, + llh=loss, + ) + + +@dataclass +class ReturnDataJAX(dict): + x: np.array = None + sx: np.array = None + y: np.array = None + sy: np.array = None + sigmay: np.array = None + ssigmay: np.array = None + llh: np.array = None + sllh: np.array = None + + def __init__(self, *args, **kwargs): + super(ReturnDataJAX, self).__init__(*args, **kwargs) + self.__dict__ = self diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py new file mode 120000 index 0000000000..26e8aef02f --- /dev/null +++ b/python/sdist/amici/jax.template.py @@ -0,0 +1 @@ +../../amici/jax.template.py \ No newline at end of file diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py new file mode 120000 index 0000000000..d4f2655649 --- /dev/null +++ b/python/sdist/amici/jaxcodeprinter.py @@ -0,0 +1 @@ +../../amici/jaxcodeprinter.py \ No newline at end of file diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py new file mode 100644 index 0000000000..e327539875 --- /dev/null +++ b/python/tests/test_jax.py @@ -0,0 +1,70 @@ +import pytest +import amici +import amici.jax + +import jax.numpy as jnp +import numpy as np + +from amici.pysb_import import pysb2amici +from numpy.testing import assert_allclose + +pysb = pytest.importorskip("pysb") + + +def test_simulation(): + pysb.SelfExporter.cleanup() # reset pysb + pysb.SelfExporter.do_export = True + + model = pysb.Model('conversion') + a = pysb.Monomer('A') + b = pysb.Monomer('B') + pysb.Initial(a(), pysb.Parameter('a0', 1.2)) + pysb.Rule( + 'conv', + a() >> b(), pysb.Parameter('kcat', 0.05) + ) + pysb.Observable('b', b()) + + outdir = 
model.name + pysb2amici(model, outdir, verbose=True, + observables=['b']) + + model_module = amici.import_model_module(module_name=model.name, + module_path=outdir) + + amici_model = model_module.getModel() + + ts = jnp.linspace(0, 1, 10) + amici_model.setTimepoints(np.asarray(ts, dtype=np.float64)) + sol_amici_ref = amici.runAmiciSimulation(amici_model, amici_model.getSolver()) + + jax_model = model_module.get_jax_model() + jax_solver = jax_model.get_solver() + + p = jnp.stack((1.0, 0.1), axis=-1) + k = jnp.empty((0,)) + + amici_model.setParameters(np.asarray(p, dtype=np.float64)) + amici_model.setFixedParameters(np.asarray(k, dtype=np.float64)) + edata = amici.ExpData(sol_amici_ref, 1.0, 1.0) + edata.parameters = amici_model.getParameters() + edata.fixedParameters = amici_model.getFixedParameters() + edata.pscale = amici_model.getParameterScale() + r_amici = amici.runAmiciSimulation( + amici_model, + amici_model.getSolver(), + edata + ) + + r_jax = amici.jax.runAmiciSimulationJAX( + jax_model, + jax_solver, + edata + ) + for field in ['x', 'y', 'sigmay', 'llh']: + assert_allclose( + actual=r_amici[field], + desired=r_jax[field], + atol=1e-6, + rtol=1e-6 + ) From d4f8552435edb8a8e2c50490fa15cc9cf3119780 Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 14:07:18 +0200 Subject: [PATCH 02/80] add dimerization example, add second order code, refactor jit --- .gitignore | 1 + python/sdist/amici/jax.py | 107 +++++++++++++++++++++++++++----------- python/tests/test_jax.py | 78 ++++++++++++++++++++++++--- 3 files changed, 148 insertions(+), 38 deletions(-) diff --git a/.gitignore b/.gitignore index a9902f30e6..bad2a48d0f 100644 --- a/.gitignore +++ b/.gitignore @@ -138,6 +138,7 @@ tests/test/* */tests/fixed_initial_amici/* */tests/localfunc_amici/* */tests/conversion/* +*/tests/dimerization/* tests/cpp/writeResults.h5 tests/cpp/writeResults.h5.bak tests/sbml-test-suite/* diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 7adbed4148..802683969a 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -2,15 +2,16 @@ from dataclasses import dataclass import diffrax +import equinox as eqx import jax.numpy as jnp import numpy as np -import equinox as eqx -import functools as ft +import jax +from functools import partial import amici -class JAXModel(object): +class JAXModel(eqx.Module): @abstractmethod def xdot(self, t, x, args): ... 
@@ -47,8 +48,10 @@ def __init__(self, model: JAXModel): self.rtol: float = 1e-8 self.sensi_mode: amici.SensitivityMethod = \ amici.SensitivityMethod.adjoint + self.sensi_order: amici.SensitivityOrder = \ + amici.SensitivityOrder.none - def solve(self, ts, p, k): + def _solve(self, ts, p, k): y0 = self.model.x0(p, k) tcl = 0 sol = diffrax.diffeqsolve( @@ -65,50 +68,94 @@ def solve(self, ts, p, k): ), saveat=diffrax.SaveAt(ts=ts) ) - return sol + return sol.ys - def obs(self, sol, p, k, tcl): - return jnp.apply_along_axis( + def _obs(self, x, p, k, tcl): + y = jnp.apply_along_axis( lambda x: self.model.y(x, p, k, tcl), axis=1, - arr=sol.ys - )[:, :, 0] + arr=x + ) + return y - def sigmay(self, obs, p, k): - return jnp.apply_along_axis( + def _sigmay(self, obs, p, k): + sigmay = jnp.apply_along_axis( lambda y: self.model.sigmay(y, p, k), axis=1, arr=obs ) + return sigmay - def loss(self, obs, sigmay, my): - return -jnp.sum(jnp.stack( + def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): + llh = - jnp.sum(jnp.stack( [self.model.Jy(obs[i, :], my[i, :], sigmay[i, :]) for i in range(my.shape[0])] )) + return llh + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + def run(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple): + x = self._solve(ts, p, k) + tcl = 0 + obs = self._obs(x, p, k, tcl) + my_r = np.asarray(my).reshape(obs.shape) + sigmay = self._sigmay(obs, p, k) + llh = self._loss(obs, sigmay, my_r) + return llh, (x, obs) + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + def srun(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple): + (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + ts, p, k, my + ) + return llh, sllh, (x, obs) + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + def s2run(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple): + (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + ts, p, k, my + ) + s2llh, (x, obs) = jax.jacfwd(jax.grad(self.run, 1, True), 1, True)( + ts, p, k, my + ) + return llh, sllh, s2llh, (x, obs) def runAmiciSimulationJAX(model: JAXModel, solver: JAXSolver, edata: amici.ExpData): - ts = jnp.asarray(edata.getTimepoints()) + ts = tuple(edata.getTimepoints()) p = jnp.asarray(edata.parameters) - k = jnp.asarray(edata.fixedParameters) - - tcl = 0 - - sol = solver.solve(ts, p, k) - obs = solver.obs(sol, p, k, tcl) - my = jnp.asarray(edata.getObservedData()).reshape(obs.shape) - sigmay = solver.sigmay(obs, p, k) - loss = solver.loss(obs, sigmay, my) - - return ReturnDataJAX( - x=sol.ys, - y=obs, - sigmay=sigmay, - llh=loss, - ) + k = tuple(edata.fixedParameters) + my = tuple(edata.getObservedData()) + + rdata_kwargs = dict() + + if solver.sensi_order == amici.SensitivityOrder.none: + rdata_kwargs['llh'], (rdata_kwargs['x'], rdata_kwargs['y']) = \ + solver.run(ts, p, k, my) + elif solver.sensi_order == amici.SensitivityOrder.first: + rdata_kwargs['llh'], rdata_kwargs['sllh'], ( + rdata_kwargs['x'], rdata_kwargs['y'] + ) = solver.srun(ts, p, k, my) + elif solver.sensi_order == amici.SensitivityOrder.second: + rdata_kwargs['llh'], rdata_kwargs['sllh'], rdata_kwargs['s2llh'], ( + rdata_kwargs['x'], rdata_kwargs['y'] + ) = solver.s2run(ts, p, k, my) + + return ReturnDataJAX(**rdata_kwargs) @dataclass diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index e327539875..554df949db 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -11,7 +11,7 @@ pysb = pytest.importorskip("pysb") -def test_simulation(): 
+def test_conversion(): pysb.SelfExporter.cleanup() # reset pysb pysb.SelfExporter.do_export = True @@ -32,36 +32,98 @@ def test_simulation(): model_module = amici.import_model_module(module_name=model.name, module_path=outdir) + ts = tuple(np.linspace(0, 1, 10)) + p = jnp.stack((1.0, 0.1), axis=-1) + k = tuple() + _test_model(model_module, ts, p, k) + + +def test_dimerization(): + pysb.SelfExporter.cleanup() # reset pysb + pysb.SelfExporter.do_export = True + + model = pysb.Model('dimerization') + a = pysb.Monomer('A', sites=['b']) + b = pysb.Monomer('B', sites=['a']) + + pysb.Rule('turnover_a', + a(b=None) | None, + pysb.Parameter('kdeg_a', 10), + pysb.Parameter('ksyn_a', 0.1)) + pysb.Rule('turnover_b', + b(a=None) | None, + pysb.Parameter('kdeg_b', 0.1), + pysb.Parameter('ksyn_b', 10)) + pysb.Rule('dimer', + a(b=None) + b(a=None) | a(b=1) % b(a=1), + pysb.Parameter('kon', 1.0), + pysb.Parameter('koff', 0.1)) + + pysb.Observable('a_obs', a()) + pysb.Observable('b_obs', b()) + + outdir = model.name + pysb2amici(model, outdir, verbose=True, + observables=['a_obs', 'b_obs'], + constant_parameters=['ksyn_a', 'ksyn_b']) + + model_module = amici.import_model_module(module_name=model.name, + module_path=outdir) + + ts = tuple(np.linspace(0, 1, 10)) + p = jnp.stack((5, 0.5, 0.5, 0.5), axis=-1) + k = (0.5, 5) + _test_model(model_module, ts, p, k) + + +def _test_model(model_module, ts, p, k): amici_model = model_module.getModel() - ts = jnp.linspace(0, 1, 10) amici_model.setTimepoints(np.asarray(ts, dtype=np.float64)) - sol_amici_ref = amici.runAmiciSimulation(amici_model, amici_model.getSolver()) + sol_amici_ref = amici.runAmiciSimulation(amici_model, + amici_model.getSolver()) jax_model = model_module.get_jax_model() jax_solver = jax_model.get_solver() - p = jnp.stack((1.0, 0.1), axis=-1) - k = jnp.empty((0,)) - amici_model.setParameters(np.asarray(p, dtype=np.float64)) amici_model.setFixedParameters(np.asarray(k, dtype=np.float64)) edata = amici.ExpData(sol_amici_ref, 1.0, 1.0) edata.parameters = amici_model.getParameters() edata.fixedParameters = amici_model.getFixedParameters() edata.pscale = amici_model.getParameterScale() + amici_solver = amici_model.getSolver() + amici_solver.setSensitivityMethod(amici.SensitivityMethod.forward) + amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) r_amici = amici.runAmiciSimulation( amici_model, - amici_model.getSolver(), + amici_solver, edata ) + check_fields_jax(r_amici, jax_model, jax_solver, edata, + ['x', 'y', 'llh']) + + jax_solver.sensi_order = amici.SensitivityOrder.first + check_fields_jax(r_amici, jax_model, jax_solver, edata, + ['x', 'y', 'llh', 'sllh']) + + jax_solver.sensi_order = amici.SensitivityOrder.second + check_fields_jax(r_amici, jax_model, jax_solver, edata, + ['x', 'y', 'llh', 'sllh']) + + +def check_fields_jax(r_amici, + jax_model, + jax_solver, + edata, + fields): r_jax = amici.jax.runAmiciSimulationJAX( jax_model, jax_solver, edata ) - for field in ['x', 'y', 'sigmay', 'llh']: + for field in fields: assert_allclose( actual=r_amici[field], desired=r_jax[field], From d37a85076af8a8a469bd84522ddb537fcb0c6395 Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 14:16:57 +0200 Subject: [PATCH 03/80] remove equinox dependency, list dependencies --- python/sdist/amici/jax.py | 3 +-- python/sdist/setup.cfg | 3 +++ scripts/installAmiciSource.sh | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 802683969a..47d9d71c5a 100644 --- 
a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -2,7 +2,6 @@ from dataclasses import dataclass import diffrax -import equinox as eqx import jax.numpy as jnp import numpy as np import jax @@ -11,7 +10,7 @@ import amici -class JAXModel(eqx.Module): +class JAXModel(object): @abstractmethod def xdot(self, t, x, args): ... diff --git a/python/sdist/setup.cfg b/python/sdist/setup.cfg index 35b0796925..64faa93bb2 100644 --- a/python/sdist/setup.cfg +++ b/python/sdist/setup.cfg @@ -45,6 +45,9 @@ zip_safe = False [options.extras_require] petab = petab>=0.1.27 pysb = pysb>=1.13.1 +jax = + jax + diffrax test = pytest pytest-cov diff --git a/scripts/installAmiciSource.sh b/scripts/installAmiciSource.sh index ddc7a57ef5..26da5855e7 100755 --- a/scripts/installAmiciSource.sh +++ b/scripts/installAmiciSource.sh @@ -28,7 +28,8 @@ else fi pip install --upgrade pip pkgconfig scipy matplotlib coverage pytest pytest-cov +pip install jax[cpu] # need to install CPU version of jax pip install git+https://github.com/pysb/pysb # pin to develop to fix sympy compatibility pip install -U "setuptools<64" -pip install --verbose -e ${AMICI_PATH}/python/sdist[petab,test] --no-build-isolation +pip install --verbose -e ${AMICI_PATH}/python/sdist[petab,test,jax] --no-build-isolation deactivate From ff37c7ee5935ef02405d15493b812b5f40de089d Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 14:55:09 +0200 Subject: [PATCH 04/80] make jax optional --- python/amici/__init__.py | 5 ++- python/amici/__init__.template.py | 11 ++++--- python/sdist/amici/jax.py | 53 ++++++++++++++++++++----------- 3 files changed, 45 insertions(+), 24 deletions(-) diff --git a/python/amici/__init__.py b/python/amici/__init__.py index 004709f0c9..2fcd97e03c 100644 --- a/python/amici/__init__.py +++ b/python/amici/__init__.py @@ -119,7 +119,10 @@ def _imported_from_setup() -> bool: # These modules don't require the swig interface from .sbml_import import SbmlImporter, assignmentRules2observables from .ode_export import ODEModel, ODEExporter - from .jax import JAXModel + try: + from .jax import JAXModel + except (ImportError, ModuleNotFoundError): + JAXModel = object from typing import Protocol diff --git a/python/amici/__init__.template.py b/python/amici/__init__.template.py index 85c4f9c69b..356231116d 100644 --- a/python/amici/__init__.template.py +++ b/python/amici/__init__.template.py @@ -16,11 +16,12 @@ ) from TPL_MODELNAME._TPL_MODELNAME import * -from TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME - - -def get_jax_model() -> JAXModel: - return JAXModel_TPL_MODELNAME() +try: + from TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME + def get_jax_model() -> JAXModel: + return JAXModel_TPL_MODELNAME() +except (ModuleNotFoundError, ImportError): + pass __version__ = 'TPL_PACKAGE_VERSION' diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 47d9d71c5a..78c7b330c1 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -11,6 +11,12 @@ class JAXModel(object): + _unscale_funs = { + amici.ParameterScaling.none: lambda x: x, + amici.ParameterScaling.ln: lambda x: jnp.exp(x), + amici.ParameterScaling.log10: lambda x: jnp.power(10, x) + } + @abstractmethod def xdot(self, t, x, args): ... @@ -35,6 +41,12 @@ def sigmay(self, y, p, k): def Jy(self, y, my, sigmay): ... 
+ def unscale_p(self, p, pscale): + return jnp.stack([ + self._unscale_funs[pscale_i](p_i) + for p_i, pscale_i in zip(p, pscale) + ]) + def get_solver(self): return JAXSolver(model=self) @@ -92,42 +104,46 @@ def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): )) return llh - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) def run(self, ts: tuple, p: jnp.ndarray, k: tuple, - my: tuple): - x = self._solve(ts, p, k) + my: tuple, + pscale: tuple): + ps = self.model.unscale_p(p, pscale) + x = self._solve(ts, ps, k) tcl = 0 - obs = self._obs(x, p, k, tcl) + obs = self._obs(x, ps, k, tcl) my_r = np.asarray(my).reshape(obs.shape) - sigmay = self._sigmay(obs, p, k) + sigmay = self._sigmay(obs, ps, k) llh = self._loss(obs, sigmay, my_r) return llh, (x, obs) - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) def srun(self, ts: tuple, p: jnp.ndarray, k: tuple, - my: tuple): + my: tuple, + pscale: tuple): (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( - ts, p, k, my + ts, p, k, my, pscale ) return llh, sllh, (x, obs) - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my')) + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) def s2run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple): + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( - ts, p, k, my + ts, p, k, my, pscale ) s2llh, (x, obs) = jax.jacfwd(jax.grad(self.run, 1, True), 1, True)( - ts, p, k, my + ts, p, k, my, pscale ) return llh, sllh, s2llh, (x, obs) @@ -139,20 +155,21 @@ def runAmiciSimulationJAX(model: JAXModel, p = jnp.asarray(edata.parameters) k = tuple(edata.fixedParameters) my = tuple(edata.getObservedData()) + pscale = tuple(edata.pscale) rdata_kwargs = dict() if solver.sensi_order == amici.SensitivityOrder.none: rdata_kwargs['llh'], (rdata_kwargs['x'], rdata_kwargs['y']) = \ - solver.run(ts, p, k, my) + solver.run(ts, p, k, my, pscale) elif solver.sensi_order == amici.SensitivityOrder.first: rdata_kwargs['llh'], rdata_kwargs['sllh'], ( rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.srun(ts, p, k, my) + ) = solver.srun(ts, p, k, my, pscale) elif solver.sensi_order == amici.SensitivityOrder.second: rdata_kwargs['llh'], rdata_kwargs['sllh'], rdata_kwargs['s2llh'], ( rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.s2run(ts, p, k, my) + ) = solver.s2run(ts, p, k, my, pscale) return ReturnDataJAX(**rdata_kwargs) From 7cd8553879ece20bb6609b5e319bfc9fadfe6ccb Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 16:24:32 +0200 Subject: [PATCH 05/80] support conservation laws --- python/amici/jax.py | 209 +++++++++++++++++++++++++++++++++++ python/amici/jax.template.py | 27 +++++ python/amici/ode_export.py | 6 +- python/sdist/amici/jax.py | 191 +------------------------------- python/tests/test_jax.py | 11 +- 5 files changed, 246 insertions(+), 198 deletions(-) create mode 100644 python/amici/jax.py mode change 100644 => 120000 python/sdist/amici/jax.py diff --git a/python/amici/jax.py b/python/amici/jax.py new file mode 100644 index 0000000000..08f8b93bb1 --- /dev/null +++ b/python/amici/jax.py @@ -0,0 +1,209 @@ +from abc import abstractmethod +from dataclasses import dataclass + +import diffrax +import jax.numpy as jnp +import numpy as np +import jax +from functools import partial + +import 
amici + + +class JAXModel(object): + _unscale_funs = { + amici.ParameterScaling.none: lambda x: x, + amici.ParameterScaling.ln: lambda x: jnp.exp(x), + amici.ParameterScaling.log10: lambda x: jnp.power(10, x) + } + + @abstractmethod + def xdot(self, t, x, args): + ... + + @abstractmethod + def _w(self, x, p, k, tcl): + ... + + @abstractmethod + def x0(self, p, k): + ... + + @abstractmethod + def x_solver(self, x): + ... + + @abstractmethod + def x_rdata(self, x, tcl): + ... + + @abstractmethod + def tcl(self, x, p, k): + ... + + @abstractmethod + def y(self, x, p, k, tcl): + ... + + @abstractmethod + def sigmay(self, y, p, k): + ... + + @abstractmethod + def Jy(self, y, my, sigmay): + ... + + def unscale_p(self, p, pscale): + return jnp.stack([ + self._unscale_funs[pscale_i](p_i) + for p_i, pscale_i in zip(p, pscale) + ]) + + def get_solver(self): + return JAXSolver(model=self) + + +class JAXSolver(object): + def __init__(self, model: JAXModel): + self.model: JAXModel = model + self.solver: diffrax.AbstractSolver = diffrax.Tsit5() + self.atol: float = 1e-8 + self.rtol: float = 1e-8 + self.sensi_mode: amici.SensitivityMethod = \ + amici.SensitivityMethod.adjoint + self.sensi_order: amici.SensitivityOrder = \ + amici.SensitivityOrder.none + + def _solve(self, ts, p, k): + x0 = self.model.x0(p, k) + tcl = self.model.tcl(x0, p, k) + sol = diffrax.diffeqsolve( + diffrax.ODETerm(self.model.xdot), + self.solver, + args=(p, k, tcl), + t0=ts[0], + t1=ts[-1], + dt0=ts[1] - ts[0], + y0=self.model.x_solver(x0), + stepsize_controller=diffrax.PIDController( + rtol=self.rtol, + atol=self.atol + ), + saveat=diffrax.SaveAt(ts=ts) + ) + return sol.ys, tcl + + def _obs(self, x, p, k, tcl): + y = jnp.apply_along_axis( + lambda x: self.model.y(x, p, k, tcl), + axis=1, + arr=x + ) + return y + + def _sigmay(self, obs, p, k): + sigmay = jnp.apply_along_axis( + lambda y: self.model.sigmay(y, p, k), + axis=1, + arr=obs + ) + return sigmay + + def _x_rdata(self, x, tcl): + return jnp.apply_along_axis( + lambda y: self.model.x_rdata(x, tcl), + axis=1, + arr=x + ) + + def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): + llh = - jnp.sum(jnp.stack( + [self.model.Jy(obs[i, :], my[i, :], sigmay[i, :]) + for i in range(my.shape[0])] + )) + return llh + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) + def run(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): + ps = self.model.unscale_p(p, pscale) + x, tcl = self._solve(ts, ps, k) + obs = self._obs(x, ps, k, tcl) + my_r = np.asarray(my).reshape(obs.shape) + sigmay = self._sigmay(obs, ps, k) + llh = self._loss(obs, sigmay, my_r) + x_rdata = self._x_rdata(x, tcl) + return llh, (x_rdata, obs) + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) + def srun(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): + (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + ts, p, k, my, pscale + ) + return llh, sllh, (x, obs) + + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) + def s2run(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): + (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + ts, p, k, my, pscale + ) + s2llh, (x, obs) = jax.jacfwd(jax.grad(self.run, 1, True), 1, True)( + ts, p, k, my, pscale + ) + return llh, sllh, s2llh, (x, obs) + + +def runAmiciSimulationJAX(model: JAXModel, + solver: JAXSolver, + edata: amici.ExpData): + ts = tuple(edata.getTimepoints()) + p = 
jnp.asarray(edata.parameters) + k = tuple(edata.fixedParameters) + my = tuple(edata.getObservedData()) + pscale = tuple(edata.pscale) + + rdata_kwargs = dict() + + if solver.sensi_order == amici.SensitivityOrder.none: + rdata_kwargs['llh'], (rdata_kwargs['x'], rdata_kwargs['y']) = \ + solver.run(ts, p, k, my, pscale) + elif solver.sensi_order == amici.SensitivityOrder.first: + rdata_kwargs['llh'], rdata_kwargs['sllh'], ( + rdata_kwargs['x'], rdata_kwargs['y'] + ) = solver.srun(ts, p, k, my, pscale) + elif solver.sensi_order == amici.SensitivityOrder.second: + rdata_kwargs['llh'], rdata_kwargs['sllh'], rdata_kwargs['s2llh'], ( + rdata_kwargs['x'], rdata_kwargs['y'] + ) = solver.s2run(ts, p, k, my, pscale) + + return ReturnDataJAX(**rdata_kwargs) + + +@dataclass +class ReturnDataJAX(dict): + x: np.array = None + sx: np.array = None + y: np.array = None + sy: np.array = None + sigmay: np.array = None + ssigmay: np.array = None + llh: np.array = None + sllh: np.array = None + + def __init__(self, *args, **kwargs): + super(ReturnDataJAX, self).__init__(*args, **kwargs) + self.__dict__ = self diff --git a/python/amici/jax.template.py b/python/amici/jax.template.py index 4a60258008..746d662540 100644 --- a/python/amici/jax.template.py +++ b/python/amici/jax.template.py @@ -41,6 +41,33 @@ def x0(self, p, k): return TPL_X0_RET + def x_solver(self, x): + + TPL_X_RDATA_SYMS = x + +TPL_X_SOLVER_EQ + + return TPL_X_SOLVER_RET + + def x_rdata(self, x, tcl): + + TPL_X_SYMS = x + TPL_TCL_SYMS = tcl + +TPL_X_RDATA_EQ + + return TPL_X_RDATA_RET + + def tcl(self, x, p, k): + + TPL_X_RDATA_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + +TPL_TOTAL_CL_EQ + + return TPL_TOTAL_CL_RET + def y(self, x, p, k, tcl): TPL_X_SYMS = x diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index c2a29ebf65..d241439825 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -2488,8 +2488,10 @@ def _prepare_model_folder(self) -> None: @log_execution_time('generating jax code', logger) def _generate_jax_code(self) -> None: - eq_names = {'xdot', 'w', 'x0', 'y', 'sigmay', 'Jy'} - sym_names = {'p', 'k', 'x', 'tcl', 'w', 'my', 'y', 'sigmay'} + eq_names = ('xdot', 'w', 'x0', 'y', 'sigmay', 'Jy', 'x_solver', + 'x_rdata', 'total_cl') + sym_names = ('p', 'k', 'x', 'tcl', 'w', 'my', 'y', 'sigmay', + 'x_rdata') indent = 8 diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py deleted file mode 100644 index 78c7b330c1..0000000000 --- a/python/sdist/amici/jax.py +++ /dev/null @@ -1,190 +0,0 @@ -from abc import abstractmethod -from dataclasses import dataclass - -import diffrax -import jax.numpy as jnp -import numpy as np -import jax -from functools import partial - -import amici - - -class JAXModel(object): - _unscale_funs = { - amici.ParameterScaling.none: lambda x: x, - amici.ParameterScaling.ln: lambda x: jnp.exp(x), - amici.ParameterScaling.log10: lambda x: jnp.power(10, x) - } - - @abstractmethod - def xdot(self, t, x, args): - ... - - @abstractmethod - def _w(self, x, p, k, tcl): - ... - - @abstractmethod - def x0(self, p, k): - ... - - @abstractmethod - def y(self, x, p, k, tcl): - ... - - @abstractmethod - def sigmay(self, y, p, k): - ... - - @abstractmethod - def Jy(self, y, my, sigmay): - ... 
- - def unscale_p(self, p, pscale): - return jnp.stack([ - self._unscale_funs[pscale_i](p_i) - for p_i, pscale_i in zip(p, pscale) - ]) - - def get_solver(self): - return JAXSolver(model=self) - - -class JAXSolver(object): - def __init__(self, model: JAXModel): - self.model: JAXModel = model - self.solver: diffrax.AbstractSolver = diffrax.Tsit5() - self.atol: float = 1e-8 - self.rtol: float = 1e-8 - self.sensi_mode: amici.SensitivityMethod = \ - amici.SensitivityMethod.adjoint - self.sensi_order: amici.SensitivityOrder = \ - amici.SensitivityOrder.none - - def _solve(self, ts, p, k): - y0 = self.model.x0(p, k) - tcl = 0 - sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.model.xdot), - self.solver, - args=(p, k, tcl), - t0=ts[0], - t1=ts[-1], - dt0=ts[1] - ts[0], - y0=y0, - stepsize_controller=diffrax.PIDController( - rtol=self.rtol, - atol=self.atol - ), - saveat=diffrax.SaveAt(ts=ts) - ) - return sol.ys - - def _obs(self, x, p, k, tcl): - y = jnp.apply_along_axis( - lambda x: self.model.y(x, p, k, tcl), - axis=1, - arr=x - ) - return y - - def _sigmay(self, obs, p, k): - sigmay = jnp.apply_along_axis( - lambda y: self.model.sigmay(y, p, k), - axis=1, - arr=obs - ) - return sigmay - - def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): - llh = - jnp.sum(jnp.stack( - [self.model.Jy(obs[i, :], my[i, :], sigmay[i, :]) - for i in range(my.shape[0])] - )) - return llh - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - ps = self.model.unscale_p(p, pscale) - x = self._solve(ts, ps, k) - tcl = 0 - obs = self._obs(x, ps, k, tcl) - my_r = np.asarray(my).reshape(obs.shape) - sigmay = self._sigmay(obs, ps, k) - llh = self._loss(obs, sigmay, my_r) - return llh, (x, obs) - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def srun(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( - ts, p, k, my, pscale - ) - return llh, sllh, (x, obs) - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def s2run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( - ts, p, k, my, pscale - ) - s2llh, (x, obs) = jax.jacfwd(jax.grad(self.run, 1, True), 1, True)( - ts, p, k, my, pscale - ) - return llh, sllh, s2llh, (x, obs) - - -def runAmiciSimulationJAX(model: JAXModel, - solver: JAXSolver, - edata: amici.ExpData): - ts = tuple(edata.getTimepoints()) - p = jnp.asarray(edata.parameters) - k = tuple(edata.fixedParameters) - my = tuple(edata.getObservedData()) - pscale = tuple(edata.pscale) - - rdata_kwargs = dict() - - if solver.sensi_order == amici.SensitivityOrder.none: - rdata_kwargs['llh'], (rdata_kwargs['x'], rdata_kwargs['y']) = \ - solver.run(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.first: - rdata_kwargs['llh'], rdata_kwargs['sllh'], ( - rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.srun(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.second: - rdata_kwargs['llh'], rdata_kwargs['sllh'], rdata_kwargs['s2llh'], ( - rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.s2run(ts, p, k, my, pscale) - - return ReturnDataJAX(**rdata_kwargs) - - -@dataclass -class ReturnDataJAX(dict): - x: np.array = None - sx: np.array = None - y: np.array = None - sy: np.array = None - sigmay: np.array = None - 
ssigmay: np.array = None - llh: np.array = None - sllh: np.array = None - - def __init__(self, *args, **kwargs): - super(ReturnDataJAX, self).__init__(*args, **kwargs) - self.__dict__ = self diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py new file mode 120000 index 0000000000..ab27c5e6d8 --- /dev/null +++ b/python/sdist/amici/jax.py @@ -0,0 +1 @@ +../../amici/jax.py \ No newline at end of file diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 554df949db..d690898824 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -16,18 +16,17 @@ def test_conversion(): pysb.SelfExporter.do_export = True model = pysb.Model('conversion') - a = pysb.Monomer('A') - b = pysb.Monomer('B') - pysb.Initial(a(), pysb.Parameter('a0', 1.2)) + a = pysb.Monomer('A', sites=['s'], site_states={'s': ['a', 'b']}) + pysb.Initial(a(s='a'), pysb.Parameter('aa0', 1.2)) pysb.Rule( 'conv', - a() >> b(), pysb.Parameter('kcat', 0.05) + a(s='a') >> a(s='b'), pysb.Parameter('kcat', 0.05) ) - pysb.Observable('b', b()) + pysb.Observable('ab', a(s='b')) outdir = model.name pysb2amici(model, outdir, verbose=True, - observables=['b']) + observables=['ab']) model_module = amici.import_model_module(module_name=model.name, module_path=outdir) From 5177ad75a496e3204ca4abe6fff16360a1881635 Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 16:31:38 +0200 Subject: [PATCH 06/80] fixup --- python/amici/ode_export.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index d241439825..2c88b0676d 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -2521,10 +2521,10 @@ def jnp_stack_str(array) -> str: **{ f'{eq_name.upper()}_RET': jnp_stack_str( strip_pysb(s) for s in self.model.sym(eq_name) - ) if eq_name is not 'Jy' - else '0 + ' + ' + '.join( + ) if eq_name != 'Jy' + else ' + '.join( str(s) for s in self.model.sym(eq_name) - ) + ) if self.model.sym(eq_name) else '0' for eq_name in eq_names }, **{ From 5612cfce25b9fa29db51d86447c7e011170a6f6a Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 16:36:18 +0200 Subject: [PATCH 07/80] fix jit nesting --- python/amici/jax.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/python/amici/jax.py b/python/amici/jax.py index 08f8b93bb1..28dc5c9569 100644 --- a/python/amici/jax.py +++ b/python/amici/jax.py @@ -123,8 +123,7 @@ def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): )) return llh - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def run(self, + def _run(self, ts: tuple, p: jnp.ndarray, k: tuple, @@ -139,6 +138,15 @@ def run(self, x_rdata = self._x_rdata(x, tcl) return llh, (x_rdata, obs) + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) + def run(self, + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): + return self._run(ts, p, k, my, pscale) + @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) def srun(self, ts: tuple, @@ -146,7 +154,7 @@ def srun(self, k: tuple, my: tuple, pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( ts, p, k, my, pscale ) return llh, sllh, (x, obs) @@ -158,10 +166,10 @@ def s2run(self, k: tuple, my: tuple, pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self.run, 1, True))( + (llh, (x, obs)), sllh = 
(jax.value_and_grad(self._run, 1, True))( ts, p, k, my, pscale ) - s2llh, (x, obs) = jax.jacfwd(jax.grad(self.run, 1, True), 1, True)( + s2llh, (x, obs) = jax.jacfwd(jax.grad(self._run, 1, True), 1, True)( ts, p, k, my, pscale ) return llh, sllh, s2llh, (x, obs) From 2dd0377f4dfd4b5fe0808bffbfa94dea1619d7fa Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 17:22:13 +0200 Subject: [PATCH 08/80] use vmap for vectorization --- python/amici/jax.py | 55 +++++++++++++++++--------------------- python/amici/ode_export.py | 15 ++--------- 2 files changed, 26 insertions(+), 44 deletions(-) diff --git a/python/amici/jax.py b/python/amici/jax.py index 28dc5c9569..0b03a8eed1 100644 --- a/python/amici/jax.py +++ b/python/amici/jax.py @@ -94,41 +94,26 @@ def _solve(self, ts, p, k): return sol.ys, tcl def _obs(self, x, p, k, tcl): - y = jnp.apply_along_axis( - lambda x: self.model.y(x, p, k, tcl), - axis=1, - arr=x + return jax.vmap(self.model.y, in_axes=(0, None, None, None))( + x, p, k, tcl ) - return y def _sigmay(self, obs, p, k): - sigmay = jnp.apply_along_axis( - lambda y: self.model.sigmay(y, p, k), - axis=1, - arr=obs - ) - return sigmay + return jax.vmap(self.model.sigmay, in_axes=(0, None, None))(obs, p, k) def _x_rdata(self, x, tcl): - return jnp.apply_along_axis( - lambda y: self.model.x_rdata(x, tcl), - axis=1, - arr=x - ) + return jax.vmap(self.model.x_rdata, in_axes=(0, None))(x, tcl) def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): - llh = - jnp.sum(jnp.stack( - [self.model.Jy(obs[i, :], my[i, :], sigmay[i, :]) - for i in range(my.shape[0])] - )) - return llh + loss_fun = jax.vmap(self.model.Jy, in_axes=(0, 0, 0)) + return - jnp.sum(loss_fun(obs, my, sigmay)) def _run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): ps = self.model.unscale_p(p, pscale) x, tcl = self._solve(ts, ps, k) obs = self._obs(x, ps, k, tcl) @@ -140,11 +125,11 @@ def _run(self, @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) def run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): + ts: tuple, + p: jnp.ndarray, + k: tuple, + my: tuple, + pscale: tuple): return self._run(ts, p, k, my, pscale) @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) @@ -198,6 +183,14 @@ def runAmiciSimulationJAX(model: JAXModel, rdata_kwargs['x'], rdata_kwargs['y'] ) = solver.s2run(ts, p, k, my, pscale) + for field in rdata_kwargs.keys(): + if field == 'llh': + rdata_kwargs[field] = np.float(rdata_kwargs[field]) + elif field not in ['sllh', 's2llh']: + rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T + if rdata_kwargs[field].ndim == 1: + rdata_kwargs[field] = np.expand_dims(rdata_kwargs[field], 1) + return ReturnDataJAX(**rdata_kwargs) diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index 2c88b0676d..a415f04969 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -2499,13 +2499,9 @@ def jnp_stack_str(array) -> str: elems = ', '.join(str(x) for x in array) if not elems: - return 'jnp.empty((1,))' + return 'tuple()' - # scalar - if ',' not in elems: - elems += ', ' - - return f'jnp.stack(({elems}), axis=-1)' + return elems tpl_data = { **{ @@ -2536,13 +2532,6 @@ def jnp_stack_str(array) -> str: }, **{ 'MODEL_NAME': self.model_name, - 'NTCL': self.model.num_cons_law(), - 'PAR_VALS': jnp_stack_str( - p.get_val() for p in self.model._parameters - ), - 'CONST_VALS': jnp_stack_str( - 
k.get_val() for k in self.model._constants - ), } } os.makedirs(os.path.join(self.model_path, self.model_name), From e9bd14fdb5ebc5727f62342a7cc5641e675c0f10 Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 17:26:50 +0200 Subject: [PATCH 09/80] fixups --- python/amici/__init__.template.py | 8 ++++++-- python/tests/test_jax.py | 1 + 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/python/amici/__init__.template.py b/python/amici/__init__.template.py index 356231116d..838429817f 100644 --- a/python/amici/__init__.template.py +++ b/python/amici/__init__.template.py @@ -1,7 +1,10 @@ """AMICI-generated module for model TPL_MODELNAME""" import amici -from amici.jax import JAXModel +try: + from amici.jax import JAXModel +except (ModuleNotFoundError, ImportError): + JAXModel = object from pathlib import Path # Ensure we are binary-compatible, see #556 @@ -22,6 +25,7 @@ def get_jax_model() -> JAXModel: return JAXModel_TPL_MODELNAME() except (ModuleNotFoundError, ImportError): - pass + def get_jax_model() -> JAXModel: + raise NotImplementedError() __version__ = 'TPL_PACKAGE_VERSION' diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index d690898824..07768580cd 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -1,5 +1,6 @@ import pytest import amici +pytest.importorskip("jax") import amici.jax import jax.numpy as jnp From bbb524646375e63949b96a594d75558a71e2aafd Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 18:23:39 +0200 Subject: [PATCH 10/80] add multithreaded simulation runner --- python/amici/jax.py | 15 ++++++++++++++ python/tests/test_jax.py | 43 ++++++++++++++++++++++------------------ 2 files changed, 39 insertions(+), 19 deletions(-) diff --git a/python/amici/jax.py b/python/amici/jax.py index 0b03a8eed1..05949a201d 100644 --- a/python/amici/jax.py +++ b/python/amici/jax.py @@ -1,11 +1,13 @@ from abc import abstractmethod from dataclasses import dataclass +from concurrent.futures import ThreadPoolExecutor import diffrax import jax.numpy as jnp import numpy as np import jax from functools import partial +from typing import Iterable import amici @@ -160,6 +162,19 @@ def s2run(self, return llh, sllh, s2llh, (x, obs) +def runAmiciSimulationsJAX(model: JAXModel, + solver: JAXSolver, + edatas: Iterable[amici.ExpData], + num_threads: int = 1): + + def run_simulation(edata): + return runAmiciSimulationJAX(model, solver, edata) + + with ThreadPoolExecutor(max_workers=num_threads) as pool: + results = pool.map(run_simulation, edatas) + return results + + def runAmiciSimulationJAX(model: JAXModel, solver: JAXSolver, edata: amici.ExpData): diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 07768580cd..b503cf4e68 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -88,45 +88,50 @@ def _test_model(model_module, ts, p, k): amici_model.setParameters(np.asarray(p, dtype=np.float64)) amici_model.setFixedParameters(np.asarray(k, dtype=np.float64)) - edata = amici.ExpData(sol_amici_ref, 1.0, 1.0) - edata.parameters = amici_model.getParameters() - edata.fixedParameters = amici_model.getFixedParameters() - edata.pscale = amici_model.getParameterScale() + edatas = ( + amici.ExpData(sol_amici_ref, 1.0, 1.0), + amici.ExpData(sol_amici_ref, 1.0, 1.0), + ) + for edata in edatas: + edata.parameters = amici_model.getParameters() + edata.fixedParameters = amici_model.getFixedParameters() + edata.pscale = amici_model.getParameterScale() amici_solver = amici_model.getSolver() 
amici_solver.setSensitivityMethod(amici.SensitivityMethod.forward) amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) - r_amici = amici.runAmiciSimulation( + rs_amici = amici.runAmiciSimulations( amici_model, amici_solver, - edata + edatas ) - check_fields_jax(r_amici, jax_model, jax_solver, edata, + check_fields_jax(rs_amici, jax_model, jax_solver, edatas, ['x', 'y', 'llh']) jax_solver.sensi_order = amici.SensitivityOrder.first - check_fields_jax(r_amici, jax_model, jax_solver, edata, + check_fields_jax(rs_amici, jax_model, jax_solver, edatas, ['x', 'y', 'llh', 'sllh']) jax_solver.sensi_order = amici.SensitivityOrder.second - check_fields_jax(r_amici, jax_model, jax_solver, edata, + check_fields_jax(rs_amici, jax_model, jax_solver, edatas, ['x', 'y', 'llh', 'sllh']) -def check_fields_jax(r_amici, +def check_fields_jax(rs_amici, jax_model, jax_solver, - edata, + edatas, fields): - r_jax = amici.jax.runAmiciSimulationJAX( + rs_jax = amici.jax.runAmiciSimulationsJAX( jax_model, jax_solver, - edata + edatas ) for field in fields: - assert_allclose( - actual=r_amici[field], - desired=r_jax[field], - atol=1e-6, - rtol=1e-6 - ) + for r_amici, r_jax in zip(rs_amici, rs_jax): + assert_allclose( + actual=r_amici[field], + desired=r_jax[field], + atol=1e-6, + rtol=1e-6 + ) From 9bd1004bcf1d5f06f87323c5763ca1d0b579d49d Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Fri, 26 Aug 2022 18:28:22 +0200 Subject: [PATCH 11/80] fix my --- python/amici/jax.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/amici/jax.py b/python/amici/jax.py index 05949a201d..9c0f5d1e8f 100644 --- a/python/amici/jax.py +++ b/python/amici/jax.py @@ -119,7 +119,7 @@ def _run(self, ps = self.model.unscale_p(p, pscale) x, tcl = self._solve(ts, ps, k) obs = self._obs(x, ps, k, tcl) - my_r = np.asarray(my).reshape(obs.shape) + my_r = np.asarray(my).reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) llh = self._loss(obs, sigmay, my_r) x_rdata = self._x_rdata(x, tcl) From 599aa711caf5cef6e4ace8c4a730a1be1765c368 Mon Sep 17 00:00:00 2001 From: FFroehlich Date: Tue, 13 Sep 2022 16:03:42 +0200 Subject: [PATCH 12/80] fixes --- python/amici/jax.py | 45 ++++++------ python/amici/jax.template.py | 8 +-- python/amici/ode_export.py | 13 ++-- python/tests/test_jax.py | 2 +- tests/benchmark-models/benchmark_models.yaml | 2 +- .../test_benchmark_collection.sh | 4 +- tests/benchmark-models/test_petab_model.py | 69 ++++++++++++++++++- 7 files changed, 106 insertions(+), 37 deletions(-) diff --git a/python/amici/jax.py b/python/amici/jax.py index 9c0f5d1e8f..382aa08ba4 100644 --- a/python/amici/jax.py +++ b/python/amici/jax.py @@ -11,6 +11,9 @@ import amici +from jax.config import config +config.update("jax_enable_x64", True) + class JAXModel(object): _unscale_funs = { @@ -24,7 +27,7 @@ def xdot(self, t, x, args): ... @abstractmethod - def _w(self, x, p, k, tcl): + def _w(self, t, x, p, k, tcl): ... @abstractmethod @@ -44,7 +47,7 @@ def tcl(self, x, p, k): ... @abstractmethod - def y(self, x, p, k, tcl): + def y(self, t, x, p, k, tcl): ... 
@abstractmethod @@ -68,9 +71,10 @@ def get_solver(self): class JAXSolver(object): def __init__(self, model: JAXModel): self.model: JAXModel = model - self.solver: diffrax.AbstractSolver = diffrax.Tsit5() + self.solver: diffrax.AbstractSolver = diffrax.Kvaerno5() self.atol: float = 1e-8 self.rtol: float = 1e-8 + self.maxsteps: int = int(1e6) self.sensi_mode: amici.SensitivityMethod = \ amici.SensitivityMethod.adjoint self.sensi_order: amici.SensitivityOrder = \ @@ -83,21 +87,22 @@ def _solve(self, ts, p, k): diffrax.ODETerm(self.model.xdot), self.solver, args=(p, k, tcl), - t0=ts[0], + t0=0.0, t1=ts[-1], - dt0=ts[1] - ts[0], + dt0=None, y0=self.model.x_solver(x0), stepsize_controller=diffrax.PIDController( rtol=self.rtol, atol=self.atol ), + max_steps=self.maxsteps, saveat=diffrax.SaveAt(ts=ts) ) return sol.ys, tcl - def _obs(self, x, p, k, tcl): - return jax.vmap(self.model.y, in_axes=(0, None, None, None))( - x, p, k, tcl + def _obs(self, ts, x, p, k, tcl): + return jax.vmap(self.model.y, in_axes=(0, 0, None, None, None))( + np.asarray(ts), x, p, k, tcl ) def _sigmay(self, obs, p, k): @@ -118,7 +123,7 @@ def _run(self, pscale: tuple): ps = self.model.unscale_p(p, pscale) x, tcl = self._solve(ts, ps, k) - obs = self._obs(x, ps, k, tcl) + obs = self._obs(ts, x, ps, k, tcl) my_r = np.asarray(my).reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) llh = self._loss(obs, sigmay, my_r) @@ -162,22 +167,22 @@ def s2run(self, return llh, sllh, s2llh, (x, obs) -def runAmiciSimulationsJAX(model: JAXModel, - solver: JAXSolver, - edatas: Iterable[amici.ExpData], - num_threads: int = 1): +def run_simulations(model: JAXModel, + solver: JAXSolver, + edatas: Iterable[amici.ExpData], + num_threads: int = 1): - def run_simulation(edata): - return runAmiciSimulationJAX(model, solver, edata) + def run(edata): + return run_simulation(model, solver, edata) with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(run_simulation, edatas) - return results + results = pool.map(run, edatas) + return list(results) -def runAmiciSimulationJAX(model: JAXModel, - solver: JAXSolver, - edata: amici.ExpData): +def run_simulation(model: JAXModel, + solver: JAXSolver, + edata: amici.ExpData): ts = tuple(edata.getTimepoints()) p = jnp.asarray(edata.parameters) k = tuple(edata.fixedParameters) diff --git a/python/amici/jax.template.py b/python/amici/jax.template.py index 746d662540..3cb3bbb4d5 100644 --- a/python/amici/jax.template.py +++ b/python/amici/jax.template.py @@ -15,13 +15,13 @@ def xdot(self, t, x, args): TPL_P_SYMS = p TPL_K_SYMS = k TPL_TCL_SYMS = tcl - TPL_W_SYMS = self._w(x, p, k, tcl) + TPL_W_SYMS = self._w(t, x, p, k, tcl) TPL_XDOT_EQ return TPL_XDOT_RET - def _w(self, x, p, k, tcl): + def _w(self, t, x, p, k, tcl): TPL_X_SYMS = x TPL_P_SYMS = p @@ -68,12 +68,12 @@ def tcl(self, x, p, k): return TPL_TOTAL_CL_RET - def y(self, x, p, k, tcl): + def y(self, t, x, p, k, tcl): TPL_X_SYMS = x TPL_P_SYMS = p TPL_K_SYMS = k - TPL_W_SYMS = self._w(x, p, k, tcl) + TPL_W_SYMS = self._w(t, x, p, k, tcl) TPL_Y_EQ diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index a415f04969..0d2f4323c9 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -2496,7 +2496,7 @@ def _generate_jax_code(self) -> None: indent = 8 def jnp_stack_str(array) -> str: - elems = ', '.join(str(x) for x in array) + elems = ''.join(str(x) + ', ' for x in array) if not elems: return 'tuple()' @@ -2518,14 +2518,15 @@ def jnp_stack_str(array) -> str: f'{eq_name.upper()}_RET': jnp_stack_str( 
strip_pysb(s) for s in self.model.sym(eq_name) ) if eq_name != 'Jy' - else ' + '.join( - str(s) for s in self.model.sym(eq_name) - ) if self.model.sym(eq_name) else '0' + else ('jnp.nansum(jnp.stack((' + ''.join( + str(s) + ', ' for s in self.model.sym(eq_name) + ) + '), axis=-1))') if self.model.sym(eq_name) else '0' for eq_name in eq_names }, **{ - f'{sym_name.upper()}_SYMS': ', '.join( - (str(strip_pysb(s)) for s in self.model.sym(sym_name)) + f'{sym_name.upper()}_SYMS': ''.join( + (str(strip_pysb(s)) + ', ' + for s in self.model.sym(sym_name)) ) if self.model.sym(sym_name) else '_' for sym_name in sym_names diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index b503cf4e68..492d5162b6 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -122,7 +122,7 @@ def check_fields_jax(rs_amici, jax_solver, edatas, fields): - rs_jax = amici.jax.runAmiciSimulationsJAX( + rs_jax = amici.jax.run_simulations( jax_model, jax_solver, edatas diff --git a/tests/benchmark-models/benchmark_models.yaml b/tests/benchmark-models/benchmark_models.yaml index 49e509f421..e608781c99 100644 --- a/tests/benchmark-models/benchmark_models.yaml +++ b/tests/benchmark-models/benchmark_models.yaml @@ -40,7 +40,7 @@ Fiedler_BMC2016: llh: 58.58390161681 Fujita_SciSignal2010: - llh: 53.08377736998929 + llh: 53.08748642432372 # Hass_PONE2017 None diff --git a/tests/benchmark-models/test_benchmark_collection.sh b/tests/benchmark-models/test_benchmark_collection.sh index d8d9e5f2f5..db060e1430 100755 --- a/tests/benchmark-models/test_benchmark_collection.sh +++ b/tests/benchmark-models/test_benchmark_collection.sh @@ -90,8 +90,8 @@ for model in $models; do yaml="${model_dir}"/"${model}"/"${model}".yaml amici_model_dir=test_bmc/"${model}" mkdir -p "$amici_model_dir" - cmd_import="amici_import_petab --verbose -y ${yaml} -o ${amici_model_dir} -n ${model} --flatten" - cmd_run="$script_path/test_petab_model.py --verbose -y ${yaml} -d ${amici_model_dir} -m ${model} -c" + cmd_import="amici_import_petab -y ${yaml} -o ${amici_model_dir} -n ${model} --flatten" + cmd_run="$script_path/test_petab_model.py -y ${yaml} -d ${amici_model_dir} -m ${model} -c" printf '=%.0s' {1..40} printf " %s " "${model}" diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index f5e58e7535..58cbf21590 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -15,8 +15,11 @@ import amici from amici.logging import get_logger -from amici.petab_objective import (simulate_petab, rdatas_to_measurement_df, - LLH, RDATAS) +from amici.petab_objective import ( + simulate_petab, rdatas_to_measurement_df, LLH, RDATAS, create_edatas, + fill_in_parameters, create_parameter_mapping +) +from timeit import default_timer as timer from petab.visualize import plot_problem logger = get_logger(f"amici.{__name__}", logging.WARNING) @@ -87,13 +90,73 @@ def main(): if args.model_name == "Isensee_JCB2018": amici_solver.setAbsoluteTolerance(1e-12) amici_solver.setRelativeTolerance(1e-12) + elif args.model_name == "Fujita_SciSignal2010": + amici_solver.setAbsoluteTolerance(1e-12) + amici_solver.setRelativeTolerance(1e-12) res = simulate_petab( petab_problem=problem, amici_model=amici_model, - solver=amici_solver, log_level=logging.DEBUG) + solver=amici_solver, log_level=logging.INFO) rdatas = res[RDATAS] llh = res[LLH] + if args.model_name not in ( + 'Bachmann_MSB2011', 'Beer_MolBioSystems2014', 'Brannmark_JBC2010', + 'Isensee_JCB2018', 'Weber_BMC2015', 
'Zheng_PNAS2012' + ): + # Bachmann: integration failure even with 1e6 steps + # Beer: Heaviside + # Brannmark_JBC2010: preeq + # Isensee_JCB2018: preeq + # Weber_BMC2015: preeq + # Zheng_PNAS2012: preeq + + jax_model = model_module.get_jax_model() + jax_solver = jax_model.get_solver() + simulation_conditions = \ + problem.get_simulation_conditions_from_measurement_df() + edatas = create_edatas( + amici_model=amici_model, + petab_problem=problem, + simulation_conditions=simulation_conditions + ) + problem_parameters = {t.Index: getattr(t, petab.NOMINAL_VALUE) for t in + problem.parameter_df.itertuples()} + parameter_mapping = create_parameter_mapping( + petab_problem=problem, + simulation_conditions=simulation_conditions, + scaled_parameters=False, + amici_model=amici_model + ) + fill_in_parameters( + edatas=edatas, + problem_parameters=problem_parameters, + scaled_parameters=False, + parameter_mapping=parameter_mapping, + amici_model=amici_model + ) + # run once to JIT + amici.jax.run_simulations( + jax_model, + jax_solver, + edatas + ) + start_jax = timer() + rdatas_jax = amici.jax.run_simulations( + jax_model, + jax_solver, + edatas + ) + end_jax = timer() + + t_jax = end_jax - start_jax + t_amici = sum(r.cpu_time for r in rdatas)/1e3 + + llh_jax = sum(r.llh for r in rdatas_jax) + + print(f'amici (llh={res["llh"]} after {t_amici}s) vs ' + f'jax (llh={llh_jax} after {t_jax}s)') + for rdata in rdatas: assert rdata.status == amici.AMICI_SUCCESS, \ f"Simulation failed for {rdata.id}" From 3fbd17a289526b4ed5584e52361fe449afc44234 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 15:54:37 +0100 Subject: [PATCH 13/80] fixup merge --- .pre-commit-config.yaml | 7 - python/amici/jax.py | 230 ------------------------ python/amici/jax.template.py | 98 ---------- python/amici/jaxcodeprinter.py | 49 ----- python/sdist/amici/__init__.py | 8 + python/sdist/amici/__init__.template.py | 16 ++ python/sdist/amici/de_export.py | 113 ++++++++++-- python/sdist/amici/jax.py | 227 ++++++++++++++++++++++- python/sdist/amici/jax.template.py | 99 +++++++++- python/sdist/amici/jaxcodeprinter.py | 51 +++++- python/sdist/amici/pysb_import.py | 2 +- python/sdist/pyproject.toml | 2 + 12 files changed, 496 insertions(+), 406 deletions(-) delete mode 100644 python/amici/jax.py delete mode 100644 python/amici/jax.template.py delete mode 100644 python/amici/jaxcodeprinter.py mode change 120000 => 100644 python/sdist/amici/jax.py mode change 120000 => 100644 python/sdist/amici/jax.template.py mode change 120000 => 100644 python/sdist/amici/jaxcodeprinter.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a2d00e00c1..84438209b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,11 +27,4 @@ repos: - --config - python/sdist/pyproject.toml -- repo: https://github.com/asottile/pyupgrade - rev: v3.15.0 - hooks: - - id: pyupgrade - args: ["--py39-plus"] - additional_dependencies: [pyupgrade==3.15.0] - exclude: '^(ThirdParty|models)/' diff --git a/python/amici/jax.py b/python/amici/jax.py deleted file mode 100644 index 382aa08ba4..0000000000 --- a/python/amici/jax.py +++ /dev/null @@ -1,230 +0,0 @@ -from abc import abstractmethod -from dataclasses import dataclass -from concurrent.futures import ThreadPoolExecutor - -import diffrax -import jax.numpy as jnp -import numpy as np -import jax -from functools import partial -from typing import Iterable - -import amici - -from jax.config import config -config.update("jax_enable_x64", True) - - -class 
JAXModel(object): - _unscale_funs = { - amici.ParameterScaling.none: lambda x: x, - amici.ParameterScaling.ln: lambda x: jnp.exp(x), - amici.ParameterScaling.log10: lambda x: jnp.power(10, x) - } - - @abstractmethod - def xdot(self, t, x, args): - ... - - @abstractmethod - def _w(self, t, x, p, k, tcl): - ... - - @abstractmethod - def x0(self, p, k): - ... - - @abstractmethod - def x_solver(self, x): - ... - - @abstractmethod - def x_rdata(self, x, tcl): - ... - - @abstractmethod - def tcl(self, x, p, k): - ... - - @abstractmethod - def y(self, t, x, p, k, tcl): - ... - - @abstractmethod - def sigmay(self, y, p, k): - ... - - @abstractmethod - def Jy(self, y, my, sigmay): - ... - - def unscale_p(self, p, pscale): - return jnp.stack([ - self._unscale_funs[pscale_i](p_i) - for p_i, pscale_i in zip(p, pscale) - ]) - - def get_solver(self): - return JAXSolver(model=self) - - -class JAXSolver(object): - def __init__(self, model: JAXModel): - self.model: JAXModel = model - self.solver: diffrax.AbstractSolver = diffrax.Kvaerno5() - self.atol: float = 1e-8 - self.rtol: float = 1e-8 - self.maxsteps: int = int(1e6) - self.sensi_mode: amici.SensitivityMethod = \ - amici.SensitivityMethod.adjoint - self.sensi_order: amici.SensitivityOrder = \ - amici.SensitivityOrder.none - - def _solve(self, ts, p, k): - x0 = self.model.x0(p, k) - tcl = self.model.tcl(x0, p, k) - sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.model.xdot), - self.solver, - args=(p, k, tcl), - t0=0.0, - t1=ts[-1], - dt0=None, - y0=self.model.x_solver(x0), - stepsize_controller=diffrax.PIDController( - rtol=self.rtol, - atol=self.atol - ), - max_steps=self.maxsteps, - saveat=diffrax.SaveAt(ts=ts) - ) - return sol.ys, tcl - - def _obs(self, ts, x, p, k, tcl): - return jax.vmap(self.model.y, in_axes=(0, 0, None, None, None))( - np.asarray(ts), x, p, k, tcl - ) - - def _sigmay(self, obs, p, k): - return jax.vmap(self.model.sigmay, in_axes=(0, None, None))(obs, p, k) - - def _x_rdata(self, x, tcl): - return jax.vmap(self.model.x_rdata, in_axes=(0, None))(x, tcl) - - def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): - loss_fun = jax.vmap(self.model.Jy, in_axes=(0, 0, 0)) - return - jnp.sum(loss_fun(obs, my, sigmay)) - - def _run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - ps = self.model.unscale_p(p, pscale) - x, tcl = self._solve(ts, ps, k) - obs = self._obs(ts, x, ps, k, tcl) - my_r = np.asarray(my).reshape((len(ts), -1)) - sigmay = self._sigmay(obs, ps, k) - llh = self._loss(obs, sigmay, my_r) - x_rdata = self._x_rdata(x, tcl) - return llh, (x_rdata, obs) - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - return self._run(ts, p, k, my, pscale) - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def srun(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( - ts, p, k, my, pscale - ) - return llh, sllh, (x, obs) - - @partial(jax.jit, static_argnames=('self', 'ts', 'k', 'my', 'pscale')) - def s2run(self, - ts: tuple, - p: jnp.ndarray, - k: tuple, - my: tuple, - pscale: tuple): - (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( - ts, p, k, my, pscale - ) - s2llh, (x, obs) = jax.jacfwd(jax.grad(self._run, 1, True), 1, True)( - ts, p, k, my, pscale - ) - return llh, sllh, s2llh, (x, obs) - - -def run_simulations(model: JAXModel, - 
solver: JAXSolver, - edatas: Iterable[amici.ExpData], - num_threads: int = 1): - - def run(edata): - return run_simulation(model, solver, edata) - - with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(run, edatas) - return list(results) - - -def run_simulation(model: JAXModel, - solver: JAXSolver, - edata: amici.ExpData): - ts = tuple(edata.getTimepoints()) - p = jnp.asarray(edata.parameters) - k = tuple(edata.fixedParameters) - my = tuple(edata.getObservedData()) - pscale = tuple(edata.pscale) - - rdata_kwargs = dict() - - if solver.sensi_order == amici.SensitivityOrder.none: - rdata_kwargs['llh'], (rdata_kwargs['x'], rdata_kwargs['y']) = \ - solver.run(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.first: - rdata_kwargs['llh'], rdata_kwargs['sllh'], ( - rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.srun(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.second: - rdata_kwargs['llh'], rdata_kwargs['sllh'], rdata_kwargs['s2llh'], ( - rdata_kwargs['x'], rdata_kwargs['y'] - ) = solver.s2run(ts, p, k, my, pscale) - - for field in rdata_kwargs.keys(): - if field == 'llh': - rdata_kwargs[field] = np.float(rdata_kwargs[field]) - elif field not in ['sllh', 's2llh']: - rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T - if rdata_kwargs[field].ndim == 1: - rdata_kwargs[field] = np.expand_dims(rdata_kwargs[field], 1) - - return ReturnDataJAX(**rdata_kwargs) - - -@dataclass -class ReturnDataJAX(dict): - x: np.array = None - sx: np.array = None - y: np.array = None - sy: np.array = None - sigmay: np.array = None - ssigmay: np.array = None - llh: np.array = None - sllh: np.array = None - - def __init__(self, *args, **kwargs): - super(ReturnDataJAX, self).__init__(*args, **kwargs) - self.__dict__ = self diff --git a/python/amici/jax.template.py b/python/amici/jax.template.py deleted file mode 100644 index 3cb3bbb4d5..0000000000 --- a/python/amici/jax.template.py +++ /dev/null @@ -1,98 +0,0 @@ -import jax.numpy as jnp - -from amici.jax import JAXModel - - -class JAXModel_TPL_MODEL_NAME(JAXModel): - def __init__(self): - super().__init__() - - def xdot(self, t, x, args): - - p, k, tcl = args - - TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k - TPL_TCL_SYMS = tcl - TPL_W_SYMS = self._w(t, x, p, k, tcl) - -TPL_XDOT_EQ - - return TPL_XDOT_RET - - def _w(self, t, x, p, k, tcl): - - TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k - TPL_TCL_SYMS = tcl - -TPL_W_EQ - - return TPL_W_RET - - def x0(self, p, k): - - TPL_P_SYMS = p - TPL_K_SYMS = k - -TPL_X0_EQ - - return TPL_X0_RET - - def x_solver(self, x): - - TPL_X_RDATA_SYMS = x - -TPL_X_SOLVER_EQ - - return TPL_X_SOLVER_RET - - def x_rdata(self, x, tcl): - - TPL_X_SYMS = x - TPL_TCL_SYMS = tcl - -TPL_X_RDATA_EQ - - return TPL_X_RDATA_RET - - def tcl(self, x, p, k): - - TPL_X_RDATA_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k - -TPL_TOTAL_CL_EQ - - return TPL_TOTAL_CL_RET - - def y(self, t, x, p, k, tcl): - - TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k - TPL_W_SYMS = self._w(t, x, p, k, tcl) - -TPL_Y_EQ - - return TPL_Y_RET - - def sigmay(self, y, p, k): - TPL_Y_SYMS = y - TPL_P_SYMS = p - TPL_K_SYMS = k - -TPL_SIGMAY_EQ - - return TPL_SIGMAY_RET - - def Jy(self, y, my, sigmay): - TPL_Y_SYMS = y - TPL_MY_SYMS = my - TPL_SIGMAY_SYMS = sigmay - -TPL_JY_EQ - - return TPL_JY_RET diff --git a/python/amici/jaxcodeprinter.py b/python/amici/jaxcodeprinter.py deleted file mode 100644 index 0f96153423..0000000000 --- a/python/amici/jaxcodeprinter.py +++ /dev/null @@ -1,49 +0,0 @@ 
-"""Jax code generation""" -import re -from typing import List, Optional, Union, Iterable - -import sympy as sp -from sympy.printing.numpy import NumPyPrinter - - -class AmiciJaxCodePrinter(NumPyPrinter): - """JAX code printer""" - - def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: - try: - code = super().doprint(expr, assign_to) - code = re.sub(r'numpy\.', r'jnp.', code) - - return code - except TypeError as e: - raise ValueError( - f'Encountered unsupported function in expression "{expr}"' - ) from e - - def _get_sym_lines( - self, - symbols: Union[Iterable[str], sp.Matrix], - equations: sp.Matrix, - indent_level: int - ) -> List[str]: - """ - Generate C++ code for assigning symbolic terms in symbols to C++ array - `variable`. - - :param equations: - vectors of symbolic expressions - - :param symbols: - names of the symbols to assign to - - :param indent_level: - indentation level (number of leading blanks) - - :return: - C++ code as list of lines - """ - indent = ' ' * indent_level - return [ - f'{indent}{s} = {self.doprint(e)}' - for s, e in zip(symbols, equations) - ] diff --git a/python/sdist/amici/__init__.py b/python/sdist/amici/__init__.py index cd7bcb0500..6da9023865 100644 --- a/python/sdist/amici/__init__.py +++ b/python/sdist/amici/__init__.py @@ -120,6 +120,11 @@ def _imported_from_setup() -> bool: assignmentRules2observables, ) + try: + from .jax import JAXModel + except (ImportError, ModuleNotFoundError): + JAXModel = object + @runtime_checkable class ModelModule(Protocol): # noqa: F811 """Type of AMICI-generated model modules. @@ -134,6 +139,9 @@ def get_model(self) -> amici.Model: """Create a model instance.""" ... + def get_jax_model(self) -> JAXModel: + ... + AmiciModel = Union[amici.Model, amici.ModelPtr] diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index f5e49b03dd..78b380e433 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ -4,6 +4,11 @@ import amici +try: + from amici.jax import JAXModel +except (ModuleNotFoundError, ImportError): + JAXModel = object + # Ensure we are binary-compatible, see #556 if "TPL_AMICI_VERSION" != amici.__version__: raise amici.AmiciVersionError( @@ -18,4 +23,15 @@ from .TPL_MODELNAME import * # noqa: F403, F401 from .TPL_MODELNAME import getModel as get_model # noqa: F401 +try: + from TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME + + def get_jax_model() -> JAXModel: + return JAXModel_TPL_MODELNAME() +except (ModuleNotFoundError, ImportError): + + def get_jax_model() -> JAXModel: + raise NotImplementedError() + + __version__ = "TPL_PACKAGE_VERSION" diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index fea9325ab2..5520bc5a00 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -53,6 +53,7 @@ AmiciCxxCodePrinter, get_switch_statement, ) +from .jaxcodeprinter import AmiciJaxCodePrinter from .de_model import DEModel from .de_model_components import * from .import_utils import ( @@ -142,7 +143,10 @@ class DEExporter: If the given model uses special functions, this set contains hints for model building. 
- :ivar _code_printer: + :ivar _code_printer_jax: + Code printer to generate JAX code + + :ivar _code_printer_cpp: Code printer to generate C++ code :ivar generate_sensitivity_code: @@ -211,14 +215,15 @@ def __init__( self.set_name(model_name) self.set_paths(outdir) - self._code_printer = AmiciCxxCodePrinter() + self._code_printer_cpp = AmiciCxxCodePrinter() + self._code_printer_jax = AmiciJaxCodePrinter() for fun in CUSTOM_FUNCTIONS: - self._code_printer.known_functions[fun["sympy"]] = fun["c++"] + self._code_printer_cpp.known_functions[fun["sympy"]] = fun["c++"] # Signatures and properties of generated model functions (see # include/amici/model.h for details) self.model: DEModel = de_model - self._code_printer.known_functions.update( + self._code_printer_cpp.known_functions.update( splines.spline_user_functions( self.model._splines, self._get_index("p") ) @@ -268,6 +273,78 @@ def _prepare_model_folder(self) -> None: if os.path.isfile(file_path): os.remove(file_path) + @log_execution_time("generating jax code", logger) + def _generate_jax_code(self) -> None: + eq_names = ( + "xdot", + "w", + "x0", + "y", + "sigmay", + "Jy", + "x_solver", + "x_rdata", + "total_cl", + ) + sym_names = ("p", "k", "x", "tcl", "w", "my", "y", "sigmay", "x_rdata") + + indent = 8 + + def jnp_stack_str(array) -> str: + elems = "".join(str(x) + ", " for x in array) + + if not elems: + return "tuple()" + + return elems + + tpl_data = { + **{ + f"{eq_name.upper()}_EQ": "\n".join( + self.model._code_printer_jax._get_sym_lines( + (str(strip_pysb(s)) for s in self.model.sym(eq_name)), + self.model.eq(eq_name), + indent, + ) + ) + for eq_name in eq_names + }, + **{ + f"{eq_name.upper()}_RET": jnp_stack_str( + strip_pysb(s) for s in self.model.sym(eq_name) + ) + if eq_name != "Jy" + else ( + "jnp.nansum(jnp.stack((" + + "".join(str(s) + ", " for s in self.model.sym(eq_name)) + + "), axis=-1))" + ) + if self.model.sym(eq_name) + else "0" + for eq_name in eq_names + }, + **{ + f"{sym_name.upper()}_SYMS": "".join( + str(strip_pysb(s)) + ", " for s in self.model.sym(sym_name) + ) + if self.model.sym(sym_name) + else "_" + for sym_name in sym_names + }, + **{ + "MODEL_NAME": self.model_name, + }, + } + os.makedirs( + os.path.join(self.model_path, self.model_name), exist_ok=True + ) + + apply_template( + os.path.join(amiciModulePath, "jax.template.py"), + os.path.join(self.model_path, self.model_name, "jax.py"), + tpl_data, + ) + def _generate_c_code(self) -> None: """ Create C++ code files for the model based on @@ -726,7 +803,7 @@ def _get_function_body( f"reinitialization_state_idxs.cend(), {index}) != " "reinitialization_state_idxs.cend())", f" {function}[{index}] = " - f"{self._code_printer.doprint(formula)};", + f"{self._code_printer_cpp.doprint(formula)};", ] ) cases[ipar] = expressions @@ -741,12 +818,12 @@ def _get_function_body( f"reinitialization_state_idxs.cend(), {index}) != " "reinitialization_state_idxs.cend())\n " f"{function}[{index}] = " - f"{self._code_printer.doprint(formula)};" + f"{self._code_printer_cpp.doprint(formula)};" ) elif function in event_functions: cases = { - ie: self._code_printer._get_sym_lines_array( + ie: self._code_printer_cpp._get_sym_lines_array( equations[ie], function, 0 ) for ie in range(self.model.num_events()) @@ -759,7 +836,7 @@ def _get_function_body( for ie, inner_equations in enumerate(equations): inner_lines = [] inner_cases = { - ipar: self._code_printer._get_sym_lines_array( + ipar: self._code_printer_cpp._get_sym_lines_array( inner_equations[:, ipar], function, 0 ) for ipar 
in range(self.model.num_par()) @@ -774,7 +851,7 @@ def _get_function_body( and equations.shape[1] == self.model.num_par() ): cases = { - ipar: self._code_printer._get_sym_lines_array( + ipar: self._code_printer_cpp._get_sym_lines_array( equations[:, ipar], function, 0 ) for ipar in range(self.model.num_par()) @@ -784,7 +861,7 @@ def _get_function_body( elif function in multiobs_functions: if function == "dJydy": cases = { - iobs: self._code_printer._get_sym_lines_array( + iobs: self._code_printer_cpp._get_sym_lines_array( equations[iobs], function, 0 ) for iobs in range(self.model.num_obs()) @@ -792,7 +869,7 @@ def _get_function_body( } else: cases = { - iobs: self._code_printer._get_sym_lines_array( + iobs: self._code_printer_cpp._get_sym_lines_array( equations[:, iobs], function, 0 ) for iobs in range(equations.shape[1]) @@ -822,7 +899,7 @@ def _get_function_body( tmp_equations = sp.Matrix( [equations[i] for i in static_idxs] ) - tmp_lines = self._code_printer._get_sym_lines_symbols( + tmp_lines = self._code_printer_cpp._get_sym_lines_symbols( tmp_symbols, tmp_equations, function, @@ -848,7 +925,7 @@ def _get_function_body( [equations[i] for i in dynamic_idxs] ) - tmp_lines = self._code_printer._get_sym_lines_symbols( + tmp_lines = self._code_printer_cpp._get_sym_lines_symbols( tmp_symbols, tmp_equations, function, @@ -860,12 +937,12 @@ def _get_function_body( lines.extend(tmp_lines) else: - lines += self._code_printer._get_sym_lines_symbols( + lines += self._code_printer_cpp._get_sym_lines_symbols( symbols, equations, function, 4 ) else: - lines += self._code_printer._get_sym_lines_array( + lines += self._code_printer_cpp._get_sym_lines_array( equations, function, 4 ) @@ -1021,10 +1098,10 @@ def _write_model_header_cpp(self) -> None: "NK": self.model.num_const(), "O2MODE": "amici::SecondOrderMode::none", # using code printer ensures proper handling of nan/inf - "PARAMETERS": self._code_printer.doprint(self.model.val("p"))[ + "PARAMETERS": self._code_printer_cpp.doprint(self.model.val("p"))[ 1:-1 ], - "FIXED_PARAMETERS": self._code_printer.doprint( + "FIXED_PARAMETERS": self._code_printer_cpp.doprint( self.model.val("k") )[1:-1], "PARAMETER_NAMES_INITIALIZER_LIST": self._get_symbol_name_initializer_list( @@ -1218,7 +1295,7 @@ def _get_symbol_id_initializer_list(self, name: str) -> str: Template initializer list of ids """ return "\n".join( - f'"{self._code_printer.doprint(symbol)}", // {name}[{idx}]' + f'"{self._code_printer_cpp.doprint(symbol)}", // {name}[{idx}]' for idx, symbol in enumerate(self.model.sym(name)) ) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py deleted file mode 120000 index ab27c5e6d8..0000000000 --- a/python/sdist/amici/jax.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/jax.py \ No newline at end of file diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py new file mode 100644 index 0000000000..63e633e331 --- /dev/null +++ b/python/sdist/amici/jax.py @@ -0,0 +1,226 @@ +from abc import abstractmethod +from dataclasses import dataclass +from concurrent.futures import ThreadPoolExecutor + +import diffrax +import jax.numpy as jnp +import numpy as np +import jax +from functools import partial +from collections.abc import Iterable + +import amici + +from jax.config import config + +config.update("jax_enable_x64", True) + + +class JAXModel: + _unscale_funs = { + amici.ParameterScaling.none: lambda x: x, + amici.ParameterScaling.ln: lambda x: jnp.exp(x), + amici.ParameterScaling.log10: lambda x: jnp.power(10, x), + } + + @abstractmethod + def 
xdot(self, t, x, args): + ... + + @abstractmethod + def _w(self, t, x, p, k, tcl): + ... + + @abstractmethod + def x0(self, p, k): + ... + + @abstractmethod + def x_solver(self, x): + ... + + @abstractmethod + def x_rdata(self, x, tcl): + ... + + @abstractmethod + def tcl(self, x, p, k): + ... + + @abstractmethod + def y(self, t, x, p, k, tcl): + ... + + @abstractmethod + def sigmay(self, y, p, k): + ... + + @abstractmethod + def Jy(self, y, my, sigmay): + ... + + def unscale_p(self, p, pscale): + return jnp.stack( + [ + self._unscale_funs[pscale_i](p_i) + for p_i, pscale_i in zip(p, pscale) + ] + ) + + def get_solver(self): + return JAXSolver(model=self) + + +class JAXSolver: + def __init__(self, model: JAXModel): + self.model: JAXModel = model + self.solver: diffrax.AbstractSolver = diffrax.Kvaerno5() + self.atol: float = 1e-8 + self.rtol: float = 1e-8 + self.maxsteps: int = int(1e6) + self.sensi_mode: amici.SensitivityMethod = ( + amici.SensitivityMethod.adjoint + ) + self.sensi_order: amici.SensitivityOrder = amici.SensitivityOrder.none + + def _solve(self, ts, p, k): + x0 = self.model.x0(p, k) + tcl = self.model.tcl(x0, p, k) + sol = diffrax.diffeqsolve( + diffrax.ODETerm(self.model.xdot), + self.solver, + args=(p, k, tcl), + t0=0.0, + t1=ts[-1], + dt0=None, + y0=self.model.x_solver(x0), + stepsize_controller=diffrax.PIDController( + rtol=self.rtol, atol=self.atol + ), + max_steps=self.maxsteps, + saveat=diffrax.SaveAt(ts=ts), + ) + return sol.ys, tcl + + def _obs(self, ts, x, p, k, tcl): + return jax.vmap(self.model.y, in_axes=(0, 0, None, None, None))( + np.asarray(ts), x, p, k, tcl + ) + + def _sigmay(self, obs, p, k): + return jax.vmap(self.model.sigmay, in_axes=(0, None, None))(obs, p, k) + + def _x_rdata(self, x, tcl): + return jax.vmap(self.model.x_rdata, in_axes=(0, None))(x, tcl) + + def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): + loss_fun = jax.vmap(self.model.Jy, in_axes=(0, 0, 0)) + return -jnp.sum(loss_fun(obs, my, sigmay)) + + def _run( + self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + ): + ps = self.model.unscale_p(p, pscale) + x, tcl = self._solve(ts, ps, k) + obs = self._obs(ts, x, ps, k, tcl) + my_r = np.asarray(my).reshape((len(ts), -1)) + sigmay = self._sigmay(obs, ps, k) + llh = self._loss(obs, sigmay, my_r) + x_rdata = self._x_rdata(x, tcl) + return llh, (x_rdata, obs) + + @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + def run( + self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + ): + return self._run(ts, p, k, my, pscale) + + @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + def srun( + self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + ): + (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( + ts, p, k, my, pscale + ) + return llh, sllh, (x, obs) + + @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + def s2run( + self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + ): + (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( + ts, p, k, my, pscale + ) + s2llh, (x, obs) = jax.jacfwd(jax.grad(self._run, 1, True), 1, True)( + ts, p, k, my, pscale + ) + return llh, sllh, s2llh, (x, obs) + + +def run_simulations( + model: JAXModel, + solver: JAXSolver, + edatas: Iterable[amici.ExpData], + num_threads: int = 1, +): + def run(edata): + return run_simulation(model, solver, edata) + + with ThreadPoolExecutor(max_workers=num_threads) as pool: + results = pool.map(run, edatas) 
+ return list(results) + + +def run_simulation(model: JAXModel, solver: JAXSolver, edata: amici.ExpData): + ts = tuple(edata.getTimepoints()) + p = jnp.asarray(edata.parameters) + k = tuple(edata.fixedParameters) + my = tuple(edata.getObservedData()) + pscale = tuple(edata.pscale) + + rdata_kwargs = dict() + + if solver.sensi_order == amici.SensitivityOrder.none: + ( + rdata_kwargs["llh"], + (rdata_kwargs["x"], rdata_kwargs["y"]), + ) = solver.run(ts, p, k, my, pscale) + elif solver.sensi_order == amici.SensitivityOrder.first: + ( + rdata_kwargs["llh"], + rdata_kwargs["sllh"], + (rdata_kwargs["x"], rdata_kwargs["y"]), + ) = solver.srun(ts, p, k, my, pscale) + elif solver.sensi_order == amici.SensitivityOrder.second: + ( + rdata_kwargs["llh"], + rdata_kwargs["sllh"], + rdata_kwargs["s2llh"], + (rdata_kwargs["x"], rdata_kwargs["y"]), + ) = solver.s2run(ts, p, k, my, pscale) + + for field in rdata_kwargs.keys(): + if field == "llh": + rdata_kwargs[field] = np.float(rdata_kwargs[field]) + elif field not in ["sllh", "s2llh"]: + rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T + if rdata_kwargs[field].ndim == 1: + rdata_kwargs[field] = np.expand_dims(rdata_kwargs[field], 1) + + return ReturnDataJAX(**rdata_kwargs) + + +@dataclass +class ReturnDataJAX(dict): + x: np.array = None + sx: np.array = None + y: np.array = None + sy: np.array = None + sigmay: np.array = None + ssigmay: np.array = None + llh: np.array = None + sllh: np.array = None + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.__dict__ = self diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py deleted file mode 120000 index 26e8aef02f..0000000000 --- a/python/sdist/amici/jax.template.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/jax.template.py \ No newline at end of file diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py new file mode 100644 index 0000000000..3cb3bbb4d5 --- /dev/null +++ b/python/sdist/amici/jax.template.py @@ -0,0 +1,98 @@ +import jax.numpy as jnp + +from amici.jax import JAXModel + + +class JAXModel_TPL_MODEL_NAME(JAXModel): + def __init__(self): + super().__init__() + + def xdot(self, t, x, args): + + p, k, tcl = args + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_TCL_SYMS = tcl + TPL_W_SYMS = self._w(t, x, p, k, tcl) + +TPL_XDOT_EQ + + return TPL_XDOT_RET + + def _w(self, t, x, p, k, tcl): + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_TCL_SYMS = tcl + +TPL_W_EQ + + return TPL_W_RET + + def x0(self, p, k): + + TPL_P_SYMS = p + TPL_K_SYMS = k + +TPL_X0_EQ + + return TPL_X0_RET + + def x_solver(self, x): + + TPL_X_RDATA_SYMS = x + +TPL_X_SOLVER_EQ + + return TPL_X_SOLVER_RET + + def x_rdata(self, x, tcl): + + TPL_X_SYMS = x + TPL_TCL_SYMS = tcl + +TPL_X_RDATA_EQ + + return TPL_X_RDATA_RET + + def tcl(self, x, p, k): + + TPL_X_RDATA_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + +TPL_TOTAL_CL_EQ + + return TPL_TOTAL_CL_RET + + def y(self, t, x, p, k, tcl): + + TPL_X_SYMS = x + TPL_P_SYMS = p + TPL_K_SYMS = k + TPL_W_SYMS = self._w(t, x, p, k, tcl) + +TPL_Y_EQ + + return TPL_Y_RET + + def sigmay(self, y, p, k): + TPL_Y_SYMS = y + TPL_P_SYMS = p + TPL_K_SYMS = k + +TPL_SIGMAY_EQ + + return TPL_SIGMAY_RET + + def Jy(self, y, my, sigmay): + TPL_Y_SYMS = y + TPL_MY_SYMS = my + TPL_SIGMAY_SYMS = sigmay + +TPL_JY_EQ + + return TPL_JY_RET diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py deleted file mode 120000 index d4f2655649..0000000000 --- 
a/python/sdist/amici/jaxcodeprinter.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/jaxcodeprinter.py \ No newline at end of file diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py new file mode 100644 index 0000000000..b768d44fc9 --- /dev/null +++ b/python/sdist/amici/jaxcodeprinter.py @@ -0,0 +1,50 @@ +"""Jax code generation""" +import re +from typing import Optional, Union +from collections.abc import Iterable + +import sympy as sp +from sympy.printing.numpy import NumPyPrinter + + +class AmiciJaxCodePrinter(NumPyPrinter): + """JAX code printer""" + + def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: + try: + code = super().doprint(expr, assign_to) + code = re.sub(r"numpy\.", r"jnp.", code) + + return code + except TypeError as e: + raise ValueError( + f'Encountered unsupported function in expression "{expr}"' + ) from e + + def _get_sym_lines( + self, + symbols: Union[Iterable[str], sp.Matrix], + equations: sp.Matrix, + indent_level: int, + ) -> list[str]: + """ + Generate C++ code for assigning symbolic terms in symbols to C++ array + `variable`. + + :param equations: + vectors of symbolic expressions + + :param symbols: + names of the symbols to assign to + + :param indent_level: + indentation level (number of leading blanks) + + :return: + C++ code as list of lines + """ + indent = " " * indent_level + return [ + f"{indent}{s} = {self.doprint(e)}" + for s, e in zip(symbols, equations) + ] diff --git a/python/sdist/amici/pysb_import.py b/python/sdist/amici/pysb_import.py index 94aad595d9..fd377cf328 100644 --- a/python/sdist/amici/pysb_import.py +++ b/python/sdist/amici/pysb_import.py @@ -181,7 +181,7 @@ def pysb2amici( # Sympy code optimizations are incompatible with PySB objects, as # `pysb.Observable` comes with its own `.match` which overrides # `sympy.Basic.match()`, breaking `sympy.codegen.rewriting.optimize`. 
- exporter._code_printer._fpoptimizer = None + exporter._code_printer_cpp._fpoptimizer = None exporter.generate_model_code() if compile: diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index 91b8484af6..a3273e9f86 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -20,3 +20,5 @@ line-length = 79 line-length = 79 ignore = ["E402", "F403", "F405", "E741"] extend-include = ["*.ipynb"] +exclude = ['jax.template.py'] +extend-select = ["UP"] From 5974d47b51e76c752737556c1610c6e25da77ed9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 19:54:33 +0100 Subject: [PATCH 14/80] fix install --- scripts/installAmiciSource.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/installAmiciSource.sh b/scripts/installAmiciSource.sh index 2dbc789b6e..d4fb696502 100755 --- a/scripts/installAmiciSource.sh +++ b/scripts/installAmiciSource.sh @@ -33,8 +33,8 @@ fi export PYTHON_EXECUTABLE="${AMICI_PATH}/venv/bin/python" python -m pip install --upgrade pip wheel -python -m pip install --upgrade pip setuptools cmake_build_extension numpy jax[cpu] +python -m pip install --upgrade pip setuptools cmake_build_extension numpy python -m pip install git+https://github.com/FFroehlich/pysb@fix_pattern_matching # pin to PR for SPM with compartments AMICI_BUILD_TEMP="${AMICI_PATH}/python/sdist/build/temp" \ - python -m pip install --verbose -e "${AMICI_PATH}/python/sdist[petab,test,vis]" --no-build-isolation + python -m pip install --verbose -e "${AMICI_PATH}/python/sdist[petab,test,vis,jax]" --no-build-isolation deactivate From 37cdc816da8bb294eb4c454d1e15db5d5e0d8678 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 20:06:59 +0100 Subject: [PATCH 15/80] actually generate code --- python/sdist/amici/de_export.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 5520bc5a00..d4ed62c1f8 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -246,6 +246,7 @@ def generate_model_code(self) -> None: sp.Pow, "_eval_derivative", _custom_pow_eval_derivative ): self._prepare_model_folder() + self._generate_jax_code() self._generate_c_code() self._generate_m_code() From 9e6a0ff6154a6b9c76e72bdf68303057a550d585 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 20:08:10 +0100 Subject: [PATCH 16/80] fix --- python/sdist/amici/de_export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index d4ed62c1f8..218b01ad0c 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -302,7 +302,7 @@ def jnp_stack_str(array) -> str: tpl_data = { **{ f"{eq_name.upper()}_EQ": "\n".join( - self.model._code_printer_jax._get_sym_lines( + self._code_printer_jax._get_sym_lines( (str(strip_pysb(s)) for s in self.model.sym(eq_name)), self.model.eq(eq_name), indent, From 22b2b3883a725458297c78d6f11bfb56113fa310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 20:13:16 +0100 Subject: [PATCH 17/80] fix --- python/sdist/amici/__init__.template.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index 78b380e433..70182083dc 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ 
-24,7 +24,7 @@ from .TPL_MODELNAME import getModel as get_model # noqa: F401 try: - from TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME + from .TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME def get_jax_model() -> JAXModel: return JAXModel_TPL_MODELNAME() From 48a2e49cc79f39b1b59686c6a818093a5b663403 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 20:44:34 +0100 Subject: [PATCH 18/80] add better default coefficients, fix jax --- python/sdist/amici/__init__.template.py | 2 +- python/sdist/amici/jax.py | 22 +++++++++++++++------- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index 70182083dc..f59108b2d5 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ -24,7 +24,7 @@ from .TPL_MODELNAME import getModel as get_model # noqa: F401 try: - from .TPL_MODELNAME.jax import JAXModel_TPL_MODELNAME + from .jax import JAXModel_TPL_MODELNAME def get_jax_model() -> JAXModel: return JAXModel_TPL_MODELNAME() diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 63e633e331..8b6bb39311 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -11,9 +11,7 @@ import amici -from jax.config import config - -config.update("jax_enable_x64", True) +jax.config.update("jax_enable_x64", True) class JAXModel: @@ -77,6 +75,9 @@ def __init__(self, model: JAXModel): self.solver: diffrax.AbstractSolver = diffrax.Kvaerno5() self.atol: float = 1e-8 self.rtol: float = 1e-8 + self.pcoeff: float = 0.4 + self.icoeff: float = 0.3 + self.dcoeff: float = 0.0 self.maxsteps: int = int(1e6) self.sensi_mode: amici.SensitivityMethod = ( amici.SensitivityMethod.adjoint @@ -95,7 +96,11 @@ def _solve(self, ts, p, k): dt0=None, y0=self.model.x_solver(x0), stepsize_controller=diffrax.PIDController( - rtol=self.rtol, atol=self.atol + rtol=self.rtol, + atol=self.atol, + pcoeff=self.pcoeff, + icoeff=self.icoeff, + dcoeff=self.dcoeff, ), max_steps=self.maxsteps, saveat=diffrax.SaveAt(ts=ts), @@ -166,8 +171,11 @@ def run_simulations( def run(edata): return run_simulation(model, solver, edata) - with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(run, edatas) + if num_threads > 1: + with ThreadPoolExecutor(max_workers=num_threads) as pool: + results = pool.map(run, edatas) + else: + results = map(run, edatas) return list(results) @@ -201,7 +209,7 @@ def run_simulation(model: JAXModel, solver: JAXSolver, edata: amici.ExpData): for field in rdata_kwargs.keys(): if field == "llh": - rdata_kwargs[field] = np.float(rdata_kwargs[field]) + rdata_kwargs[field] = np.float64(rdata_kwargs[field]) elif field not in ["sllh", "s2llh"]: rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T if rdata_kwargs[field].ndim == 1: From 481216d4f14146578df3485cc5c0cd573ffd1e2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 21:10:16 +0100 Subject: [PATCH 19/80] ignore fujita in jax --- tests/benchmark-models/test_petab_model.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 308900855e..9f75cd17a7 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -148,6 +148,7 @@ def main(): "Bachmann_MSB2011", "Beer_MolBioSystems2014", "Brannmark_JBC2010", + "Fujita_SciSignal2010", "Isensee_JCB2018", "Weber_BMC2015", "Zheng_PNAS2012", @@ 
-155,6 +156,7 @@ def main(): # Bachmann: integration failure even with 1e6 steps # Beer: Heaviside # Brannmark_JBC2010: preeq + # Fujita: Heaviside # Isensee_JCB2018: preeq # Weber_BMC2015: preeq # Zheng_PNAS2012: preeq From 85b8173df189744591dbc823d1d7dc3de52ab947 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Wed, 10 Apr 2024 21:59:22 +0100 Subject: [PATCH 20/80] ignore smith --- tests/benchmark-models/test_petab_model.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 9f75cd17a7..5c07cb3dcd 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -150,6 +150,7 @@ def main(): "Brannmark_JBC2010", "Fujita_SciSignal2010", "Isensee_JCB2018", + "Smith_BMCSystBiol2013", "Weber_BMC2015", "Zheng_PNAS2012", ): @@ -158,6 +159,7 @@ def main(): # Brannmark_JBC2010: preeq # Fujita: Heaviside # Isensee_JCB2018: preeq + # Smith_BMCSystBiol2013: Heaviside # Weber_BMC2015: preeq # Zheng_PNAS2012: preeq From b213adb92ec2f4d5d844732df12d4901cb9f5038 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 11 Apr 2024 22:16:42 +0100 Subject: [PATCH 21/80] optimize & fix bachmann --- python/sdist/amici/jax.py | 288 ++++++++++++--------- python/sdist/amici/jax.template.py | 31 ++- python/sdist/setup.cfg | 2 + tests/benchmark-models/test_petab_model.py | 7 +- 4 files changed, 183 insertions(+), 145 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 8b6bb39311..2a30d028ad 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -3,10 +3,10 @@ from concurrent.futures import ThreadPoolExecutor import diffrax +import equinox as eqx import jax.numpy as jnp import numpy as np import jax -from functools import partial from collections.abc import Iterable import amici @@ -14,208 +14,237 @@ jax.config.update("jax_enable_x64", True) -class JAXModel: +class JAXModel(eqx.Module): _unscale_funs = { amici.ParameterScaling.none: lambda x: x, amici.ParameterScaling.ln: lambda x: jnp.exp(x), amici.ParameterScaling.log10: lambda x: jnp.power(10, x), } + solver: diffrax.AbstractSolver + controller: diffrax.AbstractStepSizeController + atol: float + rtol: float + pcoeff: float + icoeff: float + dcoeff: float + maxsteps: int + term: diffrax.ODETerm + sensi_order: amici.SensitivityOrder + + def __init__(self): + self.solver = diffrax.Kvaerno5() + self.atol: float = 1e-8 + self.rtol: float = 1e-8 + self.pcoeff: float = 0.4 + self.icoeff: float = 0.3 + self.dcoeff: float = 0.0 + self.maxsteps: int = 2**10 + self.controller = diffrax.PIDController( + rtol=self.rtol, + atol=self.atol, + pcoeff=self.pcoeff, + icoeff=self.icoeff, + dcoeff=self.dcoeff, + ) + self.term = diffrax.ODETerm(self.xdot) + self.sensi_order = amici.SensitivityOrder.none + @staticmethod @abstractmethod - def xdot(self, t, x, args): + def xdot(t, x, args): ... + @staticmethod @abstractmethod - def _w(self, t, x, p, k, tcl): + def _w(t, x, p, k, tcl): ... + @staticmethod @abstractmethod - def x0(self, p, k): + def x0(p, k): ... + @staticmethod @abstractmethod - def x_solver(self, x): + def x_solver(x): ... + @staticmethod @abstractmethod - def x_rdata(self, x, tcl): + def x_rdata(x, tcl): ... + @staticmethod @abstractmethod - def tcl(self, x, p, k): + def tcl(x, p, k): ... + @staticmethod @abstractmethod - def y(self, t, x, p, k, tcl): + def y(t, x, p, k, tcl): ... 
+ @staticmethod @abstractmethod - def sigmay(self, y, p, k): + def sigmay(y, p, k): ... + @staticmethod @abstractmethod - def Jy(self, y, my, sigmay): + def Jy(y, my, sigmay): ... def unscale_p(self, p, pscale): - return jnp.stack( - [ - self._unscale_funs[pscale_i](p_i) - for p_i, pscale_i in zip(p, pscale) - ] - ) - - def get_solver(self): - return JAXSolver(model=self) - - -class JAXSolver: - def __init__(self, model: JAXModel): - self.model: JAXModel = model - self.solver: diffrax.AbstractSolver = diffrax.Kvaerno5() - self.atol: float = 1e-8 - self.rtol: float = 1e-8 - self.pcoeff: float = 0.4 - self.icoeff: float = 0.3 - self.dcoeff: float = 0.0 - self.maxsteps: int = int(1e6) - self.sensi_mode: amici.SensitivityMethod = ( - amici.SensitivityMethod.adjoint - ) - self.sensi_order: amici.SensitivityOrder = amici.SensitivityOrder.none + return jax.vmap( + lambda p_i, pscale_i: jnp.stack( + (p_i, jnp.exp(p_i), jnp.power(10, p_i)) + ) + .at[pscale_i] + .get() + )(p, pscale) def _solve(self, ts, p, k): - x0 = self.model.x0(p, k) - tcl = self.model.tcl(x0, p, k) + x0 = self.x0(p, k) + tcl = self.tcl(x0, p, k) sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.model.xdot), + self.term, self.solver, args=(p, k, tcl), t0=0.0, t1=ts[-1], dt0=None, - y0=self.model.x_solver(x0), - stepsize_controller=diffrax.PIDController( - rtol=self.rtol, - atol=self.atol, - pcoeff=self.pcoeff, - icoeff=self.icoeff, - dcoeff=self.dcoeff, - ), + y0=self.x_solver(x0), + stepsize_controller=self.controller, max_steps=self.maxsteps, saveat=diffrax.SaveAt(ts=ts), ) - return sol.ys, tcl + return sol.ys, tcl, sol.stats def _obs(self, ts, x, p, k, tcl): - return jax.vmap(self.model.y, in_axes=(0, 0, None, None, None))( - np.asarray(ts), x, p, k, tcl + return jax.vmap(self.y, in_axes=(0, 0, None, None, None))( + ts, x, p, k, tcl ) def _sigmay(self, obs, p, k): - return jax.vmap(self.model.sigmay, in_axes=(0, None, None))(obs, p, k) + return jax.vmap(self.sigmay, in_axes=(0, None, None))(obs, p, k) def _x_rdata(self, x, tcl): - return jax.vmap(self.model.x_rdata, in_axes=(0, None))(x, tcl) + return jax.vmap(self.x_rdata, in_axes=(0, None))(x, tcl) def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): - loss_fun = jax.vmap(self.model.Jy, in_axes=(0, 0, 0)) + loss_fun = jax.vmap(self.Jy, in_axes=(0, 0, 0)) return -jnp.sum(loss_fun(obs, my, sigmay)) def _run( - self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + self, + ts: np.ndarray, + p: np.ndarray, + k: jnp.ndarray, + my: jnp.ndarray, + pscale: np.ndarray, ): - ps = self.model.unscale_p(p, pscale) - x, tcl = self._solve(ts, ps, k) + ps = self.unscale_p(p, pscale) + x, tcl, stats = self._solve(ts, ps, k) obs = self._obs(ts, x, ps, k, tcl) - my_r = np.asarray(my).reshape((len(ts), -1)) + my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) llh = self._loss(obs, sigmay, my_r) x_rdata = self._x_rdata(x, tcl) - return llh, (x_rdata, obs) + return llh, (x_rdata, obs, stats) - @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + @eqx.filter_jit def run( - self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + self, + ts: np.ndarray, + p: jnp.ndarray, + k: np.ndarray, + my: np.ndarray, + pscale: np.ndarray, ): return self._run(ts, p, k, my, pscale) - @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + @eqx.filter_jit def srun( - self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + self, + ts: np.ndarray, + p: jnp.ndarray, + k: np.ndarray, + my: np.ndarray, + 
pscale: np.ndarray, ): - (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( - ts, p, k, my, pscale - ) - return llh, sllh, (x, obs) + (llh, (x, obs, stats)), sllh = ( + jax.value_and_grad(self._run, 1, True) + )(ts, p, k, my, pscale) + return llh, sllh, (x, obs, stats) - @partial(jax.jit, static_argnames=("self", "ts", "k", "my", "pscale")) + @eqx.filter_jit def s2run( - self, ts: tuple, p: jnp.ndarray, k: tuple, my: tuple, pscale: tuple + self, + ts: np.ndarray, + p: jnp.ndarray, + k: np.ndarray, + my: np.ndarray, + pscale: np.ndarray, ): - (llh, (x, obs)), sllh = (jax.value_and_grad(self._run, 1, True))( + (llh, (_, _, _)), sllh = (jax.value_and_grad(self._run, 1, True))( ts, p, k, my, pscale ) - s2llh, (x, obs) = jax.jacfwd(jax.grad(self._run, 1, True), 1, True)( - ts, p, k, my, pscale - ) - return llh, sllh, s2llh, (x, obs) - - -def run_simulations( - model: JAXModel, - solver: JAXSolver, - edatas: Iterable[amici.ExpData], - num_threads: int = 1, -): - def run(edata): - return run_simulation(model, solver, edata) - - if num_threads > 1: - with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(run, edatas) - else: - results = map(run, edatas) - return list(results) - - -def run_simulation(model: JAXModel, solver: JAXSolver, edata: amici.ExpData): - ts = tuple(edata.getTimepoints()) - p = jnp.asarray(edata.parameters) - k = tuple(edata.fixedParameters) - my = tuple(edata.getObservedData()) - pscale = tuple(edata.pscale) - - rdata_kwargs = dict() - - if solver.sensi_order == amici.SensitivityOrder.none: - ( - rdata_kwargs["llh"], - (rdata_kwargs["x"], rdata_kwargs["y"]), - ) = solver.run(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.first: - ( - rdata_kwargs["llh"], - rdata_kwargs["sllh"], - (rdata_kwargs["x"], rdata_kwargs["y"]), - ) = solver.srun(ts, p, k, my, pscale) - elif solver.sensi_order == amici.SensitivityOrder.second: - ( - rdata_kwargs["llh"], - rdata_kwargs["sllh"], - rdata_kwargs["s2llh"], - (rdata_kwargs["x"], rdata_kwargs["y"]), - ) = solver.s2run(ts, p, k, my, pscale) - - for field in rdata_kwargs.keys(): - if field == "llh": - rdata_kwargs[field] = np.float64(rdata_kwargs[field]) - elif field not in ["sllh", "s2llh"]: - rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T - if rdata_kwargs[field].ndim == 1: - rdata_kwargs[field] = np.expand_dims(rdata_kwargs[field], 1) - - return ReturnDataJAX(**rdata_kwargs) + s2llh, (x, obs, stats) = jax.jacfwd( + jax.grad(self._run, 1, True), 1, True + )(ts, p, k, my, pscale) + return llh, sllh, s2llh, (x, obs, stats) + + def run_simulation(self, edata: amici.ExpData): + ts = np.asarray(edata.getTimepoints()) + p = jnp.asarray(edata.parameters) + k = np.asarray(edata.fixedParameters) + my = np.asarray(edata.getObservedData()) + pscale = np.asarray(edata.pscale) + + rdata_kwargs = dict() + + if self.sensi_order == amici.SensitivityOrder.none: + ( + rdata_kwargs["llh"], + (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), + ) = self.run(ts, p, k, my, pscale) + elif self.sensi_order == amici.SensitivityOrder.first: + ( + rdata_kwargs["llh"], + rdata_kwargs["sllh"], + (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), + ) = self.srun(ts, p, k, my, pscale) + elif self.sensi_order == amici.SensitivityOrder.second: + ( + rdata_kwargs["llh"], + rdata_kwargs["sllh"], + rdata_kwargs["s2llh"], + (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), + ) = self.s2run(ts, p, k, my, pscale) + + for field in rdata_kwargs.keys(): + if field == "llh": 
+ rdata_kwargs[field] = np.float64(rdata_kwargs[field]) + elif field not in ["sllh", "s2llh"]: + rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T + if rdata_kwargs[field].ndim == 1: + rdata_kwargs[field] = np.expand_dims( + rdata_kwargs[field], 1 + ) + + return ReturnDataJAX(**rdata_kwargs) + + def run_simulations( + self, + edatas: Iterable[amici.ExpData], + num_threads: int = 1, + ): + if num_threads > 1: + with ThreadPoolExecutor(max_workers=num_threads) as pool: + results = pool.map(self.run_simulation, edatas) + else: + results = map(self.run_simulation, edatas) + return list(results) @dataclass @@ -228,6 +257,7 @@ class ReturnDataJAX(dict): ssigmay: np.array = None llh: np.array = None sllh: np.array = None + stats: dict = None def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index 3cb3bbb4d5..378b16944f 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -7,7 +7,8 @@ class JAXModel_TPL_MODEL_NAME(JAXModel): def __init__(self): super().__init__() - def xdot(self, t, x, args): + @staticmethod + def xdot(t, x, args): p, k, tcl = args @@ -15,13 +16,14 @@ def xdot(self, t, x, args): TPL_P_SYMS = p TPL_K_SYMS = k TPL_TCL_SYMS = tcl - TPL_W_SYMS = self._w(t, x, p, k, tcl) + TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, p, k, tcl) TPL_XDOT_EQ return TPL_XDOT_RET - def _w(self, t, x, p, k, tcl): + @staticmethod + def _w(t, x, p, k, tcl): TPL_X_SYMS = x TPL_P_SYMS = p @@ -32,7 +34,8 @@ def _w(self, t, x, p, k, tcl): return TPL_W_RET - def x0(self, p, k): + @staticmethod + def x0(p, k): TPL_P_SYMS = p TPL_K_SYMS = k @@ -41,7 +44,8 @@ def x0(self, p, k): return TPL_X0_RET - def x_solver(self, x): + @staticmethod + def x_solver(x): TPL_X_RDATA_SYMS = x @@ -49,7 +53,8 @@ def x_solver(self, x): return TPL_X_SOLVER_RET - def x_rdata(self, x, tcl): + @staticmethod + def x_rdata(x, tcl): TPL_X_SYMS = x TPL_TCL_SYMS = tcl @@ -58,7 +63,8 @@ def x_rdata(self, x, tcl): return TPL_X_RDATA_RET - def tcl(self, x, p, k): + @staticmethod + def tcl(x, p, k): TPL_X_RDATA_SYMS = x TPL_P_SYMS = p @@ -68,18 +74,20 @@ def tcl(self, x, p, k): return TPL_TOTAL_CL_RET - def y(self, t, x, p, k, tcl): + @staticmethod + def y(t, x, p, k, tcl): TPL_X_SYMS = x TPL_P_SYMS = p TPL_K_SYMS = k - TPL_W_SYMS = self._w(t, x, p, k, tcl) + TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, p, k, tcl) TPL_Y_EQ return TPL_Y_RET - def sigmay(self, y, p, k): + @staticmethod + def sigmay(y, p, k): TPL_Y_SYMS = y TPL_P_SYMS = p TPL_K_SYMS = k @@ -88,7 +96,8 @@ def sigmay(self, y, p, k): return TPL_SIGMAY_RET - def Jy(self, y, my, sigmay): + @staticmethod + def Jy(y, my, sigmay): TPL_Y_SYMS = y TPL_MY_SYMS = my TPL_SIGMAY_SYMS = sigmay diff --git a/python/sdist/setup.cfg b/python/sdist/setup.cfg index 6ce19bd290..f6b34bc0c5 100644 --- a/python/sdist/setup.cfg +++ b/python/sdist/setup.cfg @@ -52,6 +52,8 @@ pysb = pysb>=1.13.1 jax = jax diffrax + equinox + optimistix test = benchmark_models_petab @ git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master#subdirectory=src/python h5py diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 5c07cb3dcd..39ea00907c 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -145,7 +145,6 @@ def main(): llh = res[LLH] if args.model_name not in ( - "Bachmann_MSB2011", "Beer_MolBioSystems2014", "Brannmark_JBC2010", 
"Fujita_SciSignal2010", @@ -154,7 +153,6 @@ def main(): "Weber_BMC2015", "Zheng_PNAS2012", ): - # Bachmann: integration failure even with 1e6 steps # Beer: Heaviside # Brannmark_JBC2010: preeq # Fujita: Heaviside @@ -164,7 +162,6 @@ def main(): # Zheng_PNAS2012: preeq jax_model = model_module.get_jax_model() - jax_solver = jax_model.get_solver() simulation_conditions = ( problem.get_simulation_conditions_from_measurement_df() ) @@ -191,9 +188,9 @@ def main(): amici_model=amici_model, ) # run once to JIT - amici.jax.run_simulations(jax_model, jax_solver, edatas) + jax_model.run_simulations(edatas) start_jax = timer() - rdatas_jax = amici.jax.run_simulations(jax_model, jax_solver, edatas) + rdatas_jax = jax_model.run_simulations(edatas) end_jax = timer() t_jax = end_jax - start_jax From a1f37b7f5b929e8bfe80158faf71f0382474d396 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 11 Apr 2024 22:44:27 +0100 Subject: [PATCH 22/80] fix import/wokflow --- .github/workflows/test_benchmark_collection_models.yml | 2 +- python/sdist/amici/__init__.template.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_benchmark_collection_models.yml b/.github/workflows/test_benchmark_collection_models.yml index ab29938a12..9bcb86a9da 100644 --- a/.github/workflows/test_benchmark_collection_models.yml +++ b/.github/workflows/test_benchmark_collection_models.yml @@ -50,7 +50,7 @@ jobs: run: | pip3 install --user petab[vis] && \ AMICI_PARALLEL_COMPILE="" pip3 install -v --user \ - $(ls -t python/sdist/dist/amici-*.tar.gz | head -1)[petab,test,vis] + $(ls -t python/sdist/dist/amici-*.tar.gz | head -1)[petab,test,vis,jax] # retrieve test models - name: Download and test benchmark collection diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index f59108b2d5..6b3e1c7260 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ -31,7 +31,7 @@ def get_jax_model() -> JAXModel: except (ModuleNotFoundError, ImportError): def get_jax_model() -> JAXModel: - raise NotImplementedError() + raise NotImplementedError(str(err)) __version__ = "TPL_PACKAGE_VERSION" From e09bb2f975679a30a3479f09b9f4cdbdd0373f65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 12 Apr 2024 08:33:15 +0100 Subject: [PATCH 23/80] Update __init__.template.py --- python/sdist/amici/__init__.template.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index 6b3e1c7260..f4b50f652a 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ -28,10 +28,11 @@ def get_jax_model() -> JAXModel: return JAXModel_TPL_MODELNAME() -except (ModuleNotFoundError, ImportError): +except (ModuleNotFoundError, ImportError) as exc: + error = str(exc) def get_jax_model() -> JAXModel: - raise NotImplementedError(str(err)) + raise NotImplementedError(error) __version__ = "TPL_PACKAGE_VERSION" From d8d19000051788e38c5e943d4ccc5815ad55be67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 12 Apr 2024 08:48:10 +0100 Subject: [PATCH 24/80] fix jax imports --- documentation/rtd_requirements.txt | 2 ++ python/sdist/amici/__init__.template.py | 17 +++++------------ 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/documentation/rtd_requirements.txt b/documentation/rtd_requirements.txt index 8d2c2100f9..a6743940e0 100644 --- 
a/documentation/rtd_requirements.txt +++ b/documentation/rtd_requirements.txt @@ -3,6 +3,8 @@ sphinx mock>=5.0.2 setuptools>=67.7.2 pysb>=1.11.0 +jax>=0.4.26 +diffrax>=0.5.0 matplotlib==3.7.1 nbsphinx==0.9.1 nbformat==5.8.0 diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py index f4b50f652a..56064535e8 100644 --- a/python/sdist/amici/__init__.template.py +++ b/python/sdist/amici/__init__.template.py @@ -1,13 +1,11 @@ """AMICI-generated module for model TPL_MODELNAME""" from pathlib import Path - +from typing import TYPE_CHECKING import amici -try: +if TYPE_CHECKING: from amici.jax import JAXModel -except (ModuleNotFoundError, ImportError): - JAXModel = object # Ensure we are binary-compatible, see #556 if "TPL_AMICI_VERSION" != amici.__version__: @@ -23,16 +21,11 @@ from .TPL_MODELNAME import * # noqa: F403, F401 from .TPL_MODELNAME import getModel as get_model # noqa: F401 -try: - from .jax import JAXModel_TPL_MODELNAME - def get_jax_model() -> JAXModel: - return JAXModel_TPL_MODELNAME() -except (ModuleNotFoundError, ImportError) as exc: - error = str(exc) +def get_jax_model() -> "JAXModel": + from .jax import JAXModel_TPL_MODELNAME - def get_jax_model() -> JAXModel: - raise NotImplementedError(error) + return JAXModel_TPL_MODELNAME() __version__ = "TPL_PACKAGE_VERSION" From c24fe6b552a1e0c8b6d5b850b588d2af6fdc5d41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 12 Apr 2024 09:49:25 +0100 Subject: [PATCH 25/80] Update setup.cfg --- python/sdist/setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/python/sdist/setup.cfg b/python/sdist/setup.cfg index f6b34bc0c5..97e5681c3b 100644 --- a/python/sdist/setup.cfg +++ b/python/sdist/setup.cfg @@ -51,6 +51,7 @@ petab = petab>=0.2.9 pysb = pysb>=1.13.1 jax = jax + jaxlib diffrax equinox optimistix From 1ec591cfa9cb20ff8efcbb7c9a45c20e2ff2c9cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 12 Apr 2024 10:46:13 +0100 Subject: [PATCH 26/80] add preequilibration support --- python/sdist/amici/jax.py | 35 ++++++++++++++++++---- tests/benchmark-models/test_petab_model.py | 10 +++---- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 2a30d028ad..76bf254f0f 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -103,8 +103,24 @@ def unscale_p(self, p, pscale): .get() )(p, pscale) - def _solve(self, ts, p, k): + def _preeq(self, p, k): x0 = self.x0(p, k) + tcl = self.tcl(x0, p, k) + sol = diffrax.diffeqsolve( + self.term, + self.solver, + args=(p, k, tcl), + t0=0.0, + t1=jnp.inf, + dt0=None, + y0=self.x_solver(x0), + stepsize_controller=self.controller, + max_steps=self.maxsteps, + discrete_terminating_event=diffrax.SteadyStateEvent(), + ) + return sol.ys + + def _solve(self, ts, p, k, x0): tcl = self.tcl(x0, p, k) sol = diffrax.diffeqsolve( self.term, @@ -140,11 +156,16 @@ def _run( ts: np.ndarray, p: np.ndarray, k: jnp.ndarray, + k_preeq: jnp.ndarray, my: jnp.ndarray, pscale: np.ndarray, ): ps = self.unscale_p(p, pscale) - x, tcl, stats = self._solve(ts, ps, k) + if k_preeq.shape[0] > 0: + x0 = self._preeq(ps, k_preeq) + else: + x0 = self.x0(p, k) + x, tcl, stats = self._solve(ts, ps, k, x0) obs = self._obs(ts, x, ps, k, tcl) my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) @@ -158,10 +179,11 @@ def run( ts: np.ndarray, p: jnp.ndarray, k: np.ndarray, + k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, ): - return self._run(ts, p, 
k, my, pscale) + return self._run(ts, p, k, k_preeq, my, pscale) @eqx.filter_jit def srun( @@ -198,6 +220,7 @@ def run_simulation(self, edata: amici.ExpData): ts = np.asarray(edata.getTimepoints()) p = jnp.asarray(edata.parameters) k = np.asarray(edata.fixedParameters) + k_preeq = np.asarray(edata.fixedParametersPreequilibration) my = np.asarray(edata.getObservedData()) pscale = np.asarray(edata.pscale) @@ -207,20 +230,20 @@ def run_simulation(self, edata: amici.ExpData): ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.run(ts, p, k, my, pscale) + ) = self.run(ts, p, k, k_preeq, my, pscale) elif self.sensi_order == amici.SensitivityOrder.first: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.srun(ts, p, k, my, pscale) + ) = self.srun(ts, p, k, k_preeq, my, pscale) elif self.sensi_order == amici.SensitivityOrder.second: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.s2run(ts, p, k, my, pscale) + ) = self.s2run(ts, p, k, k_preeq, my, pscale) for field in rdata_kwargs.keys(): if field == "llh": diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 39ea00907c..2f05724e3c 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -151,15 +151,13 @@ def main(): "Isensee_JCB2018", "Smith_BMCSystBiol2013", "Weber_BMC2015", - "Zheng_PNAS2012", ): # Beer: Heaviside - # Brannmark_JBC2010: preeq + # Brannmark: Heaviside # Fujita: Heaviside - # Isensee_JCB2018: preeq - # Smith_BMCSystBiol2013: Heaviside - # Weber_BMC2015: preeq - # Zheng_PNAS2012: preeq + # Isensee: Heaviside + # Smith: Heaviside + # Weber: Heaviside jax_model = model_module.get_jax_model() simulation_conditions = ( From aebe07c612bebf73b63ff6c2e4ec1b20d20437f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 13 Apr 2024 15:22:31 +0100 Subject: [PATCH 27/80] fix jax tests --- python/sdist/amici/jax.py | 46 ++++++++----- python/tests/test_jax.py | 136 ++++++++++++++++++++------------------ 2 files changed, 102 insertions(+), 80 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 76bf254f0f..3bd2495ff1 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -29,7 +29,6 @@ class JAXModel(eqx.Module): dcoeff: float maxsteps: int term: diffrax.ODETerm - sensi_order: amici.SensitivityOrder def __init__(self): self.solver = diffrax.Kvaerno5() @@ -38,7 +37,7 @@ def __init__(self): self.pcoeff: float = 0.4 self.icoeff: float = 0.3 self.dcoeff: float = 0.0 - self.maxsteps: int = 2**10 + self.maxsteps: int = 2**14 self.controller = diffrax.PIDController( rtol=self.rtol, atol=self.atol, @@ -47,7 +46,6 @@ def __init__(self): dcoeff=self.dcoeff, ) self.term = diffrax.ODETerm(self.xdot) - self.sensi_order = amici.SensitivityOrder.none @staticmethod @abstractmethod @@ -120,7 +118,7 @@ def _preeq(self, p, k): ) return sol.ys - def _solve(self, ts, p, k, x0): + def _solve(self, ts, p, k, x0, checkpointed): tcl = self.tcl(x0, p, k) sol = diffrax.diffeqsolve( self.term, @@ -132,6 +130,9 @@ def _solve(self, ts, p, k, x0): y0=self.x_solver(x0), stepsize_controller=self.controller, max_steps=self.maxsteps, + adjoint=diffrax.RecursiveCheckpointAdjoint() + if checkpointed + else diffrax.DirectAdjoint(), saveat=diffrax.SaveAt(ts=ts), ) return sol.ys, tcl, sol.stats @@ 
-159,13 +160,14 @@ def _run( k_preeq: jnp.ndarray, my: jnp.ndarray, pscale: np.ndarray, + checkpointed=True, ): ps = self.unscale_p(p, pscale) if k_preeq.shape[0] > 0: x0 = self._preeq(ps, k_preeq) else: x0 = self.x0(p, k) - x, tcl, stats = self._solve(ts, ps, k, x0) + x, tcl, stats = self._solve(ts, ps, k, x0, checkpointed=checkpointed) obs = self._obs(ts, x, ps, k, tcl) my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) @@ -191,12 +193,13 @@ def srun( ts: np.ndarray, p: jnp.ndarray, k: np.ndarray, + k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) - )(ts, p, k, my, pscale) + )(ts, p, k, k_preeq, my, pscale) return llh, sllh, (x, obs, stats) @eqx.filter_jit @@ -205,18 +208,23 @@ def s2run( ts: np.ndarray, p: jnp.ndarray, k: np.ndarray, + k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, ): - (llh, (_, _, _)), sllh = (jax.value_and_grad(self._run, 1, True))( - ts, p, k, my, pscale + (llh, (x, obs, stats)), sllh = ( + jax.value_and_grad(self._run, 1, True) + )(ts, p, k, k_preeq, my, pscale) + + s2llh = jax.hessian(self._run, 1, True)( + ts, p, k, k_preeq, my, pscale, False ) - s2llh, (x, obs, stats) = jax.jacfwd( - jax.grad(self._run, 1, True), 1, True - )(ts, p, k, my, pscale) + return llh, sllh, s2llh, (x, obs, stats) - def run_simulation(self, edata: amici.ExpData): + def run_simulation( + self, edata: amici.ExpData, sensitivity_order: amici.SensitivityOrder + ): ts = np.asarray(edata.getTimepoints()) p = jnp.asarray(edata.parameters) k = np.asarray(edata.fixedParameters) @@ -226,18 +234,18 @@ def run_simulation(self, edata: amici.ExpData): rdata_kwargs = dict() - if self.sensi_order == amici.SensitivityOrder.none: + if sensitivity_order == amici.SensitivityOrder.none: ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), ) = self.run(ts, p, k, k_preeq, my, pscale) - elif self.sensi_order == amici.SensitivityOrder.first: + elif sensitivity_order == amici.SensitivityOrder.first: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), ) = self.srun(ts, p, k, k_preeq, my, pscale) - elif self.sensi_order == amici.SensitivityOrder.second: + elif sensitivity_order == amici.SensitivityOrder.second: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], @@ -260,13 +268,17 @@ def run_simulation(self, edata: amici.ExpData): def run_simulations( self, edatas: Iterable[amici.ExpData], + sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, num_threads: int = 1, ): + fun = eqx.Partial( + self.run_simulation, sensitivity_order=sensitivity_order + ) if num_threads > 1: with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(self.run_simulation, edatas) + results = pool.map(fun, edatas) else: - results = map(self.run_simulation, edatas) + results = map(fun, edatas) return list(results) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 492d5162b6..6f7881840a 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -1,5 +1,6 @@ import pytest import amici + pytest.importorskip("jax") import amici.jax @@ -16,21 +17,18 @@ def test_conversion(): pysb.SelfExporter.cleanup() # reset pysb pysb.SelfExporter.do_export = True - model = pysb.Model('conversion') - a = pysb.Monomer('A', sites=['s'], site_states={'s': ['a', 'b']}) - pysb.Initial(a(s='a'), pysb.Parameter('aa0', 1.2)) - pysb.Rule( - 'conv', - a(s='a') >> a(s='b'), pysb.Parameter('kcat', 0.05) - ) - 
pysb.Observable('ab', a(s='b')) + model = pysb.Model("conversion") + a = pysb.Monomer("A", sites=["s"], site_states={"s": ["a", "b"]}) + pysb.Initial(a(s="a"), pysb.Parameter("aa0", 1.2)) + pysb.Rule("conv", a(s="a") >> a(s="b"), pysb.Parameter("kcat", 0.05)) + pysb.Observable("ab", a(s="b")) outdir = model.name - pysb2amici(model, outdir, verbose=True, - observables=['ab']) + pysb2amici(model, outdir, verbose=True, observables=["ab"]) - model_module = amici.import_model_module(module_name=model.name, - module_path=outdir) + model_module = amici.import_model_module( + module_name=model.name, module_path=outdir + ) ts = tuple(np.linspace(0, 1, 10)) p = jnp.stack((1.0, 0.1), axis=-1) @@ -42,33 +40,44 @@ def test_dimerization(): pysb.SelfExporter.cleanup() # reset pysb pysb.SelfExporter.do_export = True - model = pysb.Model('dimerization') - a = pysb.Monomer('A', sites=['b']) - b = pysb.Monomer('B', sites=['a']) - - pysb.Rule('turnover_a', - a(b=None) | None, - pysb.Parameter('kdeg_a', 10), - pysb.Parameter('ksyn_a', 0.1)) - pysb.Rule('turnover_b', - b(a=None) | None, - pysb.Parameter('kdeg_b', 0.1), - pysb.Parameter('ksyn_b', 10)) - pysb.Rule('dimer', - a(b=None) + b(a=None) | a(b=1) % b(a=1), - pysb.Parameter('kon', 1.0), - pysb.Parameter('koff', 0.1)) - - pysb.Observable('a_obs', a()) - pysb.Observable('b_obs', b()) + model = pysb.Model("dimerization") + a = pysb.Monomer("A", sites=["b"]) + b = pysb.Monomer("B", sites=["a"]) + + pysb.Rule( + "turnover_a", + a(b=None) | None, + pysb.Parameter("kdeg_a", 10), + pysb.Parameter("ksyn_a", 0.1), + ) + pysb.Rule( + "turnover_b", + b(a=None) | None, + pysb.Parameter("kdeg_b", 0.1), + pysb.Parameter("ksyn_b", 10), + ) + pysb.Rule( + "dimer", + a(b=None) + b(a=None) | a(b=1) % b(a=1), + pysb.Parameter("kon", 1.0), + pysb.Parameter("koff", 0.1), + ) + + pysb.Observable("a_obs", a()) + pysb.Observable("b_obs", b()) outdir = model.name - pysb2amici(model, outdir, verbose=True, - observables=['a_obs', 'b_obs'], - constant_parameters=['ksyn_a', 'ksyn_b']) + pysb2amici( + model, + outdir, + verbose=True, + observables=["a_obs", "b_obs"], + constant_parameters=["ksyn_a", "ksyn_b"], + ) - model_module = amici.import_model_module(module_name=model.name, - module_path=outdir) + model_module = amici.import_model_module( + module_name=model.name, module_path=outdir + ) ts = tuple(np.linspace(0, 1, 10)) p = jnp.stack((5, 0.5, 0.5, 0.5), axis=-1) @@ -80,11 +89,11 @@ def _test_model(model_module, ts, p, k): amici_model = model_module.getModel() amici_model.setTimepoints(np.asarray(ts, dtype=np.float64)) - sol_amici_ref = amici.runAmiciSimulation(amici_model, - amici_model.getSolver()) + sol_amici_ref = amici.runAmiciSimulation( + amici_model, amici_model.getSolver() + ) jax_model = model_module.get_jax_model() - jax_solver = jax_model.get_solver() amici_model.setParameters(np.asarray(p, dtype=np.float64)) amici_model.setFixedParameters(np.asarray(k, dtype=np.float64)) @@ -99,39 +108,40 @@ def _test_model(model_module, ts, p, k): amici_solver = amici_model.getSolver() amici_solver.setSensitivityMethod(amici.SensitivityMethod.forward) amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) - rs_amici = amici.runAmiciSimulations( - amici_model, - amici_solver, - edatas - ) - - check_fields_jax(rs_amici, jax_model, jax_solver, edatas, - ['x', 'y', 'llh']) + rs_amici = amici.runAmiciSimulations(amici_model, amici_solver, edatas) - jax_solver.sensi_order = amici.SensitivityOrder.first - check_fields_jax(rs_amici, jax_model, jax_solver, edatas, - ['x', 'y', 'llh', 
'sllh']) - - jax_solver.sensi_order = amici.SensitivityOrder.second - check_fields_jax(rs_amici, jax_model, jax_solver, edatas, - ['x', 'y', 'llh', 'sllh']) + check_fields_jax(rs_amici, jax_model, edatas, ["x", "y", "llh"]) + check_fields_jax( + rs_amici, + jax_model, + edatas, + ["x", "y", "llh", "sllh"], + sensi_order=amici.SensitivityOrder.first, + ) -def check_fields_jax(rs_amici, - jax_model, - jax_solver, - edatas, - fields): - rs_jax = amici.jax.run_simulations( + check_fields_jax( + rs_amici, jax_model, - jax_solver, - edatas + edatas, + ["x", "y", "llh", "sllh"], + sensi_order=amici.SensitivityOrder.second, ) + + +def check_fields_jax( + rs_amici, + jax_model, + edatas, + fields, + sensi_order=amici.SensitivityOrder.none, +): + rs_jax = jax_model.run_simulations(edatas, sensitivity_order=sensi_order) for field in fields: for r_amici, r_jax in zip(rs_amici, rs_jax): assert_allclose( actual=r_amici[field], desired=r_jax[field], atol=1e-6, - rtol=1e-6 + rtol=1e-6, ) From 4125c51e9715c9db85c979767b83de89eed41de4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 14 Apr 2024 10:53:31 +0100 Subject: [PATCH 28/80] add filterwarning --- python/tests/test_jax.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 6f7881840a..34fd70a201 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -36,6 +36,9 @@ def test_conversion(): _test_model(model_module, ts, p, k) +@pytest.mark.filterwarnings( + "ignore:Model does not contain any initial conditions" +) def test_dimerization(): pysb.SelfExporter.cleanup() # reset pysb pysb.SelfExporter.do_export = True From 8143cc25e5f729a74c15eb2dfb3a4c07eb1f209f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 14 Apr 2024 14:05:56 +0100 Subject: [PATCH 29/80] fix parameter transformation --- python/sdist/amici/jax.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 3bd2495ff1..75e7810a49 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -134,6 +134,7 @@ def _solve(self, ts, p, k, x0, checkpointed): if checkpointed else diffrax.DirectAdjoint(), saveat=diffrax.SaveAt(ts=ts), + throw=False, ) return sol.ys, tcl, sol.stats @@ -166,7 +167,7 @@ def _run( if k_preeq.shape[0] > 0: x0 = self._preeq(ps, k_preeq) else: - x0 = self.x0(p, k) + x0 = self.x0(ps, k) x, tcl, stats = self._solve(ts, ps, k, x0, checkpointed=checkpointed) obs = self._obs(ts, x, ps, k, tcl) my_r = my.reshape((len(ts), -1)) From 81e2aebf66dd31252aca8a4996d2205dc4cd1ef7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 19 Oct 2024 12:31:28 +0100 Subject: [PATCH 30/80] reenable ruff format --- .pre-commit-config.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aee5b3b77e..f16458b29a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,22 @@ repos: args: [--allow-multiple-documents] - id: end-of-file-fixer - id: trailing-whitespace +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.6.7 + hooks: + # Run the linter. + - id: ruff + args: + - --fix + - --config + - python/sdist/pyproject.toml + + # Run the formatter. 
+ - id: ruff-format + args: + - --config + - python/sdist/pyproject.toml - repo: https://github.com/asottile/pyupgrade rev: v3.17.0 From c01f707a2abbc10b641e6bf7e3ed4259a34282df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 19 Oct 2024 12:41:10 +0100 Subject: [PATCH 31/80] post merge cleanup --- .github/workflows/test_python_cplusplus.yml | 5 -- python/sdist/pyproject.toml | 11 ++- python/sdist/setup.cfg | 97 --------------------- 3 files changed, 9 insertions(+), 104 deletions(-) delete mode 100644 python/sdist/setup.cfg diff --git a/.github/workflows/test_python_cplusplus.yml b/.github/workflows/test_python_cplusplus.yml index 23337986db..6c5e1bd7b7 100644 --- a/.github/workflows/test_python_cplusplus.yml +++ b/.github/workflows/test_python_cplusplus.yml @@ -231,11 +231,6 @@ jobs: - name: Install python package run: scripts/installAmiciSource.sh - - name: Install notebook dependencies - run: | - source venv/bin/activate \ - && pip install jax[cpu] - - name: example notebooks run: scripts/runNotebook.sh python/examples/example_*/ diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index 22f33eda2c..1d641abf28 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -74,14 +74,21 @@ test = [ "scipy", "pooch" ] -vis =[ +vis = [ "matplotlib", "seaborn", ] -examples =[ +examples = [ "jupyter", "scipy", ] +jax = [ + "jax>=0.4.34", + "jaxlib>=0.4.34", + "diffrax>=0.6.0", + "equinox>=0.11.8", + "optimistix>=0.0.8", +] [project.scripts] # amici_import_petab.py is kept for backwards compatibility diff --git a/python/sdist/setup.cfg b/python/sdist/setup.cfg deleted file mode 100644 index 97e5681c3b..0000000000 --- a/python/sdist/setup.cfg +++ /dev/null @@ -1,97 +0,0 @@ -[metadata] -name = amici -description = Advanced multi-language Interface to CVODES and IDAS -version = file: amici/version.txt -license = BSD 3-Clause License -url = https://github.com/AMICI-dev/AMICI -keywords = differential equations, simulation, ode, cvodes, systems biology, sensitivity analysis, sbml, pysb, petab -author = Fabian Froehlich, Jan Hasenauer, Daniel Weindl and Paul Stapor -author_email = fabian_froehlich@hms.harvard.edu -project_urls = - Bug Reports = https://github.com/AMICI-dev/AMICI/issues - Source = https://github.com/AMICI-dev/AMICI - Documentation = https://amici.readthedocs.io/en/latest/ -classifiers = - Development Status :: 5 - Production/Stable - Intended Audience :: Science/Research - License :: OSI Approved :: BSD License - Operating System :: POSIX :: Linux - Operating System :: MacOS :: MacOS X - Programming Language :: Python - Programming Language :: C++ - Topic :: Scientific/Engineering :: Bio-Informatics - -[options] -packages = find_namespace: -package_dir = - amici = amici -python_requires = >=3.9 -install_requires = - cmake-build-extension==0.5.1 - sympy>=1.9 - numpy>=1.19.3; python_version=='3.9' - numpy>=1.21.4; python_version>='3.10' - numpy>=1.23.2; python_version=='3.11' - numpy; python_version>='3.12' - python-libsbml - pandas>=2.0.2 - pyarrow - wurlitzer - toposort - setuptools>=48 - mpmath -include_package_data = True -zip_safe = False - -[options.extras_require] -# Don't include any URLs here - they are not supported by PyPI: -# HTTPError: 400 Bad Request from https://upload.pypi.org/legacy/ -# Invalid value for requires_dist. Error: Can't have direct dependency: ... 
-petab = petab>=0.2.9 -pysb = pysb>=1.13.1 -jax = - jax - jaxlib - diffrax - equinox - optimistix -test = - benchmark_models_petab @ git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master#subdirectory=src/python - h5py - pytest - pytest-cov - pytest-rerunfailures - coverage - shyaml - antimony>=2.13 - # see https://github.com/sys-bio/antimony/issues/92 - # unsupported x86_64 / x86_64h - antimony!=2.14; platform_system=='Darwin' and platform_machine in 'x86_64h' - scipy - pooch -vis = - matplotlib - seaborn -examples = - jupyter - scipy - -[options.package_data] -amici = - amici/include/amici/* - src/*template* - swig/* - libs/* - setup.py.template - -[options.exclude_package_data] -* = - README.txt - - -[options.entry_points] - -; amici_import_petab.py is kept for backwards compatibility -console_scripts = - amici_import_petab = amici.petab.cli.import_petab:_main - amici_import_petab.py = amici.petab.cli.import_petab:_main From a5d356a634e2107742667c107f827559622bef1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 19 Oct 2024 14:36:54 +0100 Subject: [PATCH 32/80] "fix" splines --- .pre-commit-config.yaml | 6 ------ python/sdist/amici/jax.template.py | 1 + python/sdist/amici/jaxcodeprinter.py | 5 +++++ python/sdist/pyproject.toml | 1 + 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f16458b29a..26395d1a0a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,10 +27,4 @@ repos: - --config - python/sdist/pyproject.toml -- repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 - hooks: - - id: pyupgrade - args: ["--py310-plus"] - exclude: '^(ThirdParty|models)/' diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index 378b16944f..c52f29a78f 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -1,4 +1,5 @@ import jax.numpy as jnp +from interpax import interp1d from amici.jax import JAXModel diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py index b768d44fc9..ee56d292ff 100644 --- a/python/sdist/amici/jaxcodeprinter.py +++ b/python/sdist/amici/jaxcodeprinter.py @@ -1,4 +1,5 @@ """Jax code generation""" + import re from typing import Optional, Union from collections.abc import Iterable @@ -21,6 +22,10 @@ def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: f'Encountered unsupported function in expression "{expr}"' ) from e + def _print_AmiciSpline(self, expr: sp.Expr) -> str: + # FIXME: untested, where are spline nodes coming from anyways? 
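        # (Explanatory note, not part of the original patch.) The line below prints an
        # AMICI spline expression as a call to interpax' interp1d over the spline's
        # remaining arguments (expr.args[2:]) with cubic interpolation; it assumes a
        # variable named `time` is in scope in the generated model code.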
+ return f'interp1d(time, {self.doprint(expr.args[2:])}, kind="cubic")' + def _get_sym_lines( self, symbols: Union[Iterable[str], sp.Matrix], diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index 1d641abf28..d8d74c6476 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -88,6 +88,7 @@ jax = [ "diffrax>=0.6.0", "equinox>=0.11.8", "optimistix>=0.0.8", + "interpax>=0.3.3", ] [project.scripts] From 9a021cfac43cb1c1d3b8a7a0615cd0aff49e3652 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 19 Oct 2024 14:37:03 +0100 Subject: [PATCH 33/80] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 26395d1a0a..f16458b29a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,4 +27,10 @@ repos: - --config - python/sdist/pyproject.toml +- repo: https://github.com/asottile/pyupgrade + rev: v3.17.0 + hooks: + - id: pyupgrade + args: ["--py310-plus"] + exclude: '^(ThirdParty|models)/' From 50193d86c983ef6b987ce07d2e091f563a534287 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 21 Oct 2024 12:29:44 +0100 Subject: [PATCH 34/80] force optimistix 0.0.9 --- python/sdist/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index d8d74c6476..c7433ef17e 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -87,7 +87,7 @@ jax = [ "jaxlib>=0.4.34", "diffrax>=0.6.0", "equinox>=0.11.8", - "optimistix>=0.0.8", + "optimistix>=0.0.9", "interpax>=0.3.3", ] From 7faae32365c781f9abeef6ea06797547b5d5b3b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 21 Oct 2024 15:52:19 +0100 Subject: [PATCH 35/80] add support for heavyside functions --- python/sdist/amici/de_export.py | 13 ++++++++++++- tests/benchmark-models/test_petab_model.py | 17 ++++++++++------- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 95b70eccc9..6f747d8d82 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -21,6 +21,7 @@ TYPE_CHECKING, Literal, ) + import sympy as sp from . 
import ( @@ -305,7 +306,17 @@ def jnp_stack_str(array) -> str: f"{eq_name.upper()}_EQ": "\n".join( self._code_printer_jax._get_sym_lines( (str(strip_pysb(s)) for s in self.model.sym(eq_name)), - self.model.eq(eq_name), + self.model.eq(eq_name).subs( + dict( + zip( + self.model.sym("h"), + ( + sp.Heaviside(x) + for x in self.model.eq("root") + ), + ) + ) + ), indent, ) ) diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index c0424fbf03..64af79d8a8 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -145,13 +145,16 @@ def main(): rdatas = res[RDATAS] llh = res[LLH] - if args.model_name not in ( - "Beer_MolBioSystems2014", - "Brannmark_JBC2010", - "Fujita_SciSignal2010", - "Isensee_JCB2018", - "Smith_BMCSystBiol2013", - "Weber_BMC2015", + if ( + args.model_name + not in ( + # "Beer_MolBioSystems2014", + # "Brannmark_JBC2010", + # "Fujita_SciSignal2010", + # "Isensee_JCB2018", + # "Smith_BMCSystBiol2013", + # "Weber_BMC2015", + ) ): # Beer: Heaviside # Brannmark: Heaviside From 907acb7319b2ec5fdf25a375980a4ec44c01c5a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 21 Oct 2024 18:08:37 +0100 Subject: [PATCH 36/80] cleanup & actually run tests --- tests/benchmark-models/test_petab_model.py | 97 +++++++++------------- 1 file changed, 41 insertions(+), 56 deletions(-) diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 64af79d8a8..89a482cd7a 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -145,65 +145,50 @@ def main(): rdatas = res[RDATAS] llh = res[LLH] - if ( - args.model_name - not in ( - # "Beer_MolBioSystems2014", - # "Brannmark_JBC2010", - # "Fujita_SciSignal2010", - # "Isensee_JCB2018", - # "Smith_BMCSystBiol2013", - # "Weber_BMC2015", - ) - ): - # Beer: Heaviside - # Brannmark: Heaviside - # Fujita: Heaviside - # Isensee: Heaviside - # Smith: Heaviside - # Weber: Heaviside - - jax_model = model_module.get_jax_model() - simulation_conditions = ( - problem.get_simulation_conditions_from_measurement_df() - ) - edatas = create_edatas( - amici_model=amici_model, - petab_problem=problem, - simulation_conditions=simulation_conditions, - ) - problem_parameters = { - t.Index: getattr(t, petab.NOMINAL_VALUE) - for t in problem.parameter_df.itertuples() - } - parameter_mapping = create_parameter_mapping( - petab_problem=problem, - simulation_conditions=simulation_conditions, - scaled_parameters=False, - amici_model=amici_model, - ) - fill_in_parameters( - edatas=edatas, - problem_parameters=problem_parameters, - scaled_parameters=False, - parameter_mapping=parameter_mapping, - amici_model=amici_model, - ) - # run once to JIT - jax_model.run_simulations(edatas) - start_jax = timer() - rdatas_jax = jax_model.run_simulations(edatas) - end_jax = timer() + jax_model = model_module.get_jax_model() + simulation_conditions = ( + problem.get_simulation_conditions_from_measurement_df() + ) + edatas = create_edatas( + amici_model=amici_model, + petab_problem=problem, + simulation_conditions=simulation_conditions, + ) + problem_parameters = { + t.Index: getattr(t, petab.NOMINAL_VALUE) + for t in problem.parameter_df.itertuples() + } + parameter_mapping = create_parameter_mapping( + petab_problem=problem, + simulation_conditions=simulation_conditions, + scaled_parameters=False, + amici_model=amici_model, + ) + fill_in_parameters( + edatas=edatas, + 
problem_parameters=problem_parameters, + scaled_parameters=False, + parameter_mapping=parameter_mapping, + amici_model=amici_model, + ) + # run once to JIT + jax_model.run_simulations(edatas) + start_jax = timer() + rdatas_jax = jax_model.run_simulations(edatas) + end_jax = timer() - t_jax = end_jax - start_jax - t_amici = sum(r.cpu_time for r in rdatas) / 1e3 + t_jax = end_jax - start_jax + t_amici = sum(r.cpu_time for r in rdatas) / 1e3 - llh_jax = sum(r.llh for r in rdatas_jax) + llh_jax = sum(r.llh for r in rdatas_jax) - print( - f'amici (llh={res["llh"]} after {t_amici}s) vs ' - f'jax (llh={llh_jax} after {t_jax}s)' - ) + print( + f'amici (llh={res["llh"]} after {t_amici}s) vs ' + f'jax (llh={llh_jax} after {t_jax}s)' + ) + assert np.isclose( + llh, llh_jax, rtol=1e-3, atol=1e-3 + ), "LLH mismatch {llh} (amici) vs {llh_jax} (jax)" times = dict() From 82a01bacb8970f29ee614babbbfc7778c6a131c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 22 Oct 2024 15:58:12 +0100 Subject: [PATCH 37/80] simply tests + add support for non-dynamic simulation in jax --- .../test_benchmark_collection_models.yml | 4 +- python/sdist/amici/jax.py | 53 ++++++++++--------- python/sdist/amici/petab/petab_import.py | 16 +++++- .../test_benchmark_collection.sh | 12 +---- tests/benchmark-models/test_petab_model.py | 34 ++++++------ 5 files changed, 62 insertions(+), 57 deletions(-) diff --git a/.github/workflows/test_benchmark_collection_models.yml b/.github/workflows/test_benchmark_collection_models.yml index 39eef6f9be..81c971be15 100644 --- a/.github/workflows/test_benchmark_collection_models.yml +++ b/.github/workflows/test_benchmark_collection_models.yml @@ -59,9 +59,7 @@ jobs: # retrieve test models - name: Download and test benchmark collection run: | - git clone --depth 1 https://github.com/benchmarking-initiative/Benchmark-Models-PEtab.git \ - && export BENCHMARK_COLLECTION="$(pwd)/Benchmark-Models-PEtab/Benchmark-Models/" \ - && pip3 install -e $BENCHMARK_COLLECTION/../src/python \ + pip install git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master#subdirectory=src/python \ && AMICI_PARALLEL_COMPILE="" tests/benchmark-models/test_benchmark_collection.sh # run gradient checks diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 75e7810a49..5537aef2c8 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -49,48 +49,39 @@ def __init__(self): @staticmethod @abstractmethod - def xdot(t, x, args): - ... + def xdot(t, x, args): ... @staticmethod @abstractmethod - def _w(t, x, p, k, tcl): - ... + def _w(t, x, p, k, tcl): ... @staticmethod @abstractmethod - def x0(p, k): - ... + def x0(p, k): ... @staticmethod @abstractmethod - def x_solver(x): - ... + def x_solver(x): ... @staticmethod @abstractmethod - def x_rdata(x, tcl): - ... + def x_rdata(x, tcl): ... @staticmethod @abstractmethod - def tcl(x, p, k): - ... + def tcl(x, p, k): ... @staticmethod @abstractmethod - def y(t, x, p, k, tcl): - ... + def y(t, x, p, k, tcl): ... @staticmethod @abstractmethod - def sigmay(y, p, k): - ... + def sigmay(y, p, k): ... @staticmethod @abstractmethod - def Jy(y, my, sigmay): - ... + def Jy(y, my, sigmay): ... 
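    # (Added commentary, not part of the patch.) The abstract methods above are
    # implemented by the generated, model-specific subclasses. A minimal usage
    # sketch, assuming a generated model extension module `model_module` and a
    # list of amici.ExpData objects `edatas` (illustrative names, mirroring the
    # tests elsewhere in this patch series):
    #
    #     jax_model = model_module.get_jax_model()
    #     rdatas = jax_model.run_simulations(
    #         edatas, sensitivity_order=amici.SensitivityOrder.first
    #     )
    #     llh = sum(r.llh for r in rdatas)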
def unscale_p(self, p, pscale): return jax.vmap( @@ -136,6 +127,7 @@ def _solve(self, ts, p, k, x0, checkpointed): saveat=diffrax.SaveAt(ts=ts), throw=False, ) + return sol.ys, tcl, sol.stats def _obs(self, ts, x, p, k, tcl): @@ -162,13 +154,22 @@ def _run( my: jnp.ndarray, pscale: np.ndarray, checkpointed=True, + dynamic=True, ): ps = self.unscale_p(p, pscale) if k_preeq.shape[0] > 0: x0 = self._preeq(ps, k_preeq) else: x0 = self.x0(ps, k) - x, tcl, stats = self._solve(ts, ps, k, x0, checkpointed=checkpointed) + + if dynamic: + x, tcl, stats = self._solve( + ts, ps, k, x0, checkpointed=checkpointed + ) + else: + x = tuple(jnp.array([x0_i] * len(ts)) for x0_i in x0) + tcl = self.tcl(x0, ps, k) + stats = None obs = self._obs(ts, x, ps, k, tcl) my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) @@ -176,7 +177,7 @@ def _run( x_rdata = self._x_rdata(x, tcl) return llh, (x_rdata, obs, stats) - @eqx.filter_jit + # @eqx.filter_jit def run( self, ts: np.ndarray, @@ -185,8 +186,9 @@ def run( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, + dynamic=True, ): - return self._run(ts, p, k, k_preeq, my, pscale) + return self._run(ts, p, k, k_preeq, my, pscale, dynamic=dynamic) @eqx.filter_jit def srun( @@ -197,6 +199,7 @@ def srun( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, + dynamic=True, ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) @@ -212,6 +215,7 @@ def s2run( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, + dynamic=True, ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) @@ -232,6 +236,7 @@ def run_simulation( k_preeq = np.asarray(edata.fixedParametersPreequilibration) my = np.asarray(edata.getObservedData()) pscale = np.asarray(edata.pscale) + dynamic = np.max(ts) > 0 rdata_kwargs = dict() @@ -239,20 +244,20 @@ def run_simulation( ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.run(ts, p, k, k_preeq, my, pscale) + ) = self.run(ts, p, k, k_preeq, my, pscale, dynamic) elif sensitivity_order == amici.SensitivityOrder.first: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.srun(ts, p, k, k_preeq, my, pscale) + ) = self.srun(ts, p, k, k_preeq, my, pscale, dynamic) elif sensitivity_order == amici.SensitivityOrder.second: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.s2run(ts, p, k, k_preeq, my, pscale) + ) = self.s2run(ts, p, k, k_preeq, my, pscale, dynamic) for field in rdata_kwargs.keys(): if field == "llh": diff --git a/python/sdist/amici/petab/petab_import.py b/python/sdist/amici/petab/petab_import.py index 52b08cfd47..42a4d85dc4 100644 --- a/python/sdist/amici/petab/petab_import.py +++ b/python/sdist/amici/petab/petab_import.py @@ -37,8 +37,9 @@ def import_petab_problem( model_name: str = None, compile_: bool = None, non_estimated_parameters_as_constants=True, + jax=False, **kwargs, -) -> "amici.Model": +) -> "amici.Model | amici.JAXModel": """ Create an AMICI model for a PEtab problem. @@ -64,6 +65,9 @@ def import_petab_problem( model size and simulation times. If sensitivities with respect to those parameters are required, this should be set to ``False``. + :param jax: + Whether to load the jax version of the model. 
+ :param kwargs: Additional keyword arguments to be passed to :meth:`amici.sbml_import.SbmlImporter.sbml2amici` or @@ -154,6 +158,16 @@ def import_petab_problem( # import model model_module = amici.import_model_module(model_name, model_output_dir) + + if jax: + model = model_module.get_jax_model() + + logger.info( + f"Successfully loaded jax model {model_name} " + f"from {model_output_dir}." + ) + return model + model = model_module.getModel() check_model(amici_model=model, petab_problem=petab_problem) diff --git a/tests/benchmark-models/test_benchmark_collection.sh b/tests/benchmark-models/test_benchmark_collection.sh index 581b8db028..4efd1c55bb 100755 --- a/tests/benchmark-models/test_benchmark_collection.sh +++ b/tests/benchmark-models/test_benchmark_collection.sh @@ -86,17 +86,9 @@ script_path=$(dirname "$BASH_SOURCE") script_path=$(cd "$script_path" && pwd) for model in $models; do - yaml="${model_dir}"/"${model}"/"${model}".yaml - - # different naming scheme - if [[ "$model" == "Bertozzi_PNAS2020" ]]; then - yaml="${model_dir}"/"${model}"/problem.yaml - fi - - amici_model_dir=test_bmc/"${model}" + amici_model_dir=test_bmc mkdir -p "$amici_model_dir" - cmd_import="amici_import_petab ${yaml} -o ${amici_model_dir} -n ${model} --flatten" - cmd_run="$script_path/test_petab_model.py -y ${yaml} -d ${amici_model_dir} -m ${model} -c" + cmd_run="$script_path/test_petab_model.py -d ${amici_model_dir} -m ${model} -c" printf '=%.0s' {1..40} printf " %s " "${model}" diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py index 89a482cd7a..d38c1b5f9e 100755 --- a/tests/benchmark-models/test_petab_model.py +++ b/tests/benchmark-models/test_petab_model.py @@ -6,7 +6,6 @@ import argparse import contextlib -import importlib import logging import os import sys @@ -29,6 +28,7 @@ ) from timeit import default_timer as timer from petab.v1.visualize import plot_problem +import benchmark_models_petab logger = get_logger(f"amici.{__name__}", logging.WARNING) @@ -67,15 +67,6 @@ def parse_cli_args(): help="Plot measurement and simulation results", ) - # PEtab problem - parser.add_argument( - "-y", - "--yaml", - dest="yaml_file_name", - required=True, - help="PEtab YAML problem filename", - ) - # Corresponding AMICI model parser.add_argument( "-m", @@ -88,7 +79,7 @@ def parse_cli_args(): "-d", "--model-dir", dest="model_directory", - help="Directory containing the AMICI module of the " + help="Parent directory containing the AMICI module of the " "model to simulate. 
Required if model is not " "in python path.", ) @@ -113,19 +104,20 @@ def main(): logger.info( f"Simulating '{args.model_name}' " - f"({args.model_directory}) using PEtab data from " - f"{args.yaml_file_name}" + f"({args.model_directory}) with AMICI" ) # load PEtab files - problem = petab.Problem.from_yaml(args.yaml_file_name) + problem = benchmark_models_petab.get_problem(args.model_name) petab.flatten_timepoint_specific_output_overrides(problem) # load model - if args.model_directory: - sys.path.insert(0, args.model_directory) - model_module = importlib.import_module(args.model_name) - amici_model = model_module.getModel() + from amici.petab.petab_import import import_petab_problem + + amici_model = import_petab_problem( + problem, + model_output_dir=Path(args.model_directory) / args.model_name, + ) amici_solver = amici_model.getSolver() amici_solver.setAbsoluteTolerance(1e-8) @@ -145,7 +137,11 @@ def main(): rdatas = res[RDATAS] llh = res[LLH] - jax_model = model_module.get_jax_model() + jax_model = import_petab_problem( + problem, + model_output_dir=Path(args.model_directory) / args.model_name, + jax=True, + ) simulation_conditions = ( problem.get_simulation_conditions_from_measurement_df() ) From c548c935af1a63971f47efa18651511b1ac6acd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 24 Oct 2024 10:21:45 +0100 Subject: [PATCH 38/80] fix for NONCONST_CLS --- python/sdist/amici/jax.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 5537aef2c8..c882658e3e 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -167,7 +167,9 @@ def _run( ts, ps, k, x0, checkpointed=checkpointed ) else: - x = tuple(jnp.array([x0_i] * len(ts)) for x0_i in x0) + x = tuple( + self.x_solver(jnp.array([x0_i] * len(ts)) for x0_i in x0) + ) tcl = self.tcl(x0, ps, k) stats = None obs = self._obs(ts, x, ps, k, tcl) From 7c27a21a460be1f0833e630522fa8498ef622823 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 24 Oct 2024 14:31:03 +0100 Subject: [PATCH 39/80] fix petab path --- tests/benchmark-models/test_benchmark_collection.sh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/benchmark-models/test_benchmark_collection.sh b/tests/benchmark-models/test_benchmark_collection.sh index 4efd1c55bb..2cae1db484 100755 --- a/tests/benchmark-models/test_benchmark_collection.sh +++ b/tests/benchmark-models/test_benchmark_collection.sh @@ -2,8 +2,6 @@ # Import and run selected benchmark models with nominal parameters and check # agreement with reference values # -# Expects environment variable BENCHMARK_COLLECTION to provide path to -# benchmark collection model directory # Confirmed to be working models=" @@ -60,8 +58,6 @@ Zheng_PNAS2012" set -e -[[ -n "${BENCHMARK_COLLECTION}" ]] && model_dir="${BENCHMARK_COLLECTION}" - function show_help() { echo "-h: this help; -n: dry run, print commands; -b path_to_models_dir" } @@ -112,7 +108,7 @@ cd "$script_path" && python evaluate_benchmark.py # Test deprecated import from individual PEtab files model="Zheng_PNAS2012" -problem_dir="${model_dir}/${model}" +problem_dir=$(python3 -c "import benchmark_models_petab; print(str(benchmark_models_petab.get_problem_yaml_path('Zheng_PNAS2012').parent))") amici_model_dir=test_bmc/"${model}-deprecated" cmd_import="amici_import_petab -s "${problem_dir}/model_${model}.xml" \ -m "${problem_dir}/measurementData_${model}.tsv" \ From 956b0a638c04122f1e51d5546605d75d4a93cdeb 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 24 Oct 2024 21:59:23 +0100 Subject: [PATCH 40/80] fixup merge --- .../benchmark-models/test_petab_benchmark.py | 47 +++ tests/benchmark-models/test_petab_model.py | 323 ------------------ 2 files changed, 47 insertions(+), 323 deletions(-) delete mode 100755 tests/benchmark-models/test_petab_model.py diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index b4e1f50e68..ae84e3bc02 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -31,6 +31,9 @@ RDATAS, rdatas_to_measurement_df, simulate_petab, + create_edatas, + fill_in_parameters, + create_parameter_mapping, ) from petab.v1.visualize import plot_problem @@ -250,6 +253,50 @@ def benchmark_problem(request): return problem_id, petab_problem, amici_model +def test_jax_llh(benchmark_problem): + problem_id, petab_problem, amici_model = benchmark_problem + if problem_id not in problems_for_llh_check: + pytest.skip("Excluded from log-likelihood check.") + jax_model = import_petab_problem( + problem_id, + model_output_dir=benchmark_outdir / problem_id, + jax=True, + ) + simulation_conditions = ( + petab_problem.get_simulation_conditions_from_measurement_df() + ) + edatas = create_edatas( + amici_model=amici_model, + petab_problem=petab_problem, + simulation_conditions=simulation_conditions, + ) + problem_parameters = { + t.Index: getattr(t, petab.NOMINAL_VALUE) + for t in petab_problem.parameter_df.itertuples() + } + parameter_mapping = create_parameter_mapping( + petab_problem=petab_problem, + simulation_conditions=simulation_conditions, + scaled_parameters=False, + amici_model=amici_model, + ) + fill_in_parameters( + edatas=edatas, + problem_parameters=problem_parameters, + scaled_parameters=False, + parameter_mapping=parameter_mapping, + amici_model=amici_model, + ) + rdatas_jax = jax_model.run_simulations(edatas) + + llh_jax = sum(r.llh for r in rdatas_jax) + ref_llh = reference_values[problem_id]["llh"] + + assert np.isclose( + ref_llh, llh_jax, rtol=1e-3, atol=1e-3 + ), f"LLH mismatch for {problem_id} with {ref_llh} vs {llh_jax} (jax)" + + @pytest.mark.filterwarnings( "ignore:divide by zero encountered in log", # https://github.com/AMICI-dev/AMICI/issues/18 diff --git a/tests/benchmark-models/test_petab_model.py b/tests/benchmark-models/test_petab_model.py deleted file mode 100755 index e632ec772a..0000000000 --- a/tests/benchmark-models/test_petab_model.py +++ /dev/null @@ -1,323 +0,0 @@ -#!/usr/bin/env python3 - -""" -Simulate a PEtab problem and compare results to reference values -""" - -import argparse -import contextlib -import logging -import os -import sys -from pathlib import Path - -import amici -import numpy as np -import pandas as pd -import petab.v1 as petab -import yaml -from amici.logging import get_logger -from amici.petab.simulations import ( - LLH, - RDATAS, - rdatas_to_measurement_df, - simulate_petab, - create_edatas, - fill_in_parameters, - create_parameter_mapping, -) -from timeit import default_timer as timer -from petab.v1.visualize import plot_problem -from petab.v1.lint import measurement_table_has_timepoint_specific_mappings -import benchmark_models_petab - -logger = get_logger(f"amici.{__name__}", logging.WARNING) - - -def parse_cli_args(): - """Parse command line arguments - - Returns: - Parsed CLI arguments from ``argparse``. 
- """ - - parser = argparse.ArgumentParser( - description="Simulate PEtab-format model using AMICI." - ) - - # General options: - parser.add_argument( - "-v", - "--verbose", - dest="verbose", - action="store_true", - help="More verbose output", - ) - parser.add_argument( - "-c", - "--check", - dest="check", - action="store_true", - help="Compare to reference value", - ) - parser.add_argument( - "-p", - "--plot", - dest="plot", - action="store_true", - help="Plot measurement and simulation results", - ) - - # Corresponding AMICI model - parser.add_argument( - "-m", - "--model-name", - dest="model_name", - help="Name of the AMICI module of the model to " "simulate.", - required=True, - ) - parser.add_argument( - "-d", - "--model-dir", - dest="model_directory", - help="Parent directory containing the AMICI module of the " - "model to simulate. Required if model is not " - "in python path.", - ) - - parser.add_argument( - "-o", - "--simulation-file", - dest="simulation_file", - help="File to write simulation result to, in PEtab" - "measurement table format.", - ) - - return parser.parse_args() - - -def main(): - """Simulate the model specified on the command line""" - script_dir = Path(__file__).parent.absolute() - args = parse_cli_args() - loglevel = logging.DEBUG if args.verbose else logging.INFO - logger.setLevel(loglevel) - - logger.info( - f"Simulating '{args.model_name}' " - f"({args.model_directory}) with AMICI" - ) - - # load PEtab files - problem = benchmark_models_petab.get_problem(args.model_name) - - if measurement_table_has_timepoint_specific_mappings( - problem.measurement_df - ): - petab.flatten_timepoint_specific_output_overrides(problem) - - # load model - from amici.petab.petab_import import import_petab_problem - - amici_model = import_petab_problem( - problem, - model_output_dir=Path(args.model_directory) / args.model_name, - ) - amici_solver = amici_model.getSolver() - - amici_solver.setAbsoluteTolerance(1e-8) - amici_solver.setRelativeTolerance(1e-8) - amici_solver.setMaxSteps(int(1e4)) - if args.model_name in ("Brannmark_JBC2010", "Isensee_JCB2018"): - amici_model.setSteadyStateSensitivityMode( - amici.SteadyStateSensitivityMode.integrationOnly - ) - - res = simulate_petab( - petab_problem=problem, - amici_model=amici_model, - solver=amici_solver, - log_level=logging.INFO, - ) - rdatas = res[RDATAS] - llh = res[LLH] - - jax_model = import_petab_problem( - problem, - model_output_dir=Path(args.model_directory) / args.model_name, - jax=True, - ) - simulation_conditions = ( - problem.get_simulation_conditions_from_measurement_df() - ) - edatas = create_edatas( - amici_model=amici_model, - petab_problem=problem, - simulation_conditions=simulation_conditions, - ) - problem_parameters = { - t.Index: getattr(t, petab.NOMINAL_VALUE) - for t in problem.parameter_df.itertuples() - } - parameter_mapping = create_parameter_mapping( - petab_problem=problem, - simulation_conditions=simulation_conditions, - scaled_parameters=False, - amici_model=amici_model, - ) - fill_in_parameters( - edatas=edatas, - problem_parameters=problem_parameters, - scaled_parameters=False, - parameter_mapping=parameter_mapping, - amici_model=amici_model, - ) - # run once to JIT - jax_model.run_simulations(edatas) - start_jax = timer() - rdatas_jax = jax_model.run_simulations(edatas) - end_jax = timer() - - t_jax = end_jax - start_jax - t_amici = sum(r.cpu_time for r in rdatas) / 1e3 - - llh_jax = sum(r.llh for r in rdatas_jax) - - print( - f'amici (llh={res["llh"]} after {t_amici}s) vs ' - f'jax 
(llh={llh_jax} after {t_jax}s)' - ) - assert np.isclose( - llh, llh_jax, rtol=1e-3, atol=1e-3 - ), "LLH mismatch {llh} (amici) vs {llh_jax} (jax)" - - times = dict() - - for label, sensi_mode in { - "t_sim": amici.SensitivityMethod.none, - "t_fwd": amici.SensitivityMethod.forward, - "t_adj": amici.SensitivityMethod.adjoint, - }.items(): - amici_solver.setSensitivityMethod(sensi_mode) - if sensi_mode == amici.SensitivityMethod.none: - amici_solver.setSensitivityOrder(amici.SensitivityOrder.none) - else: - amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) - - res_repeats = [ - simulate_petab( - petab_problem=problem, - amici_model=amici_model, - solver=amici_solver, - log_level=loglevel, - ) - for _ in range(3) # repeat to get more stable timings - ] - res = res_repeats[0] - - times[label] = np.min( - [ - sum(r.cpu_time + r.cpu_timeB for r in res[RDATAS]) / 1000 - # only forwards/backwards simulation - for res in res_repeats - ] - ) - - if sensi_mode == amici.SensitivityMethod.none: - rdatas = res[RDATAS] - llh = res[LLH] - - times["np"] = sum(problem.parameter_df[petab.ESTIMATE]) - - pd.Series(times).to_csv(script_dir / f"{args.model_name}_benchmark.csv") - - for rdata in rdatas: - assert ( - rdata.status == amici.AMICI_SUCCESS - ), f"Simulation failed for {rdata.id}" - - # create simulation PEtab table - sim_df = rdatas_to_measurement_df( - rdatas=rdatas, model=amici_model, measurement_df=problem.measurement_df - ) - sim_df.rename(columns={petab.MEASUREMENT: petab.SIMULATION}, inplace=True) - - if args.simulation_file: - sim_df.to_csv(args.simulation_file, index=False, sep="\t") - - if args.plot: - with contextlib.suppress(NotImplementedError): - # visualize fit - axs = plot_problem(petab_problem=problem, simulations_df=sim_df) - - # save figure - for plot_id, ax in axs.items(): - fig_path = os.path.join( - args.model_directory, - f"{args.model_name}_{plot_id}_vis.png", - ) - logger.info(f"Saving figure to {fig_path}") - ax.get_figure().savefig(fig_path, dpi=150) - - if args.check: - references_yaml = script_dir / "benchmark_models.yaml" - with open(references_yaml) as f: - refs = yaml.full_load(f) - - try: - ref_llh = refs[args.model_name]["llh"] - - rdiff = np.abs((llh - ref_llh) / ref_llh) - rtol = 1e-3 - adiff = np.abs(llh - ref_llh) - atol = 1e-3 - tolstr = ( - f" Absolute difference is {adiff:.2e} " - f"(tol {atol:.2e}) and relative difference is " - f"{rdiff:.2e} (tol {rtol:.2e})." - ) - - if np.isclose(llh, ref_llh, rtol=rtol, atol=atol): - logger.info( - f"Computed llh {llh:.4e} matches reference {ref_llh:.4e}." - + tolstr - ) - else: - logger.error( - f"Computed llh {llh:.4e} does not match reference " - f"{ref_llh:.4e}." + tolstr - ) - sys.exit(1) - except KeyError: - logger.error( - "No reference likelihood found for " - f"{args.model_name} in {references_yaml}" - ) - - for label, key in { - "simulation": "t_sim", - "adjoint sensitivity": "t_adj", - "forward sensitivity": "t_fwd", - }.items(): - try: - ref = refs[args.model_name][key] - if times[key] > ref: - logger.error( - f"Computation time for {label} ({times[key]:.2e}) " - f"exceeds reference ({ref:.2e})." - ) - sys.exit(1) - else: - logger.info( - f"Computation time for {label} ({times[key]:.2e}) " - f"within reference ({ref:.2e})." 
- ) - except KeyError: - logger.error( - f"No reference time for {label} found for " - f"{args.model_name} in {references_yaml}" - ) - - -if __name__ == "__main__": - main() From 2f3834dad964eb763a19ef4093912918f6375dce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 12:43:02 +0100 Subject: [PATCH 41/80] support postequilibration --- python/sdist/amici/jax.py | 65 ++++++++++++++----- .../benchmark-models/test_petab_benchmark.py | 44 ++++++++++--- 2 files changed, 85 insertions(+), 24 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index c882658e3e..67e0869f9c 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -93,8 +93,14 @@ def unscale_p(self, p, pscale): )(p, pscale) def _preeq(self, p, k): - x0 = self.x0(p, k) + x0 = self.x_solver(self.x0(p, k)) tcl = self.tcl(x0, p, k) + return self._eq(p, k, tcl, x0) + + def _posteq(self, p, k, x, tcl): + return self._eq(p, k, tcl, x) + + def _eq(self, p, k, tcl, x0): sol = diffrax.diffeqsolve( self.term, self.solver, @@ -102,10 +108,10 @@ def _preeq(self, p, k): t0=0.0, t1=jnp.inf, dt0=None, - y0=self.x_solver(x0), + y0=x0, stepsize_controller=self.controller, max_steps=self.maxsteps, - discrete_terminating_event=diffrax.SteadyStateEvent(), + event=diffrax.Event(cond_fn=diffrax.steady_state_event()), ) return sol.ys @@ -127,7 +133,6 @@ def _solve(self, ts, p, k, x0, checkpointed): saveat=diffrax.SaveAt(ts=ts), throw=False, ) - return sol.ys, tcl, sol.stats def _obs(self, ts, x, p, k, tcl): @@ -148,6 +153,7 @@ def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): def _run( self, ts: np.ndarray, + ts_dyn: np.ndarray, p: np.ndarray, k: jnp.ndarray, k_preeq: jnp.ndarray, @@ -157,21 +163,44 @@ def _run( dynamic=True, ): ps = self.unscale_p(p, pscale) + + # Pre-equilibration if k_preeq.shape[0] > 0: x0 = self._preeq(ps, k_preeq) else: x0 = self.x0(ps, k) - if dynamic: + # Dynamic simulation + if dynamic and ts_dyn.shape[0] > 0: x, tcl, stats = self._solve( - ts, ps, k, x0, checkpointed=checkpointed + ts_dyn, ps, k, x0, checkpointed=checkpointed ) else: x = tuple( - self.x_solver(jnp.array([x0_i] * len(ts)) for x0_i in x0) + jnp.array([x0_i] * len(ts_dyn)) for x0_i in self.x_solver(x0) ) tcl = self.tcl(x0, ps, k) stats = None + + # Post-equilibration + if len(ts) > len(ts_dyn): + if len(ts_dyn) > 0: + x_final = tuple(x_i[-1] for x_i in x) + else: + x_final = self.x_solver(x0) + x_posteq = self._posteq(ps, k, x_final, tcl) + x_posteq = tuple( + jnp.array([x0_i] * (len(ts) - len(ts_dyn))) + for x0_i in x_posteq + ) + if len(ts_dyn) > 0: + x = tuple( + jnp.concatenate((x_i, x_posteq_i), axis=0) + for x_i, x_posteq_i in zip(x, x_posteq) + ) + else: + x = x_posteq + obs = self._obs(ts, x, ps, k, tcl) my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) @@ -179,10 +208,11 @@ def _run( x_rdata = self._x_rdata(x, tcl) return llh, (x_rdata, obs, stats) - # @eqx.filter_jit + @eqx.filter_jit def run( self, ts: np.ndarray, + ts_dyn: np.ndarray, p: jnp.ndarray, k: np.ndarray, k_preeq: np.ndarray, @@ -190,12 +220,13 @@ def run( pscale: np.ndarray, dynamic=True, ): - return self._run(ts, p, k, k_preeq, my, pscale, dynamic=dynamic) + return self._run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) @eqx.filter_jit def srun( self, ts: np.ndarray, + ts_dyn: np.ndarray, p: jnp.ndarray, k: np.ndarray, k_preeq: np.ndarray, @@ -205,13 +236,14 @@ def srun( ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) - )(ts, p, k, k_preeq, 
my, pscale) + )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) return llh, sllh, (x, obs, stats) @eqx.filter_jit def s2run( self, ts: np.ndarray, + ts_dyn: np.ndarray, p: jnp.ndarray, k: np.ndarray, k_preeq: np.ndarray, @@ -221,10 +253,10 @@ def s2run( ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) - )(ts, p, k, k_preeq, my, pscale) + )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) s2llh = jax.hessian(self._run, 1, True)( - ts, p, k, k_preeq, my, pscale, False + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic ) return llh, sllh, s2llh, (x, obs, stats) @@ -238,7 +270,8 @@ def run_simulation( k_preeq = np.asarray(edata.fixedParametersPreequilibration) my = np.asarray(edata.getObservedData()) pscale = np.asarray(edata.pscale) - dynamic = np.max(ts) > 0 + ts_dyn = ts[np.isfinite(ts)] + dynamic = len(ts_dyn) > 0 and np.max(ts_dyn) > 0 rdata_kwargs = dict() @@ -246,20 +279,20 @@ def run_simulation( ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.run(ts, p, k, k_preeq, my, pscale, dynamic) + ) = self.run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) elif sensitivity_order == amici.SensitivityOrder.first: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.srun(ts, p, k, k_preeq, my, pscale, dynamic) + ) = self.srun(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) elif sensitivity_order == amici.SensitivityOrder.second: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.s2run(ts, p, k, k_preeq, my, pscale, dynamic) + ) = self.s2run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) for field in rdata_kwargs.keys(): if field == "llh": diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index ae84e3bc02..22fc497e33 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -25,6 +25,7 @@ import contextlib import logging import yaml +import equinox as eqx from amici.logging import get_logger from amici.petab.simulations import ( LLH, @@ -144,6 +145,8 @@ class GradientCheckSettings: # forward/backward/central differences. atol_consistency: float = 1e-5 rtol_consistency: float = 1e-1 + # maximum number of integration steps + maxsteps: int = 10_000 # Step sizes for finite difference gradient checks. 
step_sizes: list[float] = field( default_factory=lambda: [ @@ -253,12 +256,27 @@ def benchmark_problem(request): return problem_id, petab_problem, amici_model +@pytest.mark.filterwarnings( + "ignore:The following problem parameters were not used *", + "ignore: The environment variable *", +) def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem - if problem_id not in problems_for_llh_check: - pytest.skip("Excluded from log-likelihood check.") + + amici_solver = amici_model.getSolver() + amici_solver.setAbsoluteTolerance(settings[problem_id].atol_sim) + amici_solver.setRelativeTolerance(settings[problem_id].rtol_sim) + amici_solver.setMaxSteps(settings[problem_id].maxsteps) + + llh_amici = simulate_petab( + petab_problem=petab_problem, + amici_model=amici_model, + solver=amici_solver, + log_level=logging.DEBUG, + )[LLH] + jax_model = import_petab_problem( - problem_id, + petab_problem, model_output_dir=benchmark_outdir / problem_id, jax=True, ) @@ -287,14 +305,24 @@ def test_jax_llh(benchmark_problem): parameter_mapping=parameter_mapping, amici_model=amici_model, ) + + jax_model = eqx.tree_at( + lambda x: x.maxsteps, jax_model, settings[problem_id].maxsteps + ) + jax_model = eqx.tree_at( + lambda x: x.atol, jax_model, settings[problem_id].atol_sim + ) + jax_model = eqx.tree_at( + lambda x: x.rtol, jax_model, settings[problem_id].rtol_sim + ) + rdatas_jax = jax_model.run_simulations(edatas) llh_jax = sum(r.llh for r in rdatas_jax) - ref_llh = reference_values[problem_id]["llh"] assert np.isclose( - ref_llh, llh_jax, rtol=1e-3, atol=1e-3 - ), f"LLH mismatch for {problem_id} with {ref_llh} vs {llh_jax} (jax)" + llh_amici, llh_jax, rtol=1e-3, atol=1e-3 + ), f"LLH mismatch for {problem_id} with {llh_amici} (amici) vs {llh_jax} (jax)" @pytest.mark.filterwarnings( @@ -313,8 +341,8 @@ def test_nominal_parameters_llh(benchmark_problem): pytest.skip("Excluded from log-likelihood check.") amici_solver = amici_model.getSolver() - amici_solver.setAbsoluteTolerance(1e-8) - amici_solver.setRelativeTolerance(1e-8) + amici_solver.setAbsoluteTolerance(settings[problem_id].atol_sim) + amici_solver.setRelativeTolerance(settings[problem_id].rtol_sim) amici_solver.setMaxSteps(10_000) if problem_id in ("Brannmark_JBC2010", "Isensee_JCB2018"): amici_model.setSteadyStateSensitivityMode( From 5366632e716de3a82513e489b7221f94885408f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 13:40:26 +0100 Subject: [PATCH 42/80] fixup --- python/sdist/amici/jax.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 67e0869f9c..c1f083a799 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -171,7 +171,7 @@ def _run( x0 = self.x0(ps, k) # Dynamic simulation - if dynamic and ts_dyn.shape[0] > 0: + if dynamic == "true": x, tcl, stats = self._solve( ts_dyn, ps, k, x0, checkpointed=checkpointed ) @@ -220,7 +220,9 @@ def run( pscale: np.ndarray, dynamic=True, ): - return self._run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + return self._run( + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ) @eqx.filter_jit def srun( @@ -236,7 +238,7 @@ def srun( ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) - )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) return llh, sllh, (x, obs, stats) @eqx.filter_jit @@ -253,10 +255,10 @@ def s2run( ): 
(llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 1, True) - )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) s2llh = jax.hessian(self._run, 1, True)( - ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic ) return llh, sllh, s2llh, (x, obs, stats) @@ -271,7 +273,7 @@ def run_simulation( my = np.asarray(edata.getObservedData()) pscale = np.asarray(edata.pscale) ts_dyn = ts[np.isfinite(ts)] - dynamic = len(ts_dyn) > 0 and np.max(ts_dyn) > 0 + dynamic = "true" if len(ts_dyn) and np.max(ts_dyn) > 0 else "false" rdata_kwargs = dict() @@ -279,20 +281,26 @@ def run_simulation( ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + ) = self.run( + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ) elif sensitivity_order == amici.SensitivityOrder.first: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.srun(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + ) = self.srun( + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ) elif sensitivity_order == amici.SensitivityOrder.second: ( rdata_kwargs["llh"], rdata_kwargs["sllh"], rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.s2run(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic) + ) = self.s2run( + ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ) for field in rdata_kwargs.keys(): if field == "llh": From 5a86f4c3f52d3d22305dbe1aa8f952a4f102bfab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 14:58:08 +0100 Subject: [PATCH 43/80] fix --- .../test_benchmark_collection_models.yml | 3 +-- tests/benchmark-models/test_petab_benchmark.py | 16 ---------------- 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/.github/workflows/test_benchmark_collection_models.yml b/.github/workflows/test_benchmark_collection_models.yml index 019f6d6d8b..dd520de16d 100644 --- a/.github/workflows/test_benchmark_collection_models.yml +++ b/.github/workflows/test_benchmark_collection_models.yml @@ -60,8 +60,7 @@ jobs: - name: Download benchmark collection run: | - pip install git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master#subdirectory=src/python \ - && AMICI_PARALLEL_COMPILE="" tests/benchmark-models/test_benchmark_collection.sh + pip install git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master#subdirectory=src/python - name: Run tests env: diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 22fc497e33..9a896b038c 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -25,7 +25,6 @@ import contextlib import logging import yaml -import equinox as eqx from amici.logging import get_logger from amici.petab.simulations import ( LLH, @@ -145,8 +144,6 @@ class GradientCheckSettings: # forward/backward/central differences. atol_consistency: float = 1e-5 rtol_consistency: float = 1e-1 - # maximum number of integration steps - maxsteps: int = 10_000 # Step sizes for finite difference gradient checks. 
step_sizes: list[float] = field( default_factory=lambda: [ @@ -264,9 +261,6 @@ def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem amici_solver = amici_model.getSolver() - amici_solver.setAbsoluteTolerance(settings[problem_id].atol_sim) - amici_solver.setRelativeTolerance(settings[problem_id].rtol_sim) - amici_solver.setMaxSteps(settings[problem_id].maxsteps) llh_amici = simulate_petab( petab_problem=petab_problem, @@ -306,16 +300,6 @@ def test_jax_llh(benchmark_problem): amici_model=amici_model, ) - jax_model = eqx.tree_at( - lambda x: x.maxsteps, jax_model, settings[problem_id].maxsteps - ) - jax_model = eqx.tree_at( - lambda x: x.atol, jax_model, settings[problem_id].atol_sim - ) - jax_model = eqx.tree_at( - lambda x: x.rtol, jax_model, settings[problem_id].rtol_sim - ) - rdatas_jax = jax_model.run_simulations(edatas) llh_jax = sum(r.llh for r in rdatas_jax) From 480b75a64a48eaf9dd4cb6573e9c334992ae025a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 15:42:24 +0100 Subject: [PATCH 44/80] fix gradients --- python/sdist/amici/jax.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index c1f083a799..e798a0138f 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -237,7 +237,7 @@ def srun( dynamic=True, ): (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self._run, 1, True) + jax.value_and_grad(self._run, 2, True) )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) return llh, sllh, (x, obs, stats) @@ -254,10 +254,10 @@ def s2run( dynamic=True, ): (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self._run, 1, True) + jax.value_and_grad(self._run, 2, True) )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) - s2llh = jax.hessian(self._run, 1, True)( + s2llh = jax.hessian(self._run, 2, True)( ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic ) From 8b9c10ae330e669898e6405c318a6481eb15f3db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 22:53:32 +0100 Subject: [PATCH 45/80] fix hessian --- python/sdist/amici/jax.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index e798a0138f..74e601dd8c 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -258,7 +258,15 @@ def s2run( )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) s2llh = jax.hessian(self._run, 2, True)( - ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ts, + ts_dyn, + p, + k, + k_preeq, + my, + pscale, + checkpointed=False, + dynamic=dynamic, ) return llh, sllh, s2llh, (x, obs, stats) From 7dc81aca73a6e9f3b07787be06fcff459752546c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Fri, 25 Oct 2024 23:25:23 +0100 Subject: [PATCH 46/80] Update test_petab_benchmark.py --- tests/benchmark-models/test_petab_benchmark.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 9a896b038c..bab18a1550 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -261,6 +261,9 @@ def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem amici_solver = amici_model.getSolver() + amici_solver.setAbsoluteTolerance(1e-8) + 
amici_solver.setRelativeTolerance(1e-8) + amici_solver.setMaxSteps(10_000) llh_amici = simulate_petab( petab_problem=petab_problem, @@ -325,8 +328,8 @@ def test_nominal_parameters_llh(benchmark_problem): pytest.skip("Excluded from log-likelihood check.") amici_solver = amici_model.getSolver() - amici_solver.setAbsoluteTolerance(settings[problem_id].atol_sim) - amici_solver.setRelativeTolerance(settings[problem_id].rtol_sim) + amici_solver.setAbsoluteTolerance(1e-8) + amici_solver.setRelativeTolerance(1e-8) amici_solver.setMaxSteps(10_000) if problem_id in ("Brannmark_JBC2010", "Isensee_JCB2018"): amici_model.setSteadyStateSensitivityMode( From 02a12726521820f36ae70a63f9588ebd020acc8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 27 Oct 2024 11:10:44 +0000 Subject: [PATCH 47/80] skip smith in jax --- tests/benchmark-models/test_petab_benchmark.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index e3ad23c913..0d991b50d8 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -262,6 +262,9 @@ def benchmark_problem(request): def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem + if problem_id == "Smith_BMCSystBiol2013": + pytest.skip("Excluded from JAX check due to excessive runtime") + amici_solver = amici_model.getSolver() amici_solver.setAbsoluteTolerance(1e-8) amici_solver.setRelativeTolerance(1e-8) From 51bd18cac0314d6dbd3c8b39aee660641a4d4d36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 27 Oct 2024 11:21:47 +0000 Subject: [PATCH 48/80] exclude more models --- tests/benchmark-models/test_petab_benchmark.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 0d991b50d8..58586e3329 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -262,7 +262,14 @@ def benchmark_problem(request): def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem - if problem_id == "Smith_BMCSystBiol2013": + if problem_id in ( + "Bachmann_MSB2011", + "Isensee_JCB2018", + "Lucarelli_CellSystems2018", + "SalazarCavazos_MBoC2020", + "Smith_BMCSystBiol2013", + ): + # confirmed to work 27/10/2024 but experienced high local runtime (M2 MBA, >30s) pytest.skip("Excluded from JAX check due to excessive runtime") amici_solver = amici_model.getSolver() From c7c5d4b9eebc64456588f48ff102d27bf7ba04ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 9 Nov 2024 23:14:34 +0000 Subject: [PATCH 49/80] refactor: remove use of edatas --- python/sdist/amici/de_export.py | 8 ++ python/sdist/amici/jax.py | 127 +++++++++++++++--- .../benchmark-models/test_petab_benchmark.py | 26 +--- 3 files changed, 123 insertions(+), 38 deletions(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 6f747d8d82..d773b0864e 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -344,6 +344,14 @@ def jnp_stack_str(array) -> str: else "_" for sym_name in sym_names }, + **{ + f"{sym_name.upper()}_IDS": "".join( + f'"{strip_pysb(s)}", ' for s in self.model.sym(sym_name) + ) + if self.model.sym(sym_name) + else "tuple()" + for sym_name in ("p", "k", "y", "x") + }, **{ "MODEL_NAME": 
self.model_name, }, diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 74e601dd8c..5d70a08aef 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -1,15 +1,25 @@ from abc import abstractmethod from dataclasses import dataclass from concurrent.futures import ThreadPoolExecutor +from numbers import Number import diffrax import equinox as eqx import jax.numpy as jnp import numpy as np +import pandas as pd import jax -from collections.abc import Iterable +import petab.v1 as petab import amici +from amici.petab.parameter_mapping import ( + ParameterMapping, + ParameterMappingForCondition, +) +from amici.petab.conditions import ( + _get_timepoints_with_replicates, + _get_measurements_and_sigmas, +) jax.config.update("jax_enable_x64", True) @@ -83,6 +93,22 @@ def sigmay(y, p, k): ... @abstractmethod def Jy(y, my, sigmay): ... + @property + @abstractmethod + def state_ids(self): ... + + @property + @abstractmethod + def observable_ids(self): ... + + @property + @abstractmethod + def parameter_ids(self): ... + + @property + @abstractmethod + def fixed_parameter_ids(self): ... + def unscale_p(self, p, pscale): return jax.vmap( lambda p_i, pscale_i: jnp.stack( @@ -154,9 +180,9 @@ def _run( self, ts: np.ndarray, ts_dyn: np.ndarray, - p: np.ndarray, - k: jnp.ndarray, - k_preeq: jnp.ndarray, + p: jnp.ndarray, + k: np.ndarray, + k_preeq: np.ndarray, my: jnp.ndarray, pscale: np.ndarray, checkpointed=True, @@ -272,14 +298,50 @@ def s2run( return llh, sllh, s2llh, (x, obs, stats) def run_simulation( - self, edata: amici.ExpData, sensitivity_order: amici.SensitivityOrder + self, + parameter_mapping: ParameterMappingForCondition = None, + measurements: pd.DataFrame = None, + parameters: pd.DataFrame = None, + sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, ): - ts = np.asarray(edata.getTimepoints()) - p = jnp.asarray(edata.parameters) - k = np.asarray(edata.fixedParameters) - k_preeq = np.asarray(edata.fixedParametersPreequilibration) - my = np.asarray(edata.getObservedData()) - pscale = np.asarray(edata.pscale) + cond_id, measurements_df = measurements + ts = _get_timepoints_with_replicates(measurements_df) + p = jnp.array( + [ + pval + if isinstance( + pval := parameter_mapping.map_sim_var[par], Number + ) + else petab.scale( + parameters.loc[pval, petab.NOMINAL_VALUE], + parameters.loc[pval, petab.PARAMETER_SCALE], + ) + for par in self.parameter_ids + ] + ) + pscale = jnp.array( + [ + 0 if s == petab.LIN else 1 if s == petab.LOG else 2 + for s in parameter_mapping.scale_map_sim_var.values() + ] + ) + k_sim = np.array( + [ + parameter_mapping.map_sim_fix[k] + for k in self.fixed_parameter_ids + ] + ) + k_preeq = np.array( + [ + parameter_mapping.map_preeq_fix[k] + for k in self.fixed_parameter_ids + if k in parameter_mapping.map_preeq_fix + ] + ) + my = _get_measurements_and_sigmas( + measurements_df, ts, self.observable_ids + )[0].flatten() + ts = np.array(ts) ts_dyn = ts[np.isfinite(ts)] dynamic = "true" if len(ts_dyn) and np.max(ts_dyn) > 0 else "false" @@ -290,7 +352,7 @@ def run_simulation( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), ) = self.run( - ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) elif sensitivity_order == amici.SensitivityOrder.first: ( @@ -298,7 +360,7 @@ def run_simulation( rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), ) = self.srun( - ts, ts_dyn, p, k, k_preeq, my, 
pscale, dynamic=dynamic + ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) elif sensitivity_order == amici.SensitivityOrder.second: ( @@ -307,7 +369,7 @@ def run_simulation( rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), ) = self.s2run( - ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic + ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) for field in rdata_kwargs.keys(): @@ -324,18 +386,47 @@ def run_simulation( def run_simulations( self, - edatas: Iterable[amici.ExpData], sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, num_threads: int = 1, + parameter_mappings: ParameterMapping = None, + parameters: pd.DataFrame = None, + simulation_conditions: pd.DataFrame = None, + measurements: pd.DataFrame = None, ): fun = eqx.Partial( - self.run_simulation, sensitivity_order=sensitivity_order + self.run_simulation, + sensitivity_order=sensitivity_order, + parameters=parameters, ) + gb = ( + [ + petab.PREEQUILIBRATION_CONDITION_ID, + petab.SIMULATION_CONDITION_ID, + ] + if petab.PREEQUILIBRATION_CONDITION_ID in measurements.columns + and petab.PREEQUILIBRATION_CONDITION_ID in simulation_conditions + else petab.SIMULATION_CONDITION_ID + ) + + per_condition_measurements = measurements.groupby(gb) + + order_conditions = [ + tuple(c) if isinstance(c, np.ndarray) else c + for c in simulation_conditions[gb].values + ] + + sorted_mappings = [ + parameter_mappings[order_conditions.index(condition)] + for condition in per_condition_measurements.groups.keys() + ] + if num_threads > 1: with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map(fun, edatas) + results = pool.map( + fun, sorted_mappings, per_condition_measurements + ) else: - results = map(fun, edatas) + results = map(fun, sorted_mappings, per_condition_measurements) return list(results) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 58586e3329..54d92dcf88 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -31,8 +31,6 @@ RDATAS, rdatas_to_measurement_df, simulate_petab, - create_edatas, - fill_in_parameters, create_parameter_mapping, ) from petab.v1.visualize import plot_problem @@ -292,31 +290,19 @@ def test_jax_llh(benchmark_problem): simulation_conditions = ( petab_problem.get_simulation_conditions_from_measurement_df() ) - edatas = create_edatas( - amici_model=amici_model, - petab_problem=petab_problem, - simulation_conditions=simulation_conditions, - ) - problem_parameters = { - t.Index: getattr(t, petab.NOMINAL_VALUE) - for t in petab_problem.parameter_df.itertuples() - } - parameter_mapping = create_parameter_mapping( + mappings = create_parameter_mapping( petab_problem=petab_problem, simulation_conditions=simulation_conditions, scaled_parameters=False, amici_model=amici_model, ) - fill_in_parameters( - edatas=edatas, - problem_parameters=problem_parameters, - scaled_parameters=False, - parameter_mapping=parameter_mapping, - amici_model=amici_model, + rdatas_jax = jax_model.run_simulations( + parameter_mappings=mappings, + parameters=petab_problem.parameter_df, + simulation_conditions=simulation_conditions, + measurements=petab_problem.measurement_df, ) - rdatas_jax = jax_model.run_simulations(edatas) - llh_jax = sum(r.llh for r in rdatas_jax) assert np.isclose( From a514debd75b680d75278bf0c6ed80a5813a759a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 9 Nov 2024 
23:16:12 +0000 Subject: [PATCH 50/80] update template --- .pre-commit-config.yaml | 6 ------ python/sdist/amici/jax.template.py | 16 ++++++++++++++++ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f16458b29a..26395d1a0a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,10 +27,4 @@ repos: - --config - python/sdist/pyproject.toml -- repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 - hooks: - - id: pyupgrade - args: ["--py310-plus"] - exclude: '^(ThirdParty|models)/' diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index c52f29a78f..b6048b57f5 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -106,3 +106,19 @@ def Jy(y, my, sigmay): TPL_JY_EQ return TPL_JY_RET + + @property + def parameter_ids(self): + return TPL_P_IDS + + @property + def fixed_parameter_ids(self): + return TPL_K_IDS + + @property + def observable_ids(self): + return TPL_Y_IDS + + @property + def state_ids(self): + return TPL_X_IDS From 498681aa99bfd139d846c2d1b9a3d9ba168bc4a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 9 Nov 2024 23:16:23 +0000 Subject: [PATCH 51/80] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 26395d1a0a..f16458b29a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,4 +27,10 @@ repos: - --config - python/sdist/pyproject.toml +- repo: https://github.com/asottile/pyupgrade + rev: v3.17.0 + hooks: + - id: pyupgrade + args: ["--py310-plus"] + exclude: '^(ThirdParty|models)/' From f745be02b2b79a9ba5ad9c0bdb22a749d1df58c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 12 Nov 2024 14:51:19 +0000 Subject: [PATCH 52/80] fix python jax tests --- python/sdist/amici/jax.py | 12 ++++----- python/tests/test_jax.py | 56 ++++++++++++++++++++++++++++----------- 2 files changed, 47 insertions(+), 21 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 5d70a08aef..ec69f34361 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -186,7 +186,7 @@ def _run( my: jnp.ndarray, pscale: np.ndarray, checkpointed=True, - dynamic=True, + dynamic="true", ): ps = self.unscale_p(p, pscale) @@ -227,11 +227,11 @@ def _run( else: x = x_posteq - obs = self._obs(ts, x, ps, k, tcl) + obs = jnp.stack(self._obs(ts, x, ps, k, tcl), axis=1) my_r = my.reshape((len(ts), -1)) sigmay = self._sigmay(obs, ps, k) llh = self._loss(obs, sigmay, my_r) - x_rdata = self._x_rdata(x, tcl) + x_rdata = jnp.stack(self._x_rdata(x, tcl), axis=1) return llh, (x_rdata, obs, stats) @eqx.filter_jit @@ -244,7 +244,7 @@ def run( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, - dynamic=True, + dynamic="true", ): return self._run( ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic @@ -260,7 +260,7 @@ def srun( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, - dynamic=True, + dynamic="true", ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 2, True) @@ -277,7 +277,7 @@ def s2run( k_preeq: np.ndarray, my: np.ndarray, pscale: np.ndarray, - dynamic=True, + dynamic="true", ): (llh, (x, obs, stats)), sllh = ( jax.value_and_grad(self._run, 2, True) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 34fd70a201..5898262f90 100644 --- a/python/tests/test_jax.py +++ 
b/python/tests/test_jax.py @@ -100,25 +100,21 @@ def _test_model(model_module, ts, p, k): amici_model.setParameters(np.asarray(p, dtype=np.float64)) amici_model.setFixedParameters(np.asarray(k, dtype=np.float64)) - edatas = ( - amici.ExpData(sol_amici_ref, 1.0, 1.0), - amici.ExpData(sol_amici_ref, 1.0, 1.0), - ) - for edata in edatas: - edata.parameters = amici_model.getParameters() - edata.fixedParameters = amici_model.getFixedParameters() - edata.pscale = amici_model.getParameterScale() + edata = amici.ExpData(sol_amici_ref, 1.0, 1.0) + edata.parameters = amici_model.getParameters() + edata.fixedParameters = amici_model.getFixedParameters() + edata.pscale = amici_model.getParameterScale() amici_solver = amici_model.getSolver() amici_solver.setSensitivityMethod(amici.SensitivityMethod.forward) amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) - rs_amici = amici.runAmiciSimulations(amici_model, amici_solver, edatas) + rs_amici = amici.runAmiciSimulations(amici_model, amici_solver, [edata]) - check_fields_jax(rs_amici, jax_model, edatas, ["x", "y", "llh"]) + check_fields_jax(rs_amici, jax_model, edata, ["x", "y", "llh"]) check_fields_jax( rs_amici, jax_model, - edatas, + edata, ["x", "y", "llh", "sllh"], sensi_order=amici.SensitivityOrder.first, ) @@ -126,7 +122,7 @@ def _test_model(model_module, ts, p, k): check_fields_jax( rs_amici, jax_model, - edatas, + edata, ["x", "y", "llh", "sllh"], sensi_order=amici.SensitivityOrder.second, ) @@ -135,13 +131,43 @@ def _test_model(model_module, ts, p, k): def check_fields_jax( rs_amici, jax_model, - edatas, + edata, fields, sensi_order=amici.SensitivityOrder.none, ): - rs_jax = jax_model.run_simulations(edatas, sensitivity_order=sensi_order) + r_jax = dict() + kwargs = { + "ts": np.array(edata.getTimepoints()), + "ts_dyn": np.array(edata.getTimepoints()), + "p": np.array(edata.parameters), + "k": np.array(edata.fixedParameters), + "k_preeq": np.array([]), + "my": np.array(edata.getObservedData()).reshape( + np.array(edata.getTimepoints()).shape[0], -1 + ), + "pscale": np.array(edata.pscale), + } + if sensi_order == amici.SensitivityOrder.none: + ( + r_jax["llh"], + (r_jax["x"], r_jax["y"], r_jax["stats"]), + ) = jax_model.run(**kwargs) + elif sensi_order == amici.SensitivityOrder.first: + ( + r_jax["llh"], + r_jax["sllh"], + (r_jax["x"], r_jax["y"], r_jax["stats"]), + ) = jax_model.srun(**kwargs) + elif sensi_order == amici.SensitivityOrder.second: + ( + r_jax["llh"], + r_jax["sllh"], + r_jax["s2llh"], + (r_jax["x"], r_jax["y"], r_jax["stats"]), + ) = jax_model.s2run(**kwargs) + for field in fields: - for r_amici, r_jax in zip(rs_amici, rs_jax): + for r_amici, r_jax in zip(rs_amici, [r_jax]): assert_allclose( actual=r_amici[field], desired=r_jax[field], From a64f89ba7f872ed2f698fd6b19946650616c407f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 12 Nov 2024 15:39:15 +0000 Subject: [PATCH 53/80] simplify petab interface --- python/sdist/amici/jax.py | 58 +++++++++++++++---- .../benchmark-models/test_petab_benchmark.py | 11 +--- 2 files changed, 49 insertions(+), 20 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index ec69f34361..3597404cea 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -13,8 +13,8 @@ import amici from amici.petab.parameter_mapping import ( - ParameterMapping, ParameterMappingForCondition, + create_parameter_mapping, ) from amici.petab.conditions import ( _get_timepoints_with_replicates, @@ -39,6 +39,7 @@ class JAXModel(eqx.Module): 
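The petab_problem field introduced below is updated via eqx.tree_at because equinox modules are immutable pytrees: attributes are never assigned in place, a new module is built with one leaf swapped. A minimal sketch of that pattern with a hypothetical Counter module (not AMICI code):

    # Sketch: "setting" a field on a frozen equinox module.
    import equinox as eqx
    import jax.numpy as jnp

    class Counter(eqx.Module):
        value: jnp.ndarray
        label: str | None

    counter = Counter(value=jnp.array(0.0), label=None)

    # Replace an ordinary leaf.
    counter = eqx.tree_at(lambda c: c.value, counter, jnp.array(1.0))

    # A field that currently holds None must be marked as a leaf explicitly,
    # because None is otherwise treated as an empty subtree.
    counter = eqx.tree_at(
        lambda c: c.label, counter, "run-1", is_leaf=lambda x: x is None
    )

This is why the patch passes is_leaf=lambda x: x is None the first time petab_problem is assigned.
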
dcoeff: float maxsteps: int term: diffrax.ODETerm + petab_problem: petab.Problem | None def __init__(self): self.solver = diffrax.Kvaerno5() @@ -56,6 +57,18 @@ def __init__(self): dcoeff=self.dcoeff, ) self.term = diffrax.ODETerm(self.xdot) + self.petab_problem = None + + def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": + if self.petab_problem is None: + return eqx.tree_at( + lambda x: x.petab_problem, + self, + petab_problem, + is_leaf=lambda x: x is None, + ) + else: + return eqx.tree_at(lambda x: x.petab_problem, self, petab_problem) @staticmethod @abstractmethod @@ -109,6 +122,22 @@ def parameter_ids(self): ... @abstractmethod def fixed_parameter_ids(self): ... + def getParameterIds(self) -> list[str]: # noqa: N802 + """ + Get the parameter ids of the model. Adds compatibility with AmiciModel, added to enable generation of + parameter mappings via :func:`amici.petab.create_parameter_mapping`. + :return: + """ + return self.parameter_ids + + def getFixedParameterIds(self) -> list[str]: # noqa: N802 + """ + Get the fixed parameter ids of the model. Adds compatibility with AmiciModel, added to enable generation of + parameter mappings via :func:`amici.petab.create_parameter_mapping`. + :return: + """ + return self.fixed_parameter_ids + def unscale_p(self, p, pscale): return jax.vmap( lambda p_i, pscale_i: jnp.stack( @@ -301,7 +330,6 @@ def run_simulation( self, parameter_mapping: ParameterMappingForCondition = None, measurements: pd.DataFrame = None, - parameters: pd.DataFrame = None, sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, ): cond_id, measurements_df = measurements @@ -313,8 +341,12 @@ def run_simulation( pval := parameter_mapping.map_sim_var[par], Number ) else petab.scale( - parameters.loc[pval, petab.NOMINAL_VALUE], - parameters.loc[pval, petab.PARAMETER_SCALE], + self.petab_problem.parameter_df.loc[ + pval, petab.NOMINAL_VALUE + ], + self.petab_problem.parameter_df.loc[ + pval, petab.PARAMETER_SCALE + ], ) for par in self.parameter_ids ] @@ -388,33 +420,39 @@ def run_simulations( self, sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, num_threads: int = 1, - parameter_mappings: ParameterMapping = None, - parameters: pd.DataFrame = None, simulation_conditions: pd.DataFrame = None, - measurements: pd.DataFrame = None, ): fun = eqx.Partial( self.run_simulation, sensitivity_order=sensitivity_order, - parameters=parameters, ) gb = ( [ petab.PREEQUILIBRATION_CONDITION_ID, petab.SIMULATION_CONDITION_ID, ] - if petab.PREEQUILIBRATION_CONDITION_ID in measurements.columns + if petab.PREEQUILIBRATION_CONDITION_ID + in self.petab_problem.measurement_df and petab.PREEQUILIBRATION_CONDITION_ID in simulation_conditions else petab.SIMULATION_CONDITION_ID ) - per_condition_measurements = measurements.groupby(gb) + per_condition_measurements = self.petab_problem.measurement_df.groupby( + gb + ) order_conditions = [ tuple(c) if isinstance(c, np.ndarray) else c for c in simulation_conditions[gb].values ] + parameter_mappings = create_parameter_mapping( + petab_problem=self.petab_problem, + simulation_conditions=simulation_conditions, + scaled_parameters=False, + amici_model=self, + ) + sorted_mappings = [ parameter_mappings[order_conditions.index(condition)] for condition in per_condition_measurements.groups.keys() diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 54d92dcf88..6667a6aae3 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ 
b/tests/benchmark-models/test_petab_benchmark.py @@ -31,7 +31,6 @@ RDATAS, rdatas_to_measurement_df, simulate_petab, - create_parameter_mapping, ) from petab.v1.visualize import plot_problem @@ -287,20 +286,12 @@ def test_jax_llh(benchmark_problem): model_output_dir=benchmark_outdir / problem_id, jax=True, ) + jax_model = jax_model.set_petab_problem(petab_problem) simulation_conditions = ( petab_problem.get_simulation_conditions_from_measurement_df() ) - mappings = create_parameter_mapping( - petab_problem=petab_problem, - simulation_conditions=simulation_conditions, - scaled_parameters=False, - amici_model=amici_model, - ) rdatas_jax = jax_model.run_simulations( - parameter_mappings=mappings, - parameters=petab_problem.parameter_df, simulation_conditions=simulation_conditions, - measurements=petab_problem.measurement_df, ) llh_jax = sum(r.llh for r in rdatas_jax) From 72924518fd20e45c5ab986ee5f741997aaab9694 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 12 Nov 2024 16:28:39 +0000 Subject: [PATCH 54/80] add parameter values to model class --- python/sdist/amici/jax.py | 53 ++++++++++++++++++++++++++++----------- 1 file changed, 38 insertions(+), 15 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 3597404cea..5ad11680c9 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -38,6 +38,7 @@ class JAXModel(eqx.Module): icoeff: float dcoeff: float maxsteps: int + parameters: jnp.ndarray term: diffrax.ODETerm petab_problem: petab.Problem | None @@ -58,17 +59,40 @@ def __init__(self): ) self.term = diffrax.ODETerm(self.xdot) self.petab_problem = None + self.parameters = jnp.array([]) def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": + """ + Set the PEtab problem for the model and updates parameters to the nominal values. + :param petab_problem: + Petab problem to set. 
+ :return: JAXModel instance + """ if self.petab_problem is None: - return eqx.tree_at( + model = eqx.tree_at( lambda x: x.petab_problem, self, petab_problem, is_leaf=lambda x: x is None, ) else: - return eqx.tree_at(lambda x: x.petab_problem, self, petab_problem) + model = eqx.tree_at(lambda x: x.petab_problem, self, petab_problem) + + nominal_values = jnp.array( + [ + petab.scale( + model.petab_problem.parameter_df.loc[ + pval, petab.NOMINAL_VALUE + ], + model.petab_problem.parameter_df.loc[ + pval, petab.PARAMETER_SCALE + ], + ) + for pval in model.petab_parameter_ids() + ] + ) + + return eqx.tree_at(lambda x: x.parameters, model, nominal_values) @staticmethod @abstractmethod @@ -138,7 +162,15 @@ def getFixedParameterIds(self) -> list[str]: # noqa: N802 """ return self.fixed_parameter_ids - def unscale_p(self, p, pscale): + def petab_parameter_ids(self) -> list[str]: + return self.petab_problem.parameter_df[ + self.petab_problem.parameter_df[petab.ESTIMATE] == 1 + ].index.tolist() + + def get_petab_parameter_by_name(self, name: str) -> jnp.float_: + return self.parameters[self.petab_parameter_ids().index(name)] + + def _unscale_p(self, p, pscale): return jax.vmap( lambda p_i, pscale_i: jnp.stack( (p_i, jnp.exp(p_i), jnp.power(10, p_i)) @@ -217,7 +249,7 @@ def _run( checkpointed=True, dynamic="true", ): - ps = self.unscale_p(p, pscale) + ps = self._unscale_p(p, pscale) # Pre-equilibration if k_preeq.shape[0] > 0: @@ -340,14 +372,7 @@ def run_simulation( if isinstance( pval := parameter_mapping.map_sim_var[par], Number ) - else petab.scale( - self.petab_problem.parameter_df.loc[ - pval, petab.NOMINAL_VALUE - ], - self.petab_problem.parameter_df.loc[ - pval, petab.PARAMETER_SCALE - ], - ) + else self.get_petab_parameter_by_name(pval) for par in self.parameter_ids ] ) @@ -471,13 +496,11 @@ def run_simulations( @dataclass class ReturnDataJAX(dict): x: np.array = None - sx: np.array = None y: np.array = None - sy: np.array = None sigmay: np.array = None - ssigmay: np.array = None llh: np.array = None sllh: np.array = None + s2llh: np.array = None stats: dict = None def __init__(self, *args, **kwargs): From da021064ee37b1fd8a4a2a3a2a042fe55c49a59e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 12 Nov 2024 17:08:44 +0000 Subject: [PATCH 55/80] refactor parameter mapping --- python/sdist/amici/jax.py | 103 +++++++++--------- .../benchmark-models/test_petab_benchmark.py | 3 + 2 files changed, 57 insertions(+), 49 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index 5ad11680c9..fc16a533e1 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -7,7 +7,6 @@ import equinox as eqx import jax.numpy as jnp import numpy as np -import pandas as pd import jax import petab.v1 as petab @@ -39,6 +38,7 @@ class JAXModel(eqx.Module): dcoeff: float maxsteps: int parameters: jnp.ndarray + parameter_mappings: dict[tuple[str], ParameterMappingForCondition] term: diffrax.ODETerm petab_problem: petab.Problem | None @@ -59,6 +59,7 @@ def __init__(self): ) self.term = diffrax.ODETerm(self.xdot) self.petab_problem = None + self.parameter_mappings = None self.parameters = jnp.array([]) def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": @@ -68,15 +69,41 @@ def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": Petab problem to set. 
:return: JAXModel instance """ - if self.petab_problem is None: - model = eqx.tree_at( - lambda x: x.petab_problem, - self, - petab_problem, - is_leaf=lambda x: x is None, + + is_leaf = lambda x: x is None if self.petab_problem is None else None # noqa: E731 + model = eqx.tree_at( + lambda x: x.petab_problem, + self, + petab_problem, + is_leaf=is_leaf, + ) + + simulation_conditions = ( + petab_problem.get_simulation_conditions_from_measurement_df() + ) + + mappings = create_parameter_mapping( + petab_problem=petab_problem, + simulation_conditions=simulation_conditions, + scaled_parameters=False, + amici_model=self, + ) + + parameter_mappings = { + tuple(simulation_condition.values): mapping + for (_, simulation_condition), mapping in zip( + simulation_conditions.iterrows(), mappings ) - else: - model = eqx.tree_at(lambda x: x.petab_problem, self, petab_problem) + } + is_leaf = ( # noqa: E731 + lambda x: x is None if self.parameter_mappings is None else None + ) + model = eqx.tree_at( + lambda x: x.parameter_mappings, + model, + parameter_mappings, + is_leaf=is_leaf, + ) nominal_values = jnp.array( [ @@ -360,11 +387,19 @@ def s2run( def run_simulation( self, - parameter_mapping: ParameterMappingForCondition = None, - measurements: pd.DataFrame = None, + simulation_condition: tuple[str], sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, ): - cond_id, measurements_df = measurements + parameter_mapping = self.parameter_mappings[simulation_condition] + measurements_df = self.petab_problem.measurement_df + for v, k in zip( + simulation_condition, + ( + petab.SIMULATION_CONDITION_ID, + petab.PREEQUILIBRATION_CONDITION_ID, + ), + ): + measurements_df = measurements_df.query(f"{k} == '{v}'") ts = _get_timepoints_with_replicates(measurements_df) p = jnp.array( [ @@ -402,7 +437,9 @@ def run_simulation( ts_dyn = ts[np.isfinite(ts)] dynamic = "true" if len(ts_dyn) and np.max(ts_dyn) > 0 else "false" - rdata_kwargs = dict() + rdata_kwargs = dict( + simulation_condition=simulation_condition, + ) if sensitivity_order == amici.SensitivityOrder.none: ( @@ -445,56 +482,24 @@ def run_simulations( self, sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, num_threads: int = 1, - simulation_conditions: pd.DataFrame = None, + simulation_conditions: tuple[tuple[str]] = None, ): fun = eqx.Partial( self.run_simulation, sensitivity_order=sensitivity_order, ) - gb = ( - [ - petab.PREEQUILIBRATION_CONDITION_ID, - petab.SIMULATION_CONDITION_ID, - ] - if petab.PREEQUILIBRATION_CONDITION_ID - in self.petab_problem.measurement_df - and petab.PREEQUILIBRATION_CONDITION_ID in simulation_conditions - else petab.SIMULATION_CONDITION_ID - ) - - per_condition_measurements = self.petab_problem.measurement_df.groupby( - gb - ) - - order_conditions = [ - tuple(c) if isinstance(c, np.ndarray) else c - for c in simulation_conditions[gb].values - ] - - parameter_mappings = create_parameter_mapping( - petab_problem=self.petab_problem, - simulation_conditions=simulation_conditions, - scaled_parameters=False, - amici_model=self, - ) - - sorted_mappings = [ - parameter_mappings[order_conditions.index(condition)] - for condition in per_condition_measurements.groups.keys() - ] if num_threads > 1: with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = pool.map( - fun, sorted_mappings, per_condition_measurements - ) + results = pool.map(fun, simulation_conditions) else: - results = map(fun, sorted_mappings, per_condition_measurements) + results = map(fun, simulation_conditions) return 
list(results) @dataclass class ReturnDataJAX(dict): + simulation_condition: tuple[str] = None x: np.array = None y: np.array = None sigmay: np.array = None diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 6667a6aae3..97d96af324 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -290,6 +290,9 @@ def test_jax_llh(benchmark_problem): simulation_conditions = ( petab_problem.get_simulation_conditions_from_measurement_df() ) + simulation_conditions = tuple( + tuple(row) for _, row in simulation_conditions.iterrows() + ) rdatas_jax = jax_model.run_simulations( simulation_conditions=simulation_conditions, ) From a46e65d270d6a7beb952cac26c50ccb93e7121c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 12 Nov 2024 17:49:18 +0000 Subject: [PATCH 56/80] refactor & simplify --- python/sdist/amici/jax.py | 207 ++++++++++++++++++++------------------ python/tests/test_jax.py | 6 +- 2 files changed, 110 insertions(+), 103 deletions(-) diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py index fc16a533e1..6161759ebd 100644 --- a/python/sdist/amici/jax.py +++ b/python/sdist/amici/jax.py @@ -8,6 +8,7 @@ import jax.numpy as jnp import numpy as np import jax +import pandas as pd import petab.v1 as petab import amici @@ -24,66 +25,34 @@ class JAXModel(eqx.Module): - _unscale_funs = { - amici.ParameterScaling.none: lambda x: x, - amici.ParameterScaling.ln: lambda x: jnp.exp(x), - amici.ParameterScaling.log10: lambda x: jnp.power(10, x), - } solver: diffrax.AbstractSolver controller: diffrax.AbstractStepSizeController - atol: float - rtol: float - pcoeff: float - icoeff: float - dcoeff: float maxsteps: int parameters: jnp.ndarray - parameter_mappings: dict[tuple[str], ParameterMappingForCondition] - term: diffrax.ODETerm + parameter_mappings: dict[tuple[str], ParameterMappingForCondition] | None + measurements: dict[tuple[str], pd.DataFrame] | None petab_problem: petab.Problem | None def __init__(self): self.solver = diffrax.Kvaerno5() - self.atol: float = 1e-8 - self.rtol: float = 1e-8 - self.pcoeff: float = 0.4 - self.icoeff: float = 0.3 - self.dcoeff: float = 0.0 self.maxsteps: int = 2**14 self.controller = diffrax.PIDController( - rtol=self.rtol, - atol=self.atol, - pcoeff=self.pcoeff, - icoeff=self.icoeff, - dcoeff=self.dcoeff, + rtol=1e-8, + atol=1e-8, + pcoeff=0.4, + icoeff=0.3, + dcoeff=0.0, ) - self.term = diffrax.ODETerm(self.xdot) self.petab_problem = None self.parameter_mappings = None + self.measurements = None self.parameters = jnp.array([]) - def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": - """ - Set the PEtab problem for the model and updates parameters to the nominal values. - :param petab_problem: - Petab problem to set. 
- :return: JAXModel instance - """ - - is_leaf = lambda x: x is None if self.petab_problem is None else None # noqa: E731 - model = eqx.tree_at( - lambda x: x.petab_problem, - self, - petab_problem, - is_leaf=is_leaf, - ) - - simulation_conditions = ( - petab_problem.get_simulation_conditions_from_measurement_df() - ) - + def _set_parameter_mappings( + self, simulation_conditions: pd.DataFrame + ) -> "JAXModel": mappings = create_parameter_mapping( - petab_problem=petab_problem, + petab_problem=self.petab_problem, simulation_conditions=simulation_conditions, scaled_parameters=False, amici_model=self, @@ -95,31 +64,81 @@ def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": simulation_conditions.iterrows(), mappings ) } + is_leaf = ( # noqa: E731 lambda x: x is None if self.parameter_mappings is None else None ) - model = eqx.tree_at( + return eqx.tree_at( lambda x: x.parameter_mappings, - model, + self, parameter_mappings, is_leaf=is_leaf, ) + def _set_measurements( + self, simulation_conditions: pd.DataFrame + ) -> "JAXModel": + measurements = dict() + for _, simulation_condition in simulation_conditions.iterrows(): + measurements_df = self.petab_problem.measurement_df + for k, v in simulation_condition.items(): + measurements_df = measurements_df.query(f"{k} == '{v}'") + + ts = _get_timepoints_with_replicates(measurements_df) + my = _get_measurements_and_sigmas( + measurements_df, ts, self.observable_ids + )[0].flatten() + measurements[tuple(simulation_condition)] = np.array(ts), my + is_leaf = ( # noqa: E731 + lambda x: x is None if self.measurements is None else None + ) + return eqx.tree_at( + lambda x: x.measurements, + self, + measurements, + is_leaf=is_leaf, + ) + + def _set_nominal_parameter_values(self) -> "JAXModel": nominal_values = jnp.array( [ petab.scale( - model.petab_problem.parameter_df.loc[ + self.petab_problem.parameter_df.loc[ pval, petab.NOMINAL_VALUE ], - model.petab_problem.parameter_df.loc[ + self.petab_problem.parameter_df.loc[ pval, petab.PARAMETER_SCALE ], ) - for pval in model.petab_parameter_ids() + for pval in self.petab_parameter_ids() ] ) + return eqx.tree_at(lambda x: x.parameters, self, nominal_values) - return eqx.tree_at(lambda x: x.parameters, model, nominal_values) + def _set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": + is_leaf = lambda x: x is None if self.petab_problem is None else None # noqa: E731 + return eqx.tree_at( + lambda x: x.petab_problem, + self, + petab_problem, + is_leaf=is_leaf, + ) + + def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": + """ + Set the PEtab problem for the model and updates parameters to the nominal values. + :param petab_problem: + Petab problem to set. 
+ :return: JAXModel instance + """ + + model = self._set_petab_problem(petab_problem) + simulation_conditions = ( + petab_problem.get_simulation_conditions_from_measurement_df() + ) + model = model._set_parameter_mappings(simulation_conditions) + model = model._set_measurements(simulation_conditions) + return model._set_nominal_parameter_values() @staticmethod @abstractmethod @@ -216,7 +235,7 @@ def _posteq(self, p, k, x, tcl): def _eq(self, p, k, tcl, x0): sol = diffrax.diffeqsolve( - self.term, + diffrax.ODETerm(self.xdot), self.solver, args=(p, k, tcl), t0=0.0, @@ -232,7 +251,7 @@ def _eq(self, p, k, tcl, x0): def _solve(self, ts, p, k, x0, checkpointed): tcl = self.tcl(x0, p, k) sol = diffrax.diffeqsolve( - self.term, + diffrax.ODETerm(self.xdot), self.solver, args=(p, k, tcl), t0=0.0, @@ -264,15 +283,15 @@ def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): loss_fun = jax.vmap(self.Jy, in_axes=(0, 0, 0)) return -jnp.sum(loss_fun(obs, my, sigmay)) - def _run( + def run_condition( self, - ts: np.ndarray, - ts_dyn: np.ndarray, + ts: jnp.ndarray, + ts_dyn: jnp.ndarray, p: jnp.ndarray, - k: np.ndarray, - k_preeq: np.ndarray, + k: jnp.ndarray, + k_preeq: jnp.ndarray, my: jnp.ndarray, - pscale: np.ndarray, + pscale: jnp.ndarray, checkpointed=True, dynamic="true", ): @@ -323,55 +342,55 @@ def _run( return llh, (x_rdata, obs, stats) @eqx.filter_jit - def run( + def _fun( self, - ts: np.ndarray, - ts_dyn: np.ndarray, + ts: jnp.ndarray, + ts_dyn: jnp.ndarray, p: jnp.ndarray, - k: np.ndarray, - k_preeq: np.ndarray, - my: np.ndarray, - pscale: np.ndarray, + k: jnp.ndarray, + k_preeq: jnp.ndarray, + my: jnp.ndarray, + pscale: jnp.ndarray, dynamic="true", ): - return self._run( + return self.run_condition( ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic ) @eqx.filter_jit - def srun( + def _grad( self, - ts: np.ndarray, - ts_dyn: np.ndarray, + ts: jnp.ndarray, + ts_dyn: jnp.ndarray, p: jnp.ndarray, - k: np.ndarray, - k_preeq: np.ndarray, - my: np.ndarray, - pscale: np.ndarray, + k: jnp.ndarray, + k_preeq: jnp.ndarray, + my: jnp.ndarray, + pscale: jnp.ndarray, dynamic="true", ): (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self._run, 2, True) + jax.value_and_grad(self.run_condition, 2, True) )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) return llh, sllh, (x, obs, stats) @eqx.filter_jit - def s2run( + def _hessian( self, - ts: np.ndarray, - ts_dyn: np.ndarray, + ts: jnp.ndarray, + ts_dyn: jnp.ndarray, p: jnp.ndarray, - k: np.ndarray, - k_preeq: np.ndarray, - my: np.ndarray, - pscale: np.ndarray, + k: jnp.ndarray, + k_preeq: jnp.ndarray, + my: jnp.ndarray, + pscale: jnp.ndarray, dynamic="true", ): (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self._run, 2, True) + jax.value_and_grad(self.run_condition, 2, True) )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) - s2llh = jax.hessian(self._run, 2, True)( + s2llh = jax.hessian(self.run_condition, 2, True)( ts, ts_dyn, p, @@ -391,16 +410,7 @@ def run_simulation( sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, ): parameter_mapping = self.parameter_mappings[simulation_condition] - measurements_df = self.petab_problem.measurement_df - for v, k in zip( - simulation_condition, - ( - petab.SIMULATION_CONDITION_ID, - petab.PREEQUILIBRATION_CONDITION_ID, - ), - ): - measurements_df = measurements_df.query(f"{k} == '{v}'") - ts = _get_timepoints_with_replicates(measurements_df) + ts, my = self.measurements[simulation_condition] p = jnp.array( [ pval @@ -430,10 +440,7 @@ def 
run_simulation( if k in parameter_mapping.map_preeq_fix ] ) - my = _get_measurements_and_sigmas( - measurements_df, ts, self.observable_ids - )[0].flatten() - ts = np.array(ts) + ts_dyn = ts[np.isfinite(ts)] dynamic = "true" if len(ts_dyn) and np.max(ts_dyn) > 0 else "false" @@ -445,7 +452,7 @@ def run_simulation( ( rdata_kwargs["llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.run( + ) = self._fun( ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) elif sensitivity_order == amici.SensitivityOrder.first: @@ -453,7 +460,7 @@ def run_simulation( rdata_kwargs["llh"], rdata_kwargs["sllh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.srun( + ) = self._grad( ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) elif sensitivity_order == amici.SensitivityOrder.second: @@ -462,7 +469,7 @@ def run_simulation( rdata_kwargs["sllh"], rdata_kwargs["s2llh"], (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self.s2run( + ) = self._hessian( ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic ) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 5898262f90..8c78253334 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -151,20 +151,20 @@ def check_fields_jax( ( r_jax["llh"], (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model.run(**kwargs) + ) = jax_model._fun(**kwargs) elif sensi_order == amici.SensitivityOrder.first: ( r_jax["llh"], r_jax["sllh"], (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model.srun(**kwargs) + ) = jax_model._grad(**kwargs) elif sensi_order == amici.SensitivityOrder.second: ( r_jax["llh"], r_jax["sllh"], r_jax["s2llh"], (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model.s2run(**kwargs) + ) = jax_model._hessian(**kwargs) for field in fields: for r_amici, r_jax in zip(rs_amici, [r_jax]): From 404d82ebbb896326472174734d21b308397f8cc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 16 Nov 2024 09:56:30 +0000 Subject: [PATCH 57/80] refsctor --- python/sdist/amici/de_export.py | 42 +- python/sdist/amici/jax.py | 520 ------------------ python/sdist/amici/jax/__init__.py | 0 python/sdist/amici/jax/model.py | 307 +++++++++++ python/sdist/amici/jax/petab.py | 277 ++++++++++ python/sdist/amici/jaxcodeprinter.py | 7 +- python/sdist/amici/petab/parameter_mapping.py | 51 +- .../benchmark-models/test_petab_benchmark.py | 89 ++- 8 files changed, 721 insertions(+), 572 deletions(-) delete mode 100644 python/sdist/amici/jax.py create mode 100644 python/sdist/amici/jax/__init__.py create mode 100644 python/sdist/amici/jax/model.py create mode 100644 python/sdist/amici/jax/petab.py diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index d773b0864e..793d746e9a 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -289,17 +289,14 @@ def _generate_jax_code(self) -> None: "x_rdata", "total_cl", ) - sym_names = ("p", "k", "x", "tcl", "w", "my", "y", "sigmay", "x_rdata") + sym_names = ("x", "tcl", "w", "my", "y", "sigmay", "x_rdata") indent = 8 - def jnp_stack_str(array) -> str: - elems = "".join(str(x) + ", " for x in array) + def jnp_array_str(array) -> str: + elems = ", ".join(str(s) for s in array) - if not elems: - return "tuple()" - - return elems + return f"jnp.array([{elems}])" tpl_data = { **{ @@ -309,11 +306,14 @@ def jnp_stack_str(array) -> str: self.model.eq(eq_name).subs( dict( zip( - self.model.sym("h"), - ( + list(self.model.sym("h")) + + 
list(self.model.sym("my")), + [ sp.Heaviside(x) for x in self.model.eq("root") - ), + ] + + [sp.Symbol("my")] + * len(self.model.sym("my")), ) ) ), @@ -323,17 +323,11 @@ def jnp_stack_str(array) -> str: for eq_name in eq_names }, **{ - f"{eq_name.upper()}_RET": jnp_stack_str( + f"{eq_name.upper()}_RET": jnp_array_str( strip_pysb(s) for s in self.model.sym(eq_name) ) - if eq_name != "Jy" - else ( - "jnp.nansum(jnp.stack((" - + "".join(str(s) + ", " for s in self.model.sym(eq_name)) - + "), axis=-1))" - ) if self.model.sym(eq_name) - else "0" + else "jnp.array([])" for eq_name in eq_names }, **{ @@ -352,6 +346,18 @@ def jnp_stack_str(array) -> str: else "tuple()" for sym_name in ("p", "k", "y", "x") }, + **{ + "PK_SYMS": "".join( + str(strip_pysb(s)) + ", " + for s in list(self.model.sym("p")) + + list(self.model.sym("k")) + ), + "PK_IDS": "".join( + f'"{strip_pysb(s)}", ' + for s in list(self.model.sym("p")) + + list(self.model.sym("k")) + ), + }, **{ "MODEL_NAME": self.model_name, }, diff --git a/python/sdist/amici/jax.py b/python/sdist/amici/jax.py deleted file mode 100644 index 6161759ebd..0000000000 --- a/python/sdist/amici/jax.py +++ /dev/null @@ -1,520 +0,0 @@ -from abc import abstractmethod -from dataclasses import dataclass -from concurrent.futures import ThreadPoolExecutor -from numbers import Number - -import diffrax -import equinox as eqx -import jax.numpy as jnp -import numpy as np -import jax -import pandas as pd -import petab.v1 as petab - -import amici -from amici.petab.parameter_mapping import ( - ParameterMappingForCondition, - create_parameter_mapping, -) -from amici.petab.conditions import ( - _get_timepoints_with_replicates, - _get_measurements_and_sigmas, -) - -jax.config.update("jax_enable_x64", True) - - -class JAXModel(eqx.Module): - solver: diffrax.AbstractSolver - controller: diffrax.AbstractStepSizeController - maxsteps: int - parameters: jnp.ndarray - parameter_mappings: dict[tuple[str], ParameterMappingForCondition] | None - measurements: dict[tuple[str], pd.DataFrame] | None - petab_problem: petab.Problem | None - - def __init__(self): - self.solver = diffrax.Kvaerno5() - self.maxsteps: int = 2**14 - self.controller = diffrax.PIDController( - rtol=1e-8, - atol=1e-8, - pcoeff=0.4, - icoeff=0.3, - dcoeff=0.0, - ) - self.petab_problem = None - self.parameter_mappings = None - self.measurements = None - self.parameters = jnp.array([]) - - def _set_parameter_mappings( - self, simulation_conditions: pd.DataFrame - ) -> "JAXModel": - mappings = create_parameter_mapping( - petab_problem=self.petab_problem, - simulation_conditions=simulation_conditions, - scaled_parameters=False, - amici_model=self, - ) - - parameter_mappings = { - tuple(simulation_condition.values): mapping - for (_, simulation_condition), mapping in zip( - simulation_conditions.iterrows(), mappings - ) - } - - is_leaf = ( # noqa: E731 - lambda x: x is None if self.parameter_mappings is None else None - ) - return eqx.tree_at( - lambda x: x.parameter_mappings, - self, - parameter_mappings, - is_leaf=is_leaf, - ) - - def _set_measurements( - self, simulation_conditions: pd.DataFrame - ) -> "JAXModel": - measurements = dict() - for _, simulation_condition in simulation_conditions.iterrows(): - measurements_df = self.petab_problem.measurement_df - for k, v in simulation_condition.items(): - measurements_df = measurements_df.query(f"{k} == '{v}'") - - ts = _get_timepoints_with_replicates(measurements_df) - my = _get_measurements_and_sigmas( - measurements_df, ts, self.observable_ids - )[0].flatten() - 
measurements[tuple(simulation_condition)] = np.array(ts), my - is_leaf = ( # noqa: E731 - lambda x: x is None if self.measurements is None else None - ) - return eqx.tree_at( - lambda x: x.measurements, - self, - measurements, - is_leaf=is_leaf, - ) - - def _set_nominal_parameter_values(self) -> "JAXModel": - nominal_values = jnp.array( - [ - petab.scale( - self.petab_problem.parameter_df.loc[ - pval, petab.NOMINAL_VALUE - ], - self.petab_problem.parameter_df.loc[ - pval, petab.PARAMETER_SCALE - ], - ) - for pval in self.petab_parameter_ids() - ] - ) - return eqx.tree_at(lambda x: x.parameters, self, nominal_values) - - def _set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": - is_leaf = lambda x: x is None if self.petab_problem is None else None # noqa: E731 - return eqx.tree_at( - lambda x: x.petab_problem, - self, - petab_problem, - is_leaf=is_leaf, - ) - - def set_petab_problem(self, petab_problem: petab.Problem) -> "JAXModel": - """ - Set the PEtab problem for the model and updates parameters to the nominal values. - :param petab_problem: - Petab problem to set. - :return: JAXModel instance - """ - - model = self._set_petab_problem(petab_problem) - simulation_conditions = ( - petab_problem.get_simulation_conditions_from_measurement_df() - ) - model = model._set_parameter_mappings(simulation_conditions) - model = model._set_measurements(simulation_conditions) - return model._set_nominal_parameter_values() - - @staticmethod - @abstractmethod - def xdot(t, x, args): ... - - @staticmethod - @abstractmethod - def _w(t, x, p, k, tcl): ... - - @staticmethod - @abstractmethod - def x0(p, k): ... - - @staticmethod - @abstractmethod - def x_solver(x): ... - - @staticmethod - @abstractmethod - def x_rdata(x, tcl): ... - - @staticmethod - @abstractmethod - def tcl(x, p, k): ... - - @staticmethod - @abstractmethod - def y(t, x, p, k, tcl): ... - - @staticmethod - @abstractmethod - def sigmay(y, p, k): ... - - @staticmethod - @abstractmethod - def Jy(y, my, sigmay): ... - - @property - @abstractmethod - def state_ids(self): ... - - @property - @abstractmethod - def observable_ids(self): ... - - @property - @abstractmethod - def parameter_ids(self): ... - - @property - @abstractmethod - def fixed_parameter_ids(self): ... - - def getParameterIds(self) -> list[str]: # noqa: N802 - """ - Get the parameter ids of the model. Adds compatibility with AmiciModel, added to enable generation of - parameter mappings via :func:`amici.petab.create_parameter_mapping`. - :return: - """ - return self.parameter_ids - - def getFixedParameterIds(self) -> list[str]: # noqa: N802 - """ - Get the fixed parameter ids of the model. Adds compatibility with AmiciModel, added to enable generation of - parameter mappings via :func:`amici.petab.create_parameter_mapping`. 
- :return: - """ - return self.fixed_parameter_ids - - def petab_parameter_ids(self) -> list[str]: - return self.petab_problem.parameter_df[ - self.petab_problem.parameter_df[petab.ESTIMATE] == 1 - ].index.tolist() - - def get_petab_parameter_by_name(self, name: str) -> jnp.float_: - return self.parameters[self.petab_parameter_ids().index(name)] - - def _unscale_p(self, p, pscale): - return jax.vmap( - lambda p_i, pscale_i: jnp.stack( - (p_i, jnp.exp(p_i), jnp.power(10, p_i)) - ) - .at[pscale_i] - .get() - )(p, pscale) - - def _preeq(self, p, k): - x0 = self.x_solver(self.x0(p, k)) - tcl = self.tcl(x0, p, k) - return self._eq(p, k, tcl, x0) - - def _posteq(self, p, k, x, tcl): - return self._eq(p, k, tcl, x) - - def _eq(self, p, k, tcl, x0): - sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.xdot), - self.solver, - args=(p, k, tcl), - t0=0.0, - t1=jnp.inf, - dt0=None, - y0=x0, - stepsize_controller=self.controller, - max_steps=self.maxsteps, - event=diffrax.Event(cond_fn=diffrax.steady_state_event()), - ) - return sol.ys - - def _solve(self, ts, p, k, x0, checkpointed): - tcl = self.tcl(x0, p, k) - sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.xdot), - self.solver, - args=(p, k, tcl), - t0=0.0, - t1=ts[-1], - dt0=None, - y0=self.x_solver(x0), - stepsize_controller=self.controller, - max_steps=self.maxsteps, - adjoint=diffrax.RecursiveCheckpointAdjoint() - if checkpointed - else diffrax.DirectAdjoint(), - saveat=diffrax.SaveAt(ts=ts), - throw=False, - ) - return sol.ys, tcl, sol.stats - - def _obs(self, ts, x, p, k, tcl): - return jax.vmap(self.y, in_axes=(0, 0, None, None, None))( - ts, x, p, k, tcl - ) - - def _sigmay(self, obs, p, k): - return jax.vmap(self.sigmay, in_axes=(0, None, None))(obs, p, k) - - def _x_rdata(self, x, tcl): - return jax.vmap(self.x_rdata, in_axes=(0, None))(x, tcl) - - def _loss(self, obs: jnp.ndarray, sigmay: jnp.ndarray, my: np.ndarray): - loss_fun = jax.vmap(self.Jy, in_axes=(0, 0, 0)) - return -jnp.sum(loss_fun(obs, my, sigmay)) - - def run_condition( - self, - ts: jnp.ndarray, - ts_dyn: jnp.ndarray, - p: jnp.ndarray, - k: jnp.ndarray, - k_preeq: jnp.ndarray, - my: jnp.ndarray, - pscale: jnp.ndarray, - checkpointed=True, - dynamic="true", - ): - ps = self._unscale_p(p, pscale) - - # Pre-equilibration - if k_preeq.shape[0] > 0: - x0 = self._preeq(ps, k_preeq) - else: - x0 = self.x0(ps, k) - - # Dynamic simulation - if dynamic == "true": - x, tcl, stats = self._solve( - ts_dyn, ps, k, x0, checkpointed=checkpointed - ) - else: - x = tuple( - jnp.array([x0_i] * len(ts_dyn)) for x0_i in self.x_solver(x0) - ) - tcl = self.tcl(x0, ps, k) - stats = None - - # Post-equilibration - if len(ts) > len(ts_dyn): - if len(ts_dyn) > 0: - x_final = tuple(x_i[-1] for x_i in x) - else: - x_final = self.x_solver(x0) - x_posteq = self._posteq(ps, k, x_final, tcl) - x_posteq = tuple( - jnp.array([x0_i] * (len(ts) - len(ts_dyn))) - for x0_i in x_posteq - ) - if len(ts_dyn) > 0: - x = tuple( - jnp.concatenate((x_i, x_posteq_i), axis=0) - for x_i, x_posteq_i in zip(x, x_posteq) - ) - else: - x = x_posteq - - obs = jnp.stack(self._obs(ts, x, ps, k, tcl), axis=1) - my_r = my.reshape((len(ts), -1)) - sigmay = self._sigmay(obs, ps, k) - llh = self._loss(obs, sigmay, my_r) - x_rdata = jnp.stack(self._x_rdata(x, tcl), axis=1) - return llh, (x_rdata, obs, stats) - - @eqx.filter_jit - def _fun( - self, - ts: jnp.ndarray, - ts_dyn: jnp.ndarray, - p: jnp.ndarray, - k: jnp.ndarray, - k_preeq: jnp.ndarray, - my: jnp.ndarray, - pscale: jnp.ndarray, - dynamic="true", - ): - return 
self.run_condition( - ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic - ) - - @eqx.filter_jit - def _grad( - self, - ts: jnp.ndarray, - ts_dyn: jnp.ndarray, - p: jnp.ndarray, - k: jnp.ndarray, - k_preeq: jnp.ndarray, - my: jnp.ndarray, - pscale: jnp.ndarray, - dynamic="true", - ): - (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self.run_condition, 2, True) - )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) - return llh, sllh, (x, obs, stats) - - @eqx.filter_jit - def _hessian( - self, - ts: jnp.ndarray, - ts_dyn: jnp.ndarray, - p: jnp.ndarray, - k: jnp.ndarray, - k_preeq: jnp.ndarray, - my: jnp.ndarray, - pscale: jnp.ndarray, - dynamic="true", - ): - (llh, (x, obs, stats)), sllh = ( - jax.value_and_grad(self.run_condition, 2, True) - )(ts, ts_dyn, p, k, k_preeq, my, pscale, dynamic=dynamic) - - s2llh = jax.hessian(self.run_condition, 2, True)( - ts, - ts_dyn, - p, - k, - k_preeq, - my, - pscale, - checkpointed=False, - dynamic=dynamic, - ) - - return llh, sllh, s2llh, (x, obs, stats) - - def run_simulation( - self, - simulation_condition: tuple[str], - sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, - ): - parameter_mapping = self.parameter_mappings[simulation_condition] - ts, my = self.measurements[simulation_condition] - p = jnp.array( - [ - pval - if isinstance( - pval := parameter_mapping.map_sim_var[par], Number - ) - else self.get_petab_parameter_by_name(pval) - for par in self.parameter_ids - ] - ) - pscale = jnp.array( - [ - 0 if s == petab.LIN else 1 if s == petab.LOG else 2 - for s in parameter_mapping.scale_map_sim_var.values() - ] - ) - k_sim = np.array( - [ - parameter_mapping.map_sim_fix[k] - for k in self.fixed_parameter_ids - ] - ) - k_preeq = np.array( - [ - parameter_mapping.map_preeq_fix[k] - for k in self.fixed_parameter_ids - if k in parameter_mapping.map_preeq_fix - ] - ) - - ts_dyn = ts[np.isfinite(ts)] - dynamic = "true" if len(ts_dyn) and np.max(ts_dyn) > 0 else "false" - - rdata_kwargs = dict( - simulation_condition=simulation_condition, - ) - - if sensitivity_order == amici.SensitivityOrder.none: - ( - rdata_kwargs["llh"], - (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self._fun( - ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic - ) - elif sensitivity_order == amici.SensitivityOrder.first: - ( - rdata_kwargs["llh"], - rdata_kwargs["sllh"], - (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self._grad( - ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic - ) - elif sensitivity_order == amici.SensitivityOrder.second: - ( - rdata_kwargs["llh"], - rdata_kwargs["sllh"], - rdata_kwargs["s2llh"], - (rdata_kwargs["x"], rdata_kwargs["y"], rdata_kwargs["stats"]), - ) = self._hessian( - ts, ts_dyn, p, k_sim, k_preeq, my, pscale, dynamic=dynamic - ) - - for field in rdata_kwargs.keys(): - if field == "llh": - rdata_kwargs[field] = np.float64(rdata_kwargs[field]) - elif field not in ["sllh", "s2llh"]: - rdata_kwargs[field] = np.asarray(rdata_kwargs[field]).T - if rdata_kwargs[field].ndim == 1: - rdata_kwargs[field] = np.expand_dims( - rdata_kwargs[field], 1 - ) - - return ReturnDataJAX(**rdata_kwargs) - - def run_simulations( - self, - sensitivity_order: amici.SensitivityOrder = amici.SensitivityOrder.none, - num_threads: int = 1, - simulation_conditions: tuple[tuple[str]] = None, - ): - fun = eqx.Partial( - self.run_simulation, - sensitivity_order=sensitivity_order, - ) - - if num_threads > 1: - with ThreadPoolExecutor(max_workers=num_threads) as pool: - results = 
pool.map(fun, simulation_conditions) - else: - results = map(fun, simulation_conditions) - return list(results) - - -@dataclass -class ReturnDataJAX(dict): - simulation_condition: tuple[str] = None - x: np.array = None - y: np.array = None - sigmay: np.array = None - llh: np.array = None - sllh: np.array = None - s2llh: np.array = None - stats: dict = None - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.__dict__ = self diff --git a/python/sdist/amici/jax/__init__.py b/python/sdist/amici/jax/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py new file mode 100644 index 0000000000..ffd58ee8a1 --- /dev/null +++ b/python/sdist/amici/jax/model.py @@ -0,0 +1,307 @@ +from abc import abstractmethod + +import diffrax +import equinox as eqx +import jax.numpy as jnp +import numpy as np +import jax + +# always use 64-bit precision. No-brainer on CPUs and GPUs don't make sense for stiff systems. +jax.config.update("jax_enable_x64", True) + + +class JAXModel(eqx.Module): + """ + JAXModel provides an abstract base class for a JAX-based implementation of an AMICI model. Models inheriting from + JAXModel must provide model specific implementations of abstract methods. + """ + + @staticmethod + @abstractmethod + def xdot( + t: jnp.float_, x: jnp.ndarray, args: tuple[jnp.ndarray, jnp.ndarray] + ) -> jnp.ndarray: + """ + Right-hand side of the ODE system. + + :param t: time point + :param x: state vector + :param args: tuple of parameters, fixed parameters and total values for conservation laws + :return: + Derivative of the state vector at time point, same data structure as x. + """ + ... + + @staticmethod + @abstractmethod + def _w( + t: jnp.float_, x: jnp.ndarray, pk: jnp.ndarray, tcl: jnp.ndarray + ) -> jnp.ndarray: + """ + Compute the expressions (algebraic variables) of the model. + + :param t: time point + :param x: state vector + :param pk: parameters + :param tcl: total values for conservation laws + :return: + Expression values. + """ + ... + + @staticmethod + @abstractmethod + def x0(pk: jnp.ndarray) -> jnp.ndarray: + """ + Compute the initial state vector. + + :param pk: parameters + """ + ... + + @staticmethod + @abstractmethod + def x_solver(x: jnp.ndarray) -> jnp.ndarray: + """ + Transform the full state vector to the reduced state vector for ODE solving. + + :param x: + full state vector + :return: + reduced state vector + """ + ... + + @staticmethod + @abstractmethod + def x_rdata(x: jnp.ndarray, tcl: jnp.ndarray) -> jnp.ndarray: + """ + Compute the full state vector from the reduced state vector and conservation laws. + + :param x: + reduced state vector + :param tcl: + total values for conservation laws + :return: + full state vector + """ + ... + + @staticmethod + @abstractmethod + def tcl(x: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: + """ + Compute the total values for conservation laws. + :param x: + state vector + :param pk: + parameters + :return: + total values for conservation laws + """ + ... + + @abstractmethod + def y( + self, t: jnp.float_, x: jnp.ndarray, pk: jnp.ndarray, tcl: jnp.ndarray + ) -> jnp.ndarray: + """ + Compute the observables. + :param t: + time point + :param x: + state vector + :param pk: + parameters + :param tcl: + total values for conservation laws + :return: + observable + """ + ... 
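For orientation, the conservation-law hooks above (tcl, x_solver, x_rdata) are meant to interact as in the following hand-written sketch for a hypothetical two-species system with a single conserved moiety A + B (illustrative only, assuming jax.numpy is imported as jnp; generated model classes fill these methods in from the symbolic model instead):

    @staticmethod
    def tcl(x, pk):
        # conserved total computed from the full state vector (A, B)
        return jnp.array([x[0] + x[1]])

    @staticmethod
    def x_solver(x):
        # reduced state passed to the ODE solver: keep only the independent species A
        return jnp.array([x[0]])

    @staticmethod
    def x_rdata(x, tcl):
        # recover the dependent species B from the conserved total
        return jnp.array([x[0], tcl[0] - x[0]])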
+ + @staticmethod + @abstractmethod + def sigmay(y: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: + """ + Compute the standard deviations of the observables. + :param y: + observable for the specified observable id + :param pk: + parameters + :return: + standard deviations of the observables + """ + ... + + @abstractmethod + def llh( + self, + t: jnp.float_, + x: jnp.ndarray, + pk: jnp.ndarray, + tcl: jnp.ndarray, + iy: int, + ) -> jnp.float_: + """ + Compute the log-likelihood of the observable for the specified observable id. + :param t: + time point + :param x: + state vector + :param pk: + parameters + :param tcl: + total values for conservation laws + :param iy: + observable id + :return: + log-likelihood of the observable + """ + ... + + @property + @abstractmethod + def state_ids(self) -> list[str]: + """ + Get the state ids of the model. + :return: + State ids + """ + ... + + @property + @abstractmethod + def observable_ids(self) -> list[str]: + """ + Get the observable ids of the model. + :return: + Observable ids + """ + ... + + @property + @abstractmethod + def parameter_ids(self) -> list[str]: + """ + Get the parameter ids of the model. + :return: + Parameter ids + """ + ... + + def _preeq(self, p, solver, controller, max_steps): + """ + Pre-equilibration of the model. + :param p: + parameters + :return: + Initial state vector + """ + x0 = self.x_solver(self.x0(p)) + tcl = self.tcl(x0, p) + return self._eq(p, tcl, x0, solver, controller, max_steps) + + def _posteq(self, p, x, tcl, solver, controller, max_steps): + return self._eq(p, tcl, x, solver, controller, max_steps) + + def _eq(self, p, tcl, x0, solver, controller, max_steps): + sol = diffrax.diffeqsolve( + diffrax.ODETerm(self.xdot), + solver, + args=(p, tcl), + t0=0.0, + t1=jnp.inf, + dt0=None, + y0=x0, + stepsize_controller=controller, + max_steps=max_steps, + event=diffrax.Event(cond_fn=diffrax.steady_state_event()), + ) + return sol.ys[-1, :] + + def _solve(self, ts, p, x0, solver, controller, max_steps): + tcl = self.tcl(x0, p) + sol = diffrax.diffeqsolve( + diffrax.ODETerm(self.xdot), + solver, + args=(p, tcl), + t0=0.0, + t1=ts[-1], + dt0=None, + y0=self.x_solver(x0), + stepsize_controller=controller, + max_steps=max_steps, + adjoint=diffrax.RecursiveCheckpointAdjoint(), + saveat=diffrax.SaveAt(ts=ts), + throw=False, + ) + return sol.ys, tcl, sol.stats + + def _x_rdata(self, x, tcl): + return jax.vmap(self.x_rdata, in_axes=(0, None))(x, tcl) + + def _outputs(self, ts, x, p, tcl, my, iys) -> jnp.float_: + return jax.vmap(self.llh, in_axes=(0, 0, None, None, 0, 0))( + ts, x, p, tcl, my, iys + ) + + # @eqx.filter_jit + def simulate_condition( + self, + ts: np.ndarray, + ts_dyn: np.ndarray, + my: np.ndarray, + iys: np.ndarray, + p: jnp.ndarray, + p_preeq: jnp.ndarray, + dynamic: bool, + solver: diffrax.AbstractSolver, + controller: diffrax.AbstractStepSizeController, + max_steps: int, + ): + # Pre-equilibration + if p_preeq.shape[0] > 0: + x0 = self._preeq(p_preeq, solver, controller, max_steps) + else: + x0 = self.x0(p) + + # Dynamic simulation + if dynamic: + x, tcl, stats = self._solve( + ts_dyn, p, x0, solver, controller, max_steps + ) + else: + x = jnp.repeat( + self.x_solver(x0).reshape(1, -1), + len(ts_dyn), + axis=0, + ) + tcl = self.tcl(x0, p) + stats = None + + # Post-equilibration + if len(ts) > len(ts_dyn): + if len(ts_dyn) > 0: + x_final = x[-1, :] + else: + x_final = self.x_solver(x0) + x_posteq = self._posteq( + p, x_final, tcl, solver, controller, max_steps + ) + x_posteq = jnp.repeat( + 
x_posteq.reshape(1, -1), + len(ts) - len(ts_dyn), + axis=0, + ) + if len(ts_dyn) > 0: + x = jnp.concatenate((x, x_posteq), axis=0) + else: + x = x_posteq + + outputs = self._outputs(ts, x, p, tcl, my, iys) + llh = -jnp.sum(outputs[:, 0]) + obs = outputs[:, 1] + sigmay = outputs[:, 2] + x_rdata = jnp.stack(self._x_rdata(x, tcl), axis=1) + return llh, dict(llh=llh, x=x_rdata, y=obs, sigmay=sigmay, stats=stats) diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py new file mode 100644 index 0000000000..6bf090d114 --- /dev/null +++ b/python/sdist/amici/jax/petab.py @@ -0,0 +1,277 @@ +""" +JAX +---- +This module provides functions and classes to enable the use of JAX-based ODE solvers (currently diffrax) to simulate + AMICI processed models. The API in this module is experimental. Expect substantial changes and do not use in production + code. + +Loading this module will automatically enable 64-bit precision for JAX. +""" + +from numbers import Number +from collections.abc import Iterable + +import diffrax +import equinox as eqx +import jax.numpy as jnp +import numpy as np +import pandas as pd +import petab.v1 as petab + +from amici.petab.parameter_mapping import ( + ParameterMappingForCondition, + create_parameter_mapping, +) +from amici.jax.model import JAXModel + + +def jax_unscale( + parameter: jnp.float_, + scale_str: str, +) -> jnp.float_: + """Unscale parameter according to ``scale_str``. + + Arguments: + parameter: + Parameter to be unscaled. + scale_str: + One of ``'lin'`` (synonymous with ``''``), ``'log'``, ``'log10'``. + + Returns: + The unscaled parameter. + """ + if scale_str == petab.LIN or not scale_str: + return parameter + if scale_str == petab.LOG: + return jnp.exp(parameter) + if scale_str == petab.LOG10: + return jnp.power(10, parameter) + raise ValueError(f"Invalid parameter scaling: {scale_str}") + + +class JAXProblem(eqx.Module): + """ + :ivar solver: + Diffrax solver to use for model simulation + :ivar controller: + Step-size controller to use for model simulation + :ivar max_steps: + Maximum number of steps to take during a simulation + :ivar parameters: + Values for the model parameters. Only populated after setting the PEtab problem via :meth:`set_petab_problem`. + Do not change dimensions, values may be changed during, e.g. model training. + :ivar parameter_mappings: + :class:`ParameterMappingForCondition` instances for each simulation condition. Only populated after setting the + PEtab problem via :meth:`set_petab_problem`. Do not set manually unless you know what you are doing. + :ivar measurements: + Subset measurement dataframes for each simulation condition. Only populated after setting the PEtab problem + via :meth:`set_petab_problem`. Do not set manually unless you know what you are doing. + :ivar petab_problem: + PEtab problem to simulate. Set via :meth:`set_petab_problem`. + """ + + parameters: jnp.ndarray + model: JAXModel + parameter_mappings: dict[tuple[str], ParameterMappingForCondition] = ( + eqx.field(static=True) + ) + measurements: dict[ + tuple[str], + tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray, str], + ] = eqx.field(static=True) + petab_problem: petab.Problem + + def __init__(self, model: JAXModel, petab_problem: petab.Problem): + """ + Initialize a JAXProblem instance with a model and a PEtab problem. + :param model: + JAXModel instance to use for simulation. + :param petab_problem: + PEtab problem to simulate. 
+ """ + self.model = model + scs = petab_problem.get_simulation_conditions_from_measurement_df() + self.petab_problem = petab_problem + self.parameter_mappings = self._get_parameter_mappings(scs) + self.measurements = self._get_measurements(scs) + self.parameters = self._get_nominal_parameter_values() + + def _get_parameter_mappings(self, simulation_conditions: pd.DataFrame): + scs = list(set(simulation_conditions.values.flatten())) + mappings = create_parameter_mapping( + petab_problem=self.petab_problem, + simulation_conditions=[ + {petab.SIMULATION_CONDITION_ID: sc} for sc in scs + ], + scaled_parameters=False, + ) + for mapping in mappings: + for sim_var, value in mapping.map_sim_var.items(): + if isinstance(value, Number) and not np.isfinite(value): + mapping.map_sim_var[sim_var] = 1.0 + return dict(zip(scs, mappings)) + + def _get_measurements(self, simulation_conditions: pd.DataFrame): + """ + Set measurements for the model based on the provided simulation conditions. + :param simulation_conditions: + Simulation conditions to create parameter mappings for. Same format as returned by + :meth:`petab.Problem.get_simulation_conditions_from_measurement_df`. + :return: + JAXModel instance with measurements set. + """ + measurements = dict() + for _, simulation_condition in simulation_conditions.iterrows(): + measurements_df = self.petab_problem.measurement_df + for k, v in simulation_condition.items(): + measurements_df = measurements_df.query(f"{k} == '{v}'") + + measurements_df.sort_values(by=petab.TIME, inplace=True) + + ts = measurements_df[petab.TIME].values + ts_dyn = [t for t in ts if np.isfinite(t)] + my = measurements_df[petab.MEASUREMENT].values + iys = np.array( + [ + self.model.observable_ids.index(oid) + for oid in measurements_df[petab.OBSERVABLE_ID].values + ] + ) + + # using strings here prevents tracing in jax + dynamic = ts_dyn and max(ts_dyn) > 0 + measurements[tuple(simulation_condition)] = ( + np.array(ts), + np.array(ts_dyn), + my, + iys, + dynamic, + ) + return measurements + + def _get_nominal_parameter_values(self) -> jnp.ndarray: + """ + Set the nominal parameter values for the model based on the nominal values in the PEtab problem. + :return: + JAXModel instance with parameter values set to the nominal values. + """ + if self.petab_problem is None: + raise ValueError( + "PEtab problem not set, cannot set nominal values." + ) + return jnp.array( + [ + petab.scale( + self.petab_problem.parameter_df.loc[ + pval, petab.NOMINAL_VALUE + ], + self.petab_problem.parameter_df.loc[ + pval, petab.PARAMETER_SCALE + ], + ) + for pval in self.parameter_ids + ] + ) + + @property + def parameter_ids(self) -> list[str]: + """ + Parameter ids that are estimated in the PEtab problem. Same ordering as values in :attr:`parameters`. + :return: + PEtab parameter ids + """ + return self.petab_problem.parameter_df[ + self.petab_problem.parameter_df[petab.ESTIMATE] == 1 + ].index.tolist() + + def get_petab_parameter_by_id(self, name: str) -> jnp.float_: + """ + Get the value of a PEtab parameter by name. + :param name: + PEtab parameter id + :return: + Value of the parameter + """ + return self.parameters[self.parameter_ids.index(name)] + + def _unscale_p( + self, p: jnp.ndarray, pscale: tuple[str, ...] + ) -> jnp.ndarray: + """ + Unscaling of parameters. 
+ + :param p: + Parameter values + :param pscale: + Parameter scaling + :return: + Unscaled parameter values + """ + return jnp.array( + [jax_unscale(pval, scale) for pval, scale in zip(p, pscale)] + ) + + def load_parameters(self, simulation_condition) -> jnp.ndarray: + mapping = self.parameter_mappings[simulation_condition] + p = jnp.array( + [ + pval + if isinstance(pval := mapping.map_sim_var[pname], Number) + else self.get_petab_parameter_by_id(pval) + for pname in self.model.parameter_ids + ] + ) + pscale = tuple( + [ + mapping.scale_map_sim_var[pname] + for pname in self.model.parameter_ids + ] + ) + return self._unscale_p(p, pscale) + + def run_simulation( + self, + simulation_condition: tuple[str], + solver: diffrax.AbstractSolver, + controller: diffrax.AbstractStepSizeController, + max_steps: int, + ): + ts, ts_dyn, my, iys, dynamic = self.measurements[simulation_condition] + p = self.load_parameters(simulation_condition[0]) + p_preeq = ( + self.load_parameters(simulation_condition[1]) + if len(simulation_condition) > 1 + else jnp.array([]) + ) + return self.model.simulate_condition( + ts, + ts_dyn, + my, + iys, + p, + p_preeq, + dynamic, + solver, + controller, + max_steps, + ) + + +def run_simulations( + problem: JAXProblem, + simulation_conditions: Iterable[tuple] = None, + solver: diffrax.AbstractSolver = diffrax.Kvaerno5(), + controller: diffrax.AbstractStepSizeController = diffrax.PIDController( + rtol=1e-8, + atol=1e-8, + pcoeff=0.4, + icoeff=0.3, + dcoeff=0.0, + ), + max_steps: int = 2**14, +): + results = { + sc: problem.run_simulation(sc, solver, controller, max_steps) + for sc in simulation_conditions + } + return sum(llh for llh, _ in results.values()), results diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py index ee56d292ff..f2d5b29248 100644 --- a/python/sdist/amici/jaxcodeprinter.py +++ b/python/sdist/amici/jaxcodeprinter.py @@ -1,7 +1,6 @@ """Jax code generation""" import re -from typing import Optional, Union from collections.abc import Iterable import sympy as sp @@ -11,7 +10,7 @@ class AmiciJaxCodePrinter(NumPyPrinter): """JAX code printer""" - def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: + def doprint(self, expr: sp.Expr, assign_to: str | None = None) -> str: try: code = super().doprint(expr, assign_to) code = re.sub(r"numpy\.", r"jnp.", code) @@ -28,8 +27,8 @@ def _print_AmiciSpline(self, expr: sp.Expr) -> str: def _get_sym_lines( self, - symbols: Union[Iterable[str], sp.Matrix], - equations: sp.Matrix, + symbols: sp.Matrix | Iterable[str], + equations: sp.Matrix | Iterable[sp.Expr], indent_level: int, ) -> list[str]: """ diff --git a/python/sdist/amici/petab/parameter_mapping.py b/python/sdist/amici/petab/parameter_mapping.py index dc88c1064d..cef4c61e06 100644 --- a/python/sdist/amici/petab/parameter_mapping.py +++ b/python/sdist/amici/petab/parameter_mapping.py @@ -309,7 +309,7 @@ def create_parameter_mapping( petab_problem: petab.Problem, simulation_conditions: pd.DataFrame | list[dict], scaled_parameters: bool, - amici_model: AmiciModel, + amici_model: AmiciModel | None = None, **parameter_mapping_kwargs, ) -> ParameterMapping: """Generate AMICI specific parameter mapping. 
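Making amici_model optional in create_parameter_mapping lets JAXProblem above build its parameter mappings from the PEtab problem alone. End to end, the JAX PEtab layer added in this patch is intended to be used roughly as follows (a sketch mirroring the benchmark test further below; the YAML path is a placeholder, and import_petab_problem is assumed to be importable from amici.petab.petab_import):

import equinox as eqx
import petab.v1 as petab

from amici.petab.petab_import import import_petab_problem
from amici.jax.petab import JAXProblem, run_simulations

petab_problem = petab.Problem.from_yaml("path/to/problem.yaml")  # placeholder path
jax_model = import_petab_problem(petab_problem, jax=True)  # generates the JAXModel subclass
jax_problem = JAXProblem(jax_model, petab_problem)

simulation_conditions = tuple(
    tuple(row)
    for _, row in petab_problem.get_simulation_conditions_from_measurement_df().iterrows()
)

# log-likelihood and its gradient with respect to the estimated parameters (on PEtab scale);
# the gradient is a JAXProblem-shaped pytree, with the values in sllh.parameters
(llh, results), sllh = eqx.filter_jit(
    eqx.filter_value_and_grad(run_simulations, has_aux=True)
)(jax_problem, simulation_conditions)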
@@ -399,7 +399,7 @@ def create_parameter_mapping_for_condition( parameter_mapping_for_condition: petab.ParMappingDictQuadruple, condition: pd.Series | dict, petab_problem: petab.Problem, - amici_model: AmiciModel, + amici_model: AmiciModel | None = None, ) -> ParameterMappingForCondition: """Generate AMICI specific parameter mapping for condition. @@ -515,27 +515,38 @@ def create_parameter_mapping_for_condition( # have different variable parameters. without splitting, # merge_preeq_and_sim_pars_condition below may fail. # TODO: This can be done already in parameter mapping creation. - variable_par_ids = amici_model.getParameterIds() - fixed_par_ids = amici_model.getFixedParameterIds() - - condition_map_preeq_var, condition_map_preeq_fix = _subset_dict( - condition_map_preeq, variable_par_ids, fixed_par_ids - ) + if amici_model is not None: + variable_par_ids = amici_model.getParameterIds() + fixed_par_ids = amici_model.getFixedParameterIds() + condition_map_preeq_var, condition_map_preeq_fix = _subset_dict( + condition_map_preeq, variable_par_ids, fixed_par_ids + ) - ( - condition_scale_map_preeq_var, - condition_scale_map_preeq_fix, - ) = _subset_dict( - condition_scale_map_preeq, variable_par_ids, fixed_par_ids - ) + ( + condition_scale_map_preeq_var, + condition_scale_map_preeq_fix, + ) = _subset_dict( + condition_scale_map_preeq, variable_par_ids, fixed_par_ids + ) - condition_map_sim_var, condition_map_sim_fix = _subset_dict( - condition_map_sim, variable_par_ids, fixed_par_ids - ) + condition_map_sim_var, condition_map_sim_fix = _subset_dict( + condition_map_sim, variable_par_ids, fixed_par_ids + ) - condition_scale_map_sim_var, condition_scale_map_sim_fix = _subset_dict( - condition_scale_map_sim, variable_par_ids, fixed_par_ids - ) + condition_scale_map_sim_var, condition_scale_map_sim_fix = ( + _subset_dict( + condition_scale_map_sim, variable_par_ids, fixed_par_ids + ) + ) + else: + condition_map_preeq_var = condition_map_preeq + condition_map_preeq_fix = {} + condition_scale_map_preeq_var = condition_scale_map_preeq + condition_scale_map_preeq_fix = {} + condition_map_sim_var = condition_map_sim + condition_map_sim_fix = {} + condition_scale_map_sim_var = condition_scale_map_sim + condition_scale_map_sim_fix = {} logger.debug( "Fixed parameters preequilibration: " f"{condition_map_preeq_fix}" diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 97d96af324..401cfba6d5 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -5,9 +5,12 @@ for a subset of the benchmark problems. 
""" +from functools import partial from pathlib import Path import fiddy import amici +import equinox as eqx +import jax.numpy as jnp import numpy as np import pandas as pd import petab.v1 as petab @@ -28,10 +31,12 @@ from amici.logging import get_logger from amici.petab.simulations import ( LLH, + SLLH, RDATAS, rdatas_to_measurement_df, simulate_petab, ) +from amici.jax.petab import run_simulations, JAXProblem from petab.v1.visualize import plot_problem @@ -270,38 +275,102 @@ def test_jax_llh(benchmark_problem): pytest.skip("Excluded from JAX check due to excessive runtime") amici_solver = amici_model.getSolver() + cur_settings = settings[problem_id] amici_solver.setAbsoluteTolerance(1e-8) amici_solver.setRelativeTolerance(1e-8) amici_solver.setMaxSteps(10_000) - llh_amici = simulate_petab( + simulate_amici = partial( + simulate_petab, petab_problem=petab_problem, amici_model=amici_model, solver=amici_solver, + scaled_parameters=True, + scaled_gradients=True, log_level=logging.DEBUG, - )[LLH] + ) + + np.random.seed(cur_settings.rng_seed) + + problems_for_gradient_check_jax = list( + set(problems_for_gradient_check) - set("Laske_PLOSComputBiol2019") + # Laske has nan values in gradient due to nan values in observables that are not used in the likelihood + # but are problematic during backpropagation + ) + + problem_parameters = None + if problem_id in problems_for_gradient_check_jax: + point = petab_problem.x_nominal_free_scaled + for _ in range(20): + amici_solver.setSensitivityMethod(amici.SensitivityMethod.adjoint) + amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) + amici_model.setSteadyStateSensitivityMode( + cur_settings.ss_sensitivity_mode + ) + point_noise = ( + np.random.randn(len(point)) * cur_settings.noise_level + ) + point += point_noise # avoid small gradients at nominal value + + problem_parameters = dict(zip(petab_problem.x_free_ids, point)) + + r_amici = simulate_amici( + problem_parameters=problem_parameters, + ) + if np.isfinite(r_amici[LLH]): + break + else: + raise RuntimeError("Could not compute expected derivative.") + else: + r_amici = simulate_amici() + llh_amici = r_amici[LLH] jax_model = import_petab_problem( petab_problem, model_output_dir=benchmark_outdir / problem_id, jax=True, ) - jax_model = jax_model.set_petab_problem(petab_problem) + jax_problem = JAXProblem(jax_model, petab_problem) simulation_conditions = ( petab_problem.get_simulation_conditions_from_measurement_df() ) simulation_conditions = tuple( tuple(row) for _, row in simulation_conditions.iterrows() ) - rdatas_jax = jax_model.run_simulations( - simulation_conditions=simulation_conditions, - ) + if problem_parameters: + jax_problem = eqx.tree_at( + lambda x: x.parameters, + jax_problem, + jnp.array( + [problem_parameters[pid] for pid in jax_problem.parameter_ids] + ), + ) + if problem_id in problems_for_gradient_check_jax: + (llh_jax, rdatas_jax), sllh_jax = eqx.filter_jit( + eqx.filter_value_and_grad(run_simulations, has_aux=True) + )(jax_problem, simulation_conditions) + else: + llh_jax, rdatas_jax = eqx.filter_jit(run_simulations)( + jax_problem, simulation_conditions + ) - llh_jax = sum(r.llh for r in rdatas_jax) + np.testing.assert_allclose( + llh_jax, + llh_amici, + rtol=1e-3, + atol=1e-3, + err_msg=f"LLH mismatch for {problem_id}", + ) - assert np.isclose( - llh_amici, llh_jax, rtol=1e-3, atol=1e-3 - ), f"LLH mismatch for {problem_id} with {llh_amici} (amici) vs {llh_jax} (jax)" + if problem_id in problems_for_gradient_check_jax: + sllh_amici = r_amici[SLLH] + 
np.testing.assert_allclose( + sllh_jax.parameters, + np.array([sllh_amici[pid] for pid in jax_problem.parameter_ids]), + rtol=1e-2, + atol=1e-2, + err_msg=f"SLLH mismatch for {problem_id}", + ) @pytest.mark.filterwarnings( From e399f4c1d9a91b651682beb58e8d51a75a8fd402 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 16 Nov 2024 09:57:27 +0000 Subject: [PATCH 58/80] update template --- .pre-commit-config.yaml | 21 ---------- python/sdist/amici/jax.template.py | 64 ++++++++++++++---------------- 2 files changed, 29 insertions(+), 56 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f16458b29a..10ee5a4925 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,27 +10,6 @@ repos: args: [--allow-multiple-documents] - id: end-of-file-fixer - id: trailing-whitespace -- repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.6.7 - hooks: - # Run the linter. - - id: ruff - args: - - --fix - - --config - - python/sdist/pyproject.toml - - # Run the formatter. - - id: ruff-format - args: - - --config - - python/sdist/pyproject.toml -- repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 - hooks: - - id: pyupgrade - args: ["--py310-plus"] exclude: '^(ThirdParty|models)/' diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index b6048b57f5..a53ab2066a 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -1,7 +1,7 @@ import jax.numpy as jnp from interpax import interp1d -from amici.jax import JAXModel +from amici.jax.model import JAXModel class JAXModel_TPL_MODEL_NAME(JAXModel): @@ -11,24 +11,22 @@ def __init__(self): @staticmethod def xdot(t, x, args): - p, k, tcl = args + pk, tcl = args TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k + TPL_PK_SYMS = pk TPL_TCL_SYMS = tcl - TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, p, k, tcl) + TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, pk, tcl) TPL_XDOT_EQ return TPL_XDOT_RET @staticmethod - def _w(t, x, p, k, tcl): + def _w(t, x, pk, tcl): TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k + TPL_PK_SYMS = pk TPL_TCL_SYMS = tcl TPL_W_EQ @@ -36,10 +34,9 @@ def _w(t, x, p, k, tcl): return TPL_W_RET @staticmethod - def x0(p, k): + def x0(pk): - TPL_P_SYMS = p - TPL_K_SYMS = k + TPL_PK_SYMS = pk TPL_X0_EQ @@ -65,55 +62,48 @@ def x_rdata(x, tcl): return TPL_X_RDATA_RET @staticmethod - def tcl(x, p, k): + def tcl(x, pk): TPL_X_RDATA_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k + TPL_PK_SYMS = pk TPL_TOTAL_CL_EQ return TPL_TOTAL_CL_RET - @staticmethod - def y(t, x, p, k, tcl): + def y(self, t, x, pk, tcl): TPL_X_SYMS = x - TPL_P_SYMS = p - TPL_K_SYMS = k - TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, p, k, tcl) + TPL_PK_SYMS = pk + TPL_W_SYMS = self._w(t, x, pk, tcl) TPL_Y_EQ return TPL_Y_RET - @staticmethod - def sigmay(y, p, k): + def sigmay(self, y, pk): + TPL_PK_SYMS = pk + TPL_Y_SYMS = y - TPL_P_SYMS = p - TPL_K_SYMS = k TPL_SIGMAY_EQ return TPL_SIGMAY_RET - @staticmethod - def Jy(y, my, sigmay): + + def llh(self, t, x, pk, tcl, my, iy): + y = self.y(t, x, pk, tcl) TPL_Y_SYMS = y - TPL_MY_SYMS = my + sigmay = self.sigmay(y, pk) TPL_SIGMAY_SYMS = sigmay TPL_JY_EQ - return TPL_JY_RET - - @property - def parameter_ids(self): - return TPL_P_IDS - - @property - def fixed_parameter_ids(self): - return TPL_K_IDS + return jnp.array([ + TPL_JY_RET.at[iy].get(), + y.at[iy].get(), + sigmay.at[iy].get() + ]) @property def observable_ids(self): @@ -122,3 +112,7 @@ def observable_ids(self): @property def 
state_ids(self): return TPL_X_IDS + + @property + def parameter_ids(self): + return TPL_PK_IDS From eaae77880bc3bf2c9a91ecfda357456ed93aa74a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 16 Nov 2024 09:57:35 +0000 Subject: [PATCH 59/80] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10ee5a4925..f16458b29a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,27 @@ repos: args: [--allow-multiple-documents] - id: end-of-file-fixer - id: trailing-whitespace +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.6.7 + hooks: + # Run the linter. + - id: ruff + args: + - --fix + - --config + - python/sdist/pyproject.toml + + # Run the formatter. + - id: ruff-format + args: + - --config + - python/sdist/pyproject.toml +- repo: https://github.com/asottile/pyupgrade + rev: v3.17.0 + hooks: + - id: pyupgrade + args: ["--py310-plus"] exclude: '^(ThirdParty|models)/' From d79cfc1cb3ea0f636e76184904625591d21ecc0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sat, 16 Nov 2024 22:46:34 +0000 Subject: [PATCH 60/80] refactor fix test --- python/sdist/amici/de_export.py | 2 +- python/sdist/amici/jax.template.py | 32 +++--- python/sdist/amici/jax/model.py | 153 +++++++++++++++++------------ python/sdist/amici/jax/petab.py | 63 ++++++------ python/tests/test_jax.py | 120 ++++++++++++++-------- 5 files changed, 213 insertions(+), 157 deletions(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 793d746e9a..823f5f8ca1 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -319,7 +319,7 @@ def jnp_array_str(array) -> str: ), indent, ) - ) + )[indent:] for eq_name in eq_names }, **{ diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index a53ab2066a..08a546826f 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -8,17 +8,16 @@ class JAXModel_TPL_MODEL_NAME(JAXModel): def __init__(self): super().__init__() - @staticmethod - def xdot(t, x, args): + def xdot(self, t, x, args): pk, tcl = args TPL_X_SYMS = x TPL_PK_SYMS = pk TPL_TCL_SYMS = tcl - TPL_W_SYMS = JAXModel_TPL_MODEL_NAME._w(t, x, pk, tcl) + TPL_W_SYMS = self._w(t, x, pk, tcl) -TPL_XDOT_EQ + TPL_XDOT_EQ return TPL_XDOT_RET @@ -29,7 +28,7 @@ def _w(t, x, pk, tcl): TPL_PK_SYMS = pk TPL_TCL_SYMS = tcl -TPL_W_EQ + TPL_W_EQ return TPL_W_RET @@ -38,7 +37,7 @@ def x0(pk): TPL_PK_SYMS = pk -TPL_X0_EQ + TPL_X0_EQ return TPL_X0_RET @@ -47,7 +46,7 @@ def x_solver(x): TPL_X_RDATA_SYMS = x -TPL_X_SOLVER_EQ + TPL_X_SOLVER_EQ return TPL_X_SOLVER_RET @@ -57,7 +56,7 @@ def x_rdata(x, tcl): TPL_X_SYMS = x TPL_TCL_SYMS = tcl -TPL_X_RDATA_EQ + TPL_X_RDATA_EQ return TPL_X_RDATA_RET @@ -67,7 +66,7 @@ def tcl(x, pk): TPL_X_RDATA_SYMS = x TPL_PK_SYMS = pk -TPL_TOTAL_CL_EQ + TPL_TOTAL_CL_EQ return TPL_TOTAL_CL_RET @@ -77,7 +76,7 @@ def y(self, t, x, pk, tcl): TPL_PK_SYMS = pk TPL_W_SYMS = self._w(t, x, pk, tcl) -TPL_Y_EQ + TPL_Y_EQ return TPL_Y_RET @@ -86,7 +85,7 @@ def sigmay(self, y, pk): TPL_Y_SYMS = y -TPL_SIGMAY_EQ + TPL_SIGMAY_EQ return TPL_SIGMAY_RET @@ -94,16 +93,11 @@ def sigmay(self, y, pk): def llh(self, t, x, pk, tcl, my, iy): y = self.y(t, x, pk, tcl) TPL_Y_SYMS = y - sigmay = self.sigmay(y, pk) - TPL_SIGMAY_SYMS = sigmay + TPL_SIGMAY_SYMS = self.sigmay(y, pk) -TPL_JY_EQ + TPL_JY_EQ - 
return jnp.array([ - TPL_JY_RET.at[iy].get(), - y.at[iy].get(), - sigmay.at[iy].get() - ]) + return TPL_JY_RET.at[iy].get() @property def observable_ids(self): diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index ffd58ee8a1..f412faecac 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -3,7 +3,6 @@ import diffrax import equinox as eqx import jax.numpy as jnp -import numpy as np import jax # always use 64-bit precision. No-brainer on CPUs and GPUs don't make sense for stiff systems. @@ -16,10 +15,12 @@ class JAXModel(eqx.Module): JAXModel must provide model specific implementations of abstract methods. """ - @staticmethod @abstractmethod def xdot( - t: jnp.float_, x: jnp.ndarray, args: tuple[jnp.ndarray, jnp.ndarray] + self, + t: jnp.float_, + x: jnp.ndarray, + args: tuple[jnp.ndarray, jnp.ndarray], ) -> jnp.ndarray: """ Right-hand side of the ODE system. @@ -190,21 +191,6 @@ def parameter_ids(self) -> list[str]: """ ... - def _preeq(self, p, solver, controller, max_steps): - """ - Pre-equilibration of the model. - :param p: - parameters - :return: - Initial state vector - """ - x0 = self.x_solver(self.x0(p)) - tcl = self.tcl(x0, p) - return self._eq(p, tcl, x0, solver, controller, max_steps) - - def _posteq(self, p, x, tcl, solver, controller, max_steps): - return self._eq(p, tcl, x, solver, controller, max_steps) - def _eq(self, p, tcl, x0, solver, controller, max_steps): sol = diffrax.diffeqsolve( diffrax.ODETerm(self.xdot), @@ -216,12 +202,12 @@ def _eq(self, p, tcl, x0, solver, controller, max_steps): y0=x0, stepsize_controller=controller, max_steps=max_steps, + adjoint=diffrax.DirectAdjoint(), event=diffrax.Event(cond_fn=diffrax.steady_state_event()), ) - return sol.ys[-1, :] + return sol.ys[-1, :], sol.stats - def _solve(self, ts, p, x0, solver, controller, max_steps): - tcl = self.tcl(x0, p) + def _solve(self, p, ts, tcl, x0, solver, controller, max_steps, adjoint): sol = diffrax.diffeqsolve( diffrax.ODETerm(self.xdot), solver, @@ -229,14 +215,14 @@ def _solve(self, ts, p, x0, solver, controller, max_steps): t0=0.0, t1=ts[-1], dt0=None, - y0=self.x_solver(x0), + y0=x0, stepsize_controller=controller, max_steps=max_steps, - adjoint=diffrax.RecursiveCheckpointAdjoint(), + adjoint=adjoint, saveat=diffrax.SaveAt(ts=ts), throw=False, ) - return sol.ys, tcl, sol.stats + return sol.ys, sol.stats def _x_rdata(self, x, tcl): return jax.vmap(self.x_rdata, in_axes=(0, None))(x, tcl) @@ -246,62 +232,105 @@ def _outputs(self, ts, x, p, tcl, my, iys) -> jnp.float_: ts, x, p, tcl, my, iys ) + def _y(self, ts, xs, p, tcl, iys): + return jax.vmap( + lambda t, x, p, tcl, iy: self.y(t, x, p, tcl).at[iy].get(), + in_axes=(0, 0, None, None, 0), + )(ts, xs, p, tcl, iys) + + def _sigmay(self, ts, xs, p, tcl, iys): + return jax.vmap( + lambda t, x, p, tcl, iy: self.sigmay(self.y(t, x, p, tcl), p) + .at[iy] + .get(), + in_axes=(0, 0, None, None, 0), + )(ts, xs, p, tcl, iys) + # @eqx.filter_jit def simulate_condition( self, - ts: np.ndarray, - ts_dyn: np.ndarray, - my: np.ndarray, - iys: np.ndarray, p: jnp.ndarray, p_preeq: jnp.ndarray, - dynamic: bool, + ts_preeq: jnp.ndarray, + ts_dyn: jnp.ndarray, + ts_posteq: jnp.ndarray, + my: jnp.ndarray, + iys: jnp.ndarray, solver: diffrax.AbstractSolver, controller: diffrax.AbstractStepSizeController, + adjoint: diffrax.AbstractAdjoint, max_steps: int, + ret: str = "llh", ): # Pre-equilibration if p_preeq.shape[0] > 0: - x0 = self._preeq(p_preeq, solver, controller, max_steps) + x0 = self.x0(p_preeq) 
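+            # equilibrate the system with the pre-equilibration parameters
+            # before the main simulation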
+ tcl = self.tcl(x0, p_preeq) + current_x = self.x_solver(x0) + current_x, stats_preeq = self._eq( + p_preeq, tcl, current_x, solver, controller, max_steps + ) + # update tcl with new parameters + tcl = self.tcl(self.x_rdata(current_x, tcl), p) else: x0 = self.x0(p) + current_x = self.x_solver(x0) + stats_preeq = None + + tcl = self.tcl(x0, p) + x_preq = jnp.repeat( + current_x.reshape(1, -1), ts_preeq.shape[0], axis=0 + ) # Dynamic simulation - if dynamic: - x, tcl, stats = self._solve( - ts_dyn, p, x0, solver, controller, max_steps + if ts_dyn.shape[0] > 0: + x_dyn, stats_dyn = self._solve( + p, + ts_dyn, + tcl, + current_x, + solver, + controller, + max_steps, + adjoint, ) + current_x = x_dyn[-1, :] else: - x = jnp.repeat( - self.x_solver(x0).reshape(1, -1), - len(ts_dyn), - axis=0, + x_dyn = jnp.repeat( + current_x.reshape(1, -1), ts_dyn.shape[0], axis=0 ) - tcl = self.tcl(x0, p) - stats = None + stats_dyn = None # Post-equilibration - if len(ts) > len(ts_dyn): - if len(ts_dyn) > 0: - x_final = x[-1, :] - else: - x_final = self.x_solver(x0) - x_posteq = self._posteq( - p, x_final, tcl, solver, controller, max_steps - ) - x_posteq = jnp.repeat( - x_posteq.reshape(1, -1), - len(ts) - len(ts_dyn), - axis=0, + if ts_posteq.shape[0] > 0: + current_x, stats_posteq = self._eq( + p, tcl, current_x, solver, controller, max_steps ) - if len(ts_dyn) > 0: - x = jnp.concatenate((x, x_posteq), axis=0) - else: - x = x_posteq - - outputs = self._outputs(ts, x, p, tcl, my, iys) - llh = -jnp.sum(outputs[:, 0]) - obs = outputs[:, 1] - sigmay = outputs[:, 2] - x_rdata = jnp.stack(self._x_rdata(x, tcl), axis=1) - return llh, dict(llh=llh, x=x_rdata, y=obs, sigmay=sigmay, stats=stats) + else: + stats_posteq = None + + x_posteq = jnp.repeat( + current_x.reshape(1, -1), ts_posteq.shape[0], axis=0 + ) + + ts = jnp.concatenate((ts_preeq, ts_dyn, ts_posteq), axis=0) + x = jnp.concatenate((x_preq, x_dyn, x_posteq), axis=0) + + llhs = self._outputs(ts, x, p, tcl, my, iys) + llh = -jnp.sum(llhs) + return { + "llh": llh, + "llhs": llhs, + "x": self._x_rdata(x, tcl), + "x_solver": x, + "y": self._y(ts, x, p, tcl, iys), + "sigmay": self._sigmay(ts, x, p, tcl, iys), + "x0": self.x_rdata(x_preq[-1, :], tcl), + "x0_solver": x_preq[-1, :], + "tcl": tcl, + "res": self._y(ts, x, p, tcl, iys) - my, + }[ret], dict( + stats_preeq=stats_preeq, + stats_dyn=stats_dyn, + stats_posteq=stats_posteq, + ) diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index 6bf090d114..deb1d12d92 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -13,6 +13,7 @@ import diffrax import equinox as eqx +import jax.lax import jax.numpy as jnp import numpy as np import pandas as pd @@ -22,7 +23,7 @@ ParameterMappingForCondition, create_parameter_mapping, ) -from amici.jax.model import JAXModel +from amici.jax.model import JAXModel, simulate_condition def jax_unscale( @@ -35,7 +36,7 @@ def jax_unscale( parameter: Parameter to be unscaled. scale_str: - One of ``'lin'`` (synonymous with ``''``), ``'log'``, ``'log10'``. + One of ``petab.LIN``, ``petab.LOG``, ``petab.LOG10``. Returns: The unscaled parameter. @@ -51,12 +52,6 @@ def jax_unscale( class JAXProblem(eqx.Module): """ - :ivar solver: - Diffrax solver to use for model simulation - :ivar controller: - Step-size controller to use for model simulation - :ivar max_steps: - Maximum number of steps to take during a simulation :ivar parameters: Values for the model parameters. 
Only populated after setting the PEtab problem via :meth:`set_petab_problem`. Do not change dimensions, values may be changed during, e.g. model training. @@ -72,13 +67,11 @@ class JAXProblem(eqx.Module): parameters: jnp.ndarray model: JAXModel - parameter_mappings: dict[tuple[str], ParameterMappingForCondition] = ( - eqx.field(static=True) - ) + parameter_mappings: dict[tuple[str], ParameterMappingForCondition] measurements: dict[ tuple[str], - tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray, str], - ] = eqx.field(static=True) + tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray], + ] petab_problem: petab.Problem def __init__(self, model: JAXModel, petab_problem: petab.Problem): @@ -122,30 +115,31 @@ def _get_measurements(self, simulation_conditions: pd.DataFrame): """ measurements = dict() for _, simulation_condition in simulation_conditions.iterrows(): - measurements_df = self.petab_problem.measurement_df - for k, v in simulation_condition.items(): - measurements_df = measurements_df.query(f"{k} == '{v}'") + query = " & ".join( + [f"{k} == '{v}'" for k, v in simulation_condition.items()] + ) + m = self.petab_problem.measurement_df.query(query) - measurements_df.sort_values(by=petab.TIME, inplace=True) + m.sort_values(by=petab.TIME, inplace=True) - ts = measurements_df[petab.TIME].values - ts_dyn = [t for t in ts if np.isfinite(t)] - my = measurements_df[petab.MEASUREMENT].values + ts = m[petab.TIME].values + ts_preeq = ts[np.isfinite(ts) & (ts == 0)] + ts_dyn = ts[np.isfinite(ts) & (ts > 0)] + ts_posteq = ts[np.logical_not(np.isfinite(ts))] + my = m[petab.MEASUREMENT].values iys = np.array( [ self.model.observable_ids.index(oid) - for oid in measurements_df[petab.OBSERVABLE_ID].values + for oid in m[petab.OBSERVABLE_ID].values ] ) - # using strings here prevents tracing in jax - dynamic = ts_dyn and max(ts_dyn) > 0 measurements[tuple(simulation_condition)] = ( - np.array(ts), - np.array(ts_dyn), + ts_preeq, + ts_dyn, + ts_posteq, my, iys, - dynamic, ) return measurements @@ -236,21 +230,24 @@ def run_simulation( controller: diffrax.AbstractStepSizeController, max_steps: int, ): - ts, ts_dyn, my, iys, dynamic = self.measurements[simulation_condition] + ts_preeq, ts_dyn, ts_posteq, my, iys = self.measurements[ + simulation_condition + ] p = self.load_parameters(simulation_condition[0]) p_preeq = ( self.load_parameters(simulation_condition[1]) if len(simulation_condition) > 1 else jnp.array([]) ) - return self.model.simulate_condition( - ts, - ts_dyn, - my, - iys, + return simulate_condition( p, p_preeq, - dynamic, + self.model, + jax.lax.stop_gradient(jnp.array(ts_preeq)), + jax.lax.stop_gradient(jnp.array(ts_dyn)), + jax.lax.stop_gradient(jnp.array(ts_posteq)), + jax.lax.stop_gradient(jnp.array(my)), + jax.lax.stop_gradient(jnp.array(iys)), solver, controller, max_steps, diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 8c78253334..543f8f0544 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -5,6 +5,8 @@ import amici.jax import jax.numpy as jnp +import jax +import diffrax import numpy as np from amici.pysb_import import pysb2amici @@ -109,22 +111,16 @@ def _test_model(model_module, ts, p, k): amici_solver.setSensitivityOrder(amici.SensitivityOrder.first) rs_amici = amici.runAmiciSimulations(amici_model, amici_solver, [edata]) - check_fields_jax(rs_amici, jax_model, edata, ["x", "y", "llh"]) - check_fields_jax( - rs_amici, - jax_model, - edata, - ["x", "y", "llh", "sllh"], - sensi_order=amici.SensitivityOrder.first, 
+ rs_amici, jax_model, edata, ["x", "y", "llh", "res", "x0"] ) check_fields_jax( rs_amici, jax_model, edata, - ["x", "y", "llh", "sllh"], - sensi_order=amici.SensitivityOrder.second, + ["sllh", "sx0", "sx", "sres", "sy"], + sensi_order=amici.SensitivityOrder.first, ) @@ -136,41 +132,81 @@ def check_fields_jax( sensi_order=amici.SensitivityOrder.none, ): r_jax = dict() - kwargs = { - "ts": np.array(edata.getTimepoints()), - "ts_dyn": np.array(edata.getTimepoints()), - "p": np.array(edata.parameters), - "k": np.array(edata.fixedParameters), - "k_preeq": np.array([]), - "my": np.array(edata.getObservedData()).reshape( - np.array(edata.getTimepoints()).shape[0], -1 - ), - "pscale": np.array(edata.pscale), - } - if sensi_order == amici.SensitivityOrder.none: - ( - r_jax["llh"], - (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model._fun(**kwargs) - elif sensi_order == amici.SensitivityOrder.first: - ( - r_jax["llh"], - r_jax["sllh"], - (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model._grad(**kwargs) - elif sensi_order == amici.SensitivityOrder.second: - ( - r_jax["llh"], - r_jax["sllh"], - r_jax["s2llh"], - (r_jax["x"], r_jax["y"], r_jax["stats"]), - ) = jax_model._hessian(**kwargs) + ts = np.array(edata.getTimepoints()) + my = np.array(edata.getObservedData()).reshape(len(ts), -1) + ts = np.repeat(ts.reshape(-1, 1), my.shape[1], axis=1) + iys = np.repeat(np.arange(my.shape[1]).reshape(1, -1), len(ts), axis=0) + my = my.flatten() + ts = ts.flatten() + iys = iys.flatten() + + ts_preeq = ts[ts == 0] + ts_dyn = ts[ts > 0] + ts_posteq = np.array([]) + p = jnp.array(list(edata.parameters) + list(edata.fixedParameters)) + args = ( + jnp.array([]), # p_preeq + jnp.array(ts_preeq), # ts_preeq + jnp.array(ts_dyn), # ts_dyn + jnp.array(ts_posteq), # ts_posteq + jnp.array(my), # my + jnp.array(iys), # iys + diffrax.Kvaerno5(), # solver + diffrax.PIDController(atol=1e-8, rtol=1e-8), # controller + diffrax.RecursiveCheckpointAdjoint(), # adjoint + 2**8, # max_steps + ) + fun = jax_model.simulate_condition + + for output in ["llh", "x0", "x", "y", "res"]: + oargs = (*args[:-2], diffrax.DirectAdjoint(), 2**8, output) + if sensi_order == amici.SensitivityOrder.none: + r_jax[output] = fun(p, *oargs)[0] + if sensi_order == amici.SensitivityOrder.first: + if output == "llh": + r_jax[f"s{output}"] = jax.grad(fun, has_aux=True)(p, *args)[0] + else: + r_jax[f"s{output}"] = jax.jacfwd(fun, has_aux=True)(p, *oargs)[ + 0 + ] for field in fields: for r_amici, r_jax in zip(rs_amici, [r_jax]): + actual = r_jax[field] + desired = r_amici[field] + if field == "x": + actual = actual[iys == 0, :] + if field == "y": + actual = np.stack( + [actual[iys == iy] for iy in sorted(np.unique(iys))], + axis=1, + ) + elif field == "sllh": + actual = actual[: len(edata.parameters)] + elif field == "sx": + actual = np.permute_dims( + actual[iys == 0, :, : len(edata.parameters)], (0, 2, 1) + ) + elif field == "sy": + actual = np.permute_dims( + np.stack( + [ + actual[iys == iy, : len(edata.parameters)] + for iy in sorted(np.unique(iys)) + ], + axis=1, + ), + (0, 2, 1), + ) + elif field == "sx0": + actual = actual[:, : len(edata.parameters)].T + elif field == "sres": + actual = actual[:, : len(edata.parameters)] + assert_allclose( - actual=r_amici[field], - desired=r_jax[field], - atol=1e-6, - rtol=1e-6, + actual=actual, + desired=desired, + atol=1e-5, + rtol=1e-5, + err_msg=f"field {field} does not match", ) From 94aa679684440adac244a6e0b58e77b664dab448 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: 
Sat, 16 Nov 2024 23:12:33 +0000 Subject: [PATCH 61/80] Update petab.py --- python/sdist/amici/jax/petab.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index deb1d12d92..c929217e55 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -23,7 +23,7 @@ ParameterMappingForCondition, create_parameter_mapping, ) -from amici.jax.model import JAXModel, simulate_condition +from amici.jax.model import JAXModel def jax_unscale( @@ -239,10 +239,9 @@ def run_simulation( if len(simulation_condition) > 1 else jnp.array([]) ) - return simulate_condition( + return self.model.simulate_condition( p, p_preeq, - self.model, jax.lax.stop_gradient(jnp.array(ts_preeq)), jax.lax.stop_gradient(jnp.array(ts_dyn)), jax.lax.stop_gradient(jnp.array(ts_posteq)), From b129c868b161cee07f1891b248315ebc91052b47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 00:26:00 +0000 Subject: [PATCH 62/80] fixups --- python/sdist/amici/jax/petab.py | 27 ++++++++++--------- .../benchmark-models/test_petab_benchmark.py | 7 ++--- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index c929217e55..8fb7181aa9 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -118,9 +118,9 @@ def _get_measurements(self, simulation_conditions: pd.DataFrame): query = " & ".join( [f"{k} == '{v}'" for k, v in simulation_condition.items()] ) - m = self.petab_problem.measurement_df.query(query) - - m.sort_values(by=petab.TIME, inplace=True) + m = self.petab_problem.measurement_df.query(query).sort_values( + by=petab.TIME + ) ts = m[petab.TIME].values ts_preeq = ts[np.isfinite(ts) & (ts == 0)] @@ -240,16 +240,17 @@ def run_simulation( else jnp.array([]) ) return self.model.simulate_condition( - p, - p_preeq, - jax.lax.stop_gradient(jnp.array(ts_preeq)), - jax.lax.stop_gradient(jnp.array(ts_dyn)), - jax.lax.stop_gradient(jnp.array(ts_posteq)), - jax.lax.stop_gradient(jnp.array(my)), - jax.lax.stop_gradient(jnp.array(iys)), - solver, - controller, - max_steps, + p=p, + p_preeq=p_preeq, + ts_preeq=jax.lax.stop_gradient(jnp.array(ts_preeq)), + ts_dyn=jax.lax.stop_gradient(jnp.array(ts_dyn)), + ts_posteq=jax.lax.stop_gradient(jnp.array(ts_posteq)), + my=jax.lax.stop_gradient(jnp.array(my)), + iys=jax.lax.stop_gradient(jnp.array(iys)), + solver=solver, + controller=controller, + max_steps=max_steps, + adjoint=diffrax.RecursiveCheckpointAdjoint(), ) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 401cfba6d5..e34602793b 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -260,6 +260,7 @@ def benchmark_problem(request): @pytest.mark.filterwarnings( "ignore:The following problem parameters were not used *", "ignore: The environment variable *", + "ignore:Adjoint sensitivity analysis for models with discontinuous ", ) def test_jax_llh(benchmark_problem): problem_id, petab_problem, amici_model = benchmark_problem @@ -271,7 +272,7 @@ def test_jax_llh(benchmark_problem): "SalazarCavazos_MBoC2020", "Smith_BMCSystBiol2013", ): - # confirmed to work 27/10/2024 but experienced high local runtime (M2 MBA, >30s) + # confirmed to work (no gradients) 27/10/2024 but experienced high local runtime (M2 MBA, >30s) pytest.skip("Excluded from JAX check due to excessive runtime") 
amici_solver = amici_model.getSolver() @@ -346,11 +347,11 @@ def test_jax_llh(benchmark_problem): ), ) if problem_id in problems_for_gradient_check_jax: - (llh_jax, rdatas_jax), sllh_jax = eqx.filter_jit( + (llh_jax, _), sllh_jax = eqx.filter_jit( eqx.filter_value_and_grad(run_simulations, has_aux=True) )(jax_problem, simulation_conditions) else: - llh_jax, rdatas_jax = eqx.filter_jit(run_simulations)( + llh_jax, _ = eqx.filter_jit(run_simulations)( jax_problem, simulation_conditions ) From 9b6a62ba7327c80c73d4838bda8a9184eadab49c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 01:34:31 +0000 Subject: [PATCH 63/80] fixup --- python/sdist/amici/jax/model.py | 5 +++-- tests/benchmark-models/test_petab_benchmark.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index f412faecac..4d7059a0d7 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -325,11 +325,12 @@ def simulate_condition( "x_solver": x, "y": self._y(ts, x, p, tcl, iys), "sigmay": self._sigmay(ts, x, p, tcl, iys), - "x0": self.x_rdata(x_preq[-1, :], tcl), - "x0_solver": x_preq[-1, :], + "x0": self.x_rdata(x[0, :], tcl), + "x0_solver": x[0, :], "tcl": tcl, "res": self._y(ts, x, p, tcl, iys) - my, }[ret], dict( + x=x, stats_preeq=stats_preeq, stats_dyn=stats_dyn, stats_posteq=stats_posteq, diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index e34602793b..8fb5e17851 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -294,7 +294,7 @@ def test_jax_llh(benchmark_problem): np.random.seed(cur_settings.rng_seed) problems_for_gradient_check_jax = list( - set(problems_for_gradient_check) - set("Laske_PLOSComputBiol2019") + set(problems_for_gradient_check) - {"Laske_PLOSComputBiol2019"} # Laske has nan values in gradient due to nan values in observables that are not used in the likelihood # but are problematic during backpropagation ) From 74cd49854d1e992661ef174964623aad9021b702 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 12:47:25 +0000 Subject: [PATCH 64/80] add documentation and typing --- python/sdist/amici/jax.template.py | 25 +- python/sdist/amici/jax/model.py | 351 ++++++++++++++---- python/sdist/amici/jax/petab.py | 155 +++++--- python/sdist/pyproject.toml | 1 + python/tests/test_jax.py | 2 + .../benchmark-models/test_petab_benchmark.py | 3 + 6 files changed, 400 insertions(+), 137 deletions(-) diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index 08a546826f..b9b37c8402 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -8,7 +8,7 @@ class JAXModel_TPL_MODEL_NAME(JAXModel): def __init__(self): super().__init__() - def xdot(self, t, x, args): + def _xdot(self, t, x, args): pk, tcl = args @@ -21,8 +21,7 @@ def xdot(self, t, x, args): return TPL_XDOT_RET - @staticmethod - def _w(t, x, pk, tcl): + def _w(self, t, x, pk, tcl): TPL_X_SYMS = x TPL_PK_SYMS = pk @@ -32,8 +31,7 @@ def _w(t, x, pk, tcl): return TPL_W_RET - @staticmethod - def x0(pk): + def _x0(self, pk): TPL_PK_SYMS = pk @@ -41,8 +39,7 @@ def x0(pk): return TPL_X0_RET - @staticmethod - def x_solver(x): + def _x_solver(self, x): TPL_X_RDATA_SYMS = x @@ -50,8 +47,7 @@ def x_solver(x): return TPL_X_SOLVER_RET - @staticmethod - def x_rdata(x, tcl): + def _x_rdata(self, x, tcl): 
TPL_X_SYMS = x TPL_TCL_SYMS = tcl @@ -60,8 +56,7 @@ def x_rdata(x, tcl): return TPL_X_RDATA_RET - @staticmethod - def tcl(x, pk): + def _tcl(self, x, pk): TPL_X_RDATA_SYMS = x TPL_PK_SYMS = pk @@ -80,7 +75,7 @@ def y(self, t, x, pk, tcl): return TPL_Y_RET - def sigmay(self, y, pk): + def _sigmay(self, y, pk): TPL_PK_SYMS = pk TPL_Y_SYMS = y @@ -90,10 +85,10 @@ def sigmay(self, y, pk): return TPL_SIGMAY_RET - def llh(self, t, x, pk, tcl, my, iy): - y = self.y(t, x, pk, tcl) + def _llh(self, t, x, pk, tcl, my, iy): + y = self._y(t, x, pk, tcl) TPL_Y_SYMS = y - TPL_SIGMAY_SYMS = self.sigmay(y, pk) + TPL_SIGMAY_SYMS = self._sigmay(y, pk) TPL_JY_EQ diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index 4d7059a0d7..2534728a96 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -1,45 +1,51 @@ +"""Model simulation using JAX.""" + +# ruff: noqa: F821 F722 + from abc import abstractmethod import diffrax import equinox as eqx import jax.numpy as jnp import jax - -# always use 64-bit precision. No-brainer on CPUs and GPUs don't make sense for stiff systems. -jax.config.update("jax_enable_x64", True) +import jaxtyping as jt class JAXModel(eqx.Module): """ - JAXModel provides an abstract base class for a JAX-based implementation of an AMICI model. Models inheriting from - JAXModel must provide model specific implementations of abstract methods. + JAXModel provides an abstract base class for a JAX-based implementation of an AMICI model. The class implements + routines for simulation and evaluation of derived quantities, model specific implementations need to be provided by + classes inheriting from JAXModel. """ @abstractmethod - def xdot( + def _xdot( self, t: jnp.float_, - x: jnp.ndarray, - args: tuple[jnp.ndarray, jnp.ndarray], - ) -> jnp.ndarray: + x: jt.Float[jt.Array, "nxs"], + args: tuple[jt.Float[jt.Array, "np"], jt.Float[jt.Array, "ncl"]], + ) -> jt.Float[jt.Array, "nxs"]: """ Right-hand side of the ODE system. :param t: time point :param x: state vector - :param args: tuple of parameters, fixed parameters and total values for conservation laws + :param args: tuple of parameters and total values for conservation laws :return: - Derivative of the state vector at time point, same data structure as x. + Temporal derivative of the state vector x at time point t. """ ... - @staticmethod @abstractmethod def _w( - t: jnp.float_, x: jnp.ndarray, pk: jnp.ndarray, tcl: jnp.ndarray - ) -> jnp.ndarray: + self, + t: jt.Float[jt.Array, ""], + x: jt.Float[jt.Array, "nxs"], + pk: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + ) -> jt.Float[jt.Array, "nw"]: """ - Compute the expressions (algebraic variables) of the model. + Compute the expressions, i.e. derived quantities that are used in other parts of the model. :param t: time point :param x: state vector @@ -50,9 +56,8 @@ def _w( """ ... - @staticmethod @abstractmethod - def x0(pk: jnp.ndarray) -> jnp.ndarray: + def _x0(self, pk: jt.Float[jt.Array, "np"]) -> jt.Float[jt.Array, "nx"]: """ Compute the initial state vector. @@ -60,9 +65,10 @@ def x0(pk: jnp.ndarray) -> jnp.ndarray: """ ... - @staticmethod @abstractmethod - def x_solver(x: jnp.ndarray) -> jnp.ndarray: + def _x_solver( + self, x: jt.Float[jt.Array, "nx"] + ) -> jt.Float[jt.Array, "nxs"]: """ Transform the full state vector to the reduced state vector for ODE solving. @@ -73,9 +79,10 @@ def x_solver(x: jnp.ndarray) -> jnp.ndarray: """ ... 
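# Shape-symbol legend for the jaxtyping annotations used throughout this class
# (read off the surrounding signatures): "np" parameters, "nx" full state,
# "nxs" reduced/solver state, "ncl" conservation-law totals, "nw" expressions,
# "ny" observables, "nt"/"nt_dyn" time points; e.g. Float[Array, "nt nxs"] is a
# time-points-by-states matrix. Below is a minimal standalone sketch of how such
# annotations behave at runtime; the `scale` function and its names are purely
# illustrative and not part of AMICI:

import jax.numpy as jnp
from beartype import beartype
from jaxtyping import Array, Float


@beartype
def scale(x: Float[Array, "n"], factor: float) -> Float[Array, "n"]:
    # elementwise scaling; the result keeps the annotated 1-D shape "n"
    return factor * x


scale(jnp.ones(3), 2.0)  # accepted: 1-D float array
# scale(jnp.ones((2, 3)), 2.0) would be rejected: a 2-D array does not match "n"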
- @staticmethod @abstractmethod - def x_rdata(x: jnp.ndarray, tcl: jnp.ndarray) -> jnp.ndarray: + def _x_rdata( + self, x: jt.Float[jt.Array, "nxs"], tcl: jt.Float[jt.Array, "ncl"] + ) -> jt.Float[jt.Array, "nx"]: """ Compute the full state vector from the reduced state vector and conservation laws. @@ -88,11 +95,13 @@ def x_rdata(x: jnp.ndarray, tcl: jnp.ndarray) -> jnp.ndarray: """ ... - @staticmethod @abstractmethod - def tcl(x: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: + def _tcl( + self, x: jt.Float[jt.Array, "nx"], pk: jt.Float[jt.Array, "np"] + ) -> jt.Float[jt.Array, "ncl"]: """ Compute the total values for conservation laws. + :param x: state vector :param pk: @@ -103,11 +112,16 @@ def tcl(x: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: ... @abstractmethod - def y( - self, t: jnp.float_, x: jnp.ndarray, pk: jnp.ndarray, tcl: jnp.ndarray - ) -> jnp.ndarray: + def _y( + self, + t: jt.Float[jt.Scalar, ""], + x: jt.Float[jt.Array, "nxs"], + pk: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + ) -> jt.Float[jt.Array, "ny"]: """ Compute the observables. + :param t: time point :param x: @@ -117,17 +131,19 @@ def y( :param tcl: total values for conservation laws :return: - observable + observables """ ... - @staticmethod @abstractmethod - def sigmay(y: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: + def _sigmay( + self, y: jt.Float[jt.Array, "ny"], pk: jt.Float[jt.Array, "np"] + ) -> jt.Float[jt.Array, "ny"]: """ Compute the standard deviations of the observables. + :param y: - observable for the specified observable id + observables :param pk: parameters :return: @@ -136,16 +152,17 @@ def sigmay(y: jnp.ndarray, pk: jnp.ndarray) -> jnp.ndarray: ... @abstractmethod - def llh( + def _llh( self, - t: jnp.float_, - x: jnp.ndarray, - pk: jnp.ndarray, - tcl: jnp.ndarray, - iy: int, - ) -> jnp.float_: + t: jt.Float[jt.Scalar, ""], + x: jt.Float[jt.Array, "nxs"], + pk: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + my: jt.Float[jt.Array, ""], + iy: jt.Int[jt.Array, ""], + ) -> jt.Float[jt.Scalar, ""]: """ - Compute the log-likelihood of the observable for the specified observable id. + Compute the log-likelihood of the observable for the specified observable index. :param t: time point :param x: @@ -154,8 +171,10 @@ def llh( parameters :param tcl: total values for conservation laws + :param my: + observed data :param iy: - observable id + observable index :return: log-likelihood of the observable """ @@ -191,9 +210,34 @@ def parameter_ids(self) -> list[str]: """ ... - def _eq(self, p, tcl, x0, solver, controller, max_steps): + def _eq( + self, + p: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + x0: jt.Float[jt.Array, "nxs"], + solver: diffrax.AbstractSolver, + controller: diffrax.AbstractStepSizeController, + max_steps: jnp.int_, + ) -> tuple[jt.Float[jt.Array, "1 nxs"], dict]: + """ + Solve the steady state equation. 
+ + :param p: + parameters + :param tcl: + total values for conservation laws + :param x0: + initial state vector + :param solver: + ODE solver + :param controller: + step size controller + :param max_steps: + maximum number of steps + :return: + """ sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.xdot), + diffrax.ODETerm(self._xdot), solver, args=(p, tcl), t0=0.0, @@ -207,9 +251,41 @@ def _eq(self, p, tcl, x0, solver, controller, max_steps): ) return sol.ys[-1, :], sol.stats - def _solve(self, p, ts, tcl, x0, solver, controller, max_steps, adjoint): + def _solve( + self, + p: jt.Float[jt.Array, "np"], + ts: jt.Float[jt.Array, "nt_dyn"], + tcl: jt.Float[jt.Array, "ncl"], + x0: jt.Float[jt.Array, "nxs"], + solver: diffrax.AbstractSolver, + controller: diffrax.AbstractStepSizeController, + max_steps: jnp.int_, + adjoint: diffrax.AbstractAdjoint, + ) -> tuple[jt.Float[jt.Array, "nt nxs"], dict]: + """ + Solve the ODE system. + + :param p: + parameters + :param ts: + time points at which solutions are evaluated + :param tcl: + total values for conservation laws + :param x0: + initial state vector + :param solver: + ODE solver + :param controller: + step size controller + :param max_steps: + maximum number of steps + :param adjoint: + adjoint method + :return: + solution at time points ts and statistics + """ sol = diffrax.diffeqsolve( - diffrax.ODETerm(self.xdot), + diffrax.ODETerm(self._xdot), solver, args=(p, tcl), t0=0.0, @@ -224,23 +300,107 @@ def _solve(self, p, ts, tcl, x0, solver, controller, max_steps, adjoint): ) return sol.ys, sol.stats - def _x_rdata(self, x, tcl): - return jax.vmap(self.x_rdata, in_axes=(0, None))(x, tcl) + def _x_rdatas( + self, x: jt.Float[jt.Array, "nt nxs"], tcl: jt.Float[jt.Array, "ncl"] + ) -> jt.Float[jt.Array, "nt nx"]: + """ + Compute the full state vector from the reduced state vector and conservation laws. + + :param x: + reduced state vector + :param tcl: + total values for conservation laws + :return: + full state vector + """ + return jax.vmap(self._x_rdata, in_axes=(0, None))(x, tcl) + + def _llhs( + self, + ts: jt.Float[jt.Array, "nt nx"], + xs: jt.Float[jt.Array, "nt nxs"], + p: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + mys: jt.Float[jt.Array, "nt"], + iys: jt.Int[jt.Array, "nt"], + ) -> jt.Float[jt.Array, "nt"]: + """ + Compute the log-likelihood of the observables. - def _outputs(self, ts, x, p, tcl, my, iys) -> jnp.float_: - return jax.vmap(self.llh, in_axes=(0, 0, None, None, 0, 0))( - ts, x, p, tcl, my, iys + :param ts: + time points + :param xs: + state vectors + :param p: + parameters + :param tcl: + total values for conservation laws + :param mys: + observed data + :param iys: + observable indices + :return: + log-likelihood of the observables + """ + return jax.vmap(self._llh, in_axes=(0, 0, None, None, 0, 0))( + ts, xs, p, tcl, mys, iys ) - def _y(self, ts, xs, p, tcl, iys): + def _ys( + self, + ts: jt.Float[jt.Array, "nt"], + xs: jt.Float[jt.Array, "nt nxs"], + p: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + iys: jt.Float[jt.Array, "nt"], + ) -> jt.Int[jt.Array, "nt"]: + """ + Compute the observables. 
+ + :param ts: + time points + :param xs: + state vectors + :param p: + parameters + :param tcl: + total values for conservation laws + :param iys: + observable indices + :return: + observables + """ return jax.vmap( - lambda t, x, p, tcl, iy: self.y(t, x, p, tcl).at[iy].get(), + lambda t, x, p, tcl, iy: self._y(t, x, p, tcl).at[iy].get(), in_axes=(0, 0, None, None, 0), )(ts, xs, p, tcl, iys) - def _sigmay(self, ts, xs, p, tcl, iys): + def _sigmays( + self, + ts: jt.Float[jt.Array, "nt"], + xs: jt.Float[jt.Array, "nt nxs"], + p: jt.Float[jt.Array, "np"], + tcl: jt.Float[jt.Array, "ncl"], + iys: jt.Int[jt.Array, "nt"], + ): + """ + Compute the standard deviations of the observables. + + :param ts: + time points + :param xs: + state vectors + :param p: + parameters + :param tcl: + total values for conservation laws + :param iys: + observable indices + :return: + standard deviations of the observables + """ return jax.vmap( - lambda t, x, p, tcl, iy: self.sigmay(self.y(t, x, p, tcl), p) + lambda t, x, p, tcl, iy: self._sigmay(self._y(t, x, p, tcl), p) .at[iy] .get(), in_axes=(0, 0, None, None, 0), @@ -249,35 +409,80 @@ def _sigmay(self, ts, xs, p, tcl, iys): # @eqx.filter_jit def simulate_condition( self, - p: jnp.ndarray, - p_preeq: jnp.ndarray, - ts_preeq: jnp.ndarray, - ts_dyn: jnp.ndarray, - ts_posteq: jnp.ndarray, - my: jnp.ndarray, - iys: jnp.ndarray, + p: jt.Float[jt.Array, "np"], + p_preeq: jt.Float[jt.Array, "?np"], + ts_preeq: jt.Float[jt.Array, "nt_preeq"], + ts_dyn: jt.Float[jt.Array, "nt_dyn"], + ts_posteq: jt.Float[jt.Array, "nt_posteq"], + my: jt.Float[jt.Array, "nt_preeq+nt_dyn+nt_posteq"], + iys: jt.Float[jt.Array, "nt_preeq+nt_dyn+nt_posteq"], solver: diffrax.AbstractSolver, controller: diffrax.AbstractStepSizeController, adjoint: diffrax.AbstractAdjoint, - max_steps: int, + max_steps: jnp.int_, ret: str = "llh", ): + r""" + Simulate a condition. + :param p: + parameters for simulation ordered according to ids in :ivar parameter_ids: + :param p_preeq: + parameters for pre-equilibration ordered according to ids in :ivar parameter_ids:. May be empty to + disable pre-equilibration. + :param ts_preeq: + time points for pre-equilibration. Usually valued 0.0, but needs to be shaped according to + the number of observables that are evaluated after pre-equilibration. + :param ts_dyn: + time points for dynamic simulation. Usually valued > 0.0 and sorted in monotonically increasing order. + Duplicate time points are allowed to facilitate the evaluation of multiple observables at specific time + points. + :param ts_posteq: + time points for post-equilibration. Usually valued \Infty, but needs to be shaped according to + the number of observables that are evaluated after post-equilibration. + :param my: + observed data + :param iys: + indices of the observables according to ordering in :ivar observable_ids: + :param solver: + ODE solver + :param controller: + step size controller + :param adjoint: + adjoint method. Recommended values are `diffrax.DirectAdjoint()` for jax.jacfwd (with vector-valued + outputs) and `diffrax.RecursiveCheckpointAdjoint()` for jax.grad (for scalar-valued outputs). + :param max_steps: + maximum number of solver steps + :param ret: + which output to return. 
Valid values are + - `llh`: negative log-likelihood (default) + - `llhs`: negative log-likelihoods at each time point + - `x0`: full initial state vector (after pre-equilibration) + - `x0_solver`: reduced initial state vector (after pre-equilibration) + - `x`: full state vector + - `x_solver`: reduced state vector + - `y`: observables + - `sigmay`: standard deviations of the observables + - `tcl`: total values for conservation laws (at final timepoint) + - `res`: residuals (observed - simulated) + :return: + output according to `ret` and statistics + """ # Pre-equilibration if p_preeq.shape[0] > 0: - x0 = self.x0(p_preeq) - tcl = self.tcl(x0, p_preeq) - current_x = self.x_solver(x0) + x0 = self._x0(p_preeq) + tcl = self._tcl(x0, p_preeq) + current_x = self._x_solver(x0) current_x, stats_preeq = self._eq( p_preeq, tcl, current_x, solver, controller, max_steps ) # update tcl with new parameters - tcl = self.tcl(self.x_rdata(current_x, tcl), p) + tcl = self._tcl(self._x_rdata(current_x, tcl), p) else: - x0 = self.x0(p) - current_x = self.x_solver(x0) + x0 = self._x0(p) + current_x = self._x_solver(x0) stats_preeq = None - tcl = self.tcl(x0, p) + tcl = self._tcl(x0, p) x_preq = jnp.repeat( current_x.reshape(1, -1), ts_preeq.shape[0], axis=0 ) @@ -316,19 +521,19 @@ def simulate_condition( ts = jnp.concatenate((ts_preeq, ts_dyn, ts_posteq), axis=0) x = jnp.concatenate((x_preq, x_dyn, x_posteq), axis=0) - llhs = self._outputs(ts, x, p, tcl, my, iys) + llhs = self._llhs(ts, x, p, tcl, my, iys) llh = -jnp.sum(llhs) return { "llh": llh, "llhs": llhs, - "x": self._x_rdata(x, tcl), + "x": self._x_rdatas(x, tcl), "x_solver": x, - "y": self._y(ts, x, p, tcl, iys), - "sigmay": self._sigmay(ts, x, p, tcl, iys), - "x0": self.x_rdata(x[0, :], tcl), + "y": self._ys(ts, x, p, tcl, iys), + "sigmay": self._sigmays(ts, x, p, tcl, iys), + "x0": self._x_rdata(x[0, :], tcl), "x0_solver": x[0, :], "tcl": tcl, - "res": self._y(ts, x, p, tcl, iys) - my, + "res": self._ys(ts, x, p, tcl, iys) - my, }[ret], dict( x=x, stats_preeq=stats_preeq, diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index 8fb7181aa9..aae83f410c 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -1,18 +1,11 @@ -""" -JAX ----- -This module provides functions and classes to enable the use of JAX-based ODE solvers (currently diffrax) to simulate - AMICI processed models. The API in this module is experimental. Expect substantial changes and do not use in production - code. - -Loading this module will automatically enable 64-bit precision for JAX. -""" +"""PEtab wrappers for JAX models.""" "" from numbers import Number from collections.abc import Iterable import diffrax import equinox as eqx +import jaxtyping as jt import jax.lax import jax.numpy as jnp import numpy as np @@ -52,31 +45,33 @@ def jax_unscale( class JAXProblem(eqx.Module): """ + PEtab problem wrapper for JAX models. + :ivar parameters: - Values for the model parameters. Only populated after setting the PEtab problem via :meth:`set_petab_problem`. - Do not change dimensions, values may be changed during, e.g. model training. - :ivar parameter_mappings: - :class:`ParameterMappingForCondition` instances for each simulation condition. Only populated after setting the - PEtab problem via :meth:`set_petab_problem`. Do not set manually unless you know what you are doing. - :ivar measurements: - Subset measurement dataframes for each simulation condition. 
Only populated after setting the PEtab problem - via :meth:`set_petab_problem`. Do not set manually unless you know what you are doing. - :ivar petab_problem: - PEtab problem to simulate. Set via :meth:`set_petab_problem`. + Values for the model parameters. Do not change dimensions, values may be changed during, e.g. model training. + :ivar model: + JAXModel instance to use for simulation. + :ivar _parameter_mappings: + :class:`ParameterMappingForCondition` instances for each simulation condition. + :ivar _measurements: + Subset measurement dataframes for each simulation condition. + :ivar _petab_problem: + PEtab problem to simulate. """ parameters: jnp.ndarray model: JAXModel - parameter_mappings: dict[tuple[str], ParameterMappingForCondition] - measurements: dict[ + _parameter_mappings: dict[str, ParameterMappingForCondition] + _measurements: dict[ tuple[str], tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray], ] - petab_problem: petab.Problem + _petab_problem: petab.Problem def __init__(self, model: JAXModel, petab_problem: petab.Problem): """ Initialize a JAXProblem instance with a model and a PEtab problem. + :param model: JAXModel instance to use for simulation. :param petab_problem: @@ -84,15 +79,26 @@ def __init__(self, model: JAXModel, petab_problem: petab.Problem): """ self.model = model scs = petab_problem.get_simulation_conditions_from_measurement_df() - self.petab_problem = petab_problem - self.parameter_mappings = self._get_parameter_mappings(scs) - self.measurements = self._get_measurements(scs) + self._petab_problem = petab_problem + self._parameter_mappings = self._get_parameter_mappings(scs) + self._measurements = self._get_measurements(scs) self.parameters = self._get_nominal_parameter_values() - def _get_parameter_mappings(self, simulation_conditions: pd.DataFrame): + def _get_parameter_mappings( + self, simulation_conditions: pd.DataFrame + ) -> dict[str, ParameterMappingForCondition]: + """ + Create parameter mappings for the provided simulation conditions. + + :param simulation_conditions: + Simulation conditions to create parameter mappings for. Same format as returned by + :meth:`petab.Problem.get_simulation_conditions_from_measurement_df`. + :return: + Dictionary mapping simulation conditions to parameter mappings. + """ scs = list(set(simulation_conditions.values.flatten())) mappings = create_parameter_mapping( - petab_problem=self.petab_problem, + petab_problem=self._petab_problem, simulation_conditions=[ {petab.SIMULATION_CONDITION_ID: sc} for sc in scs ], @@ -104,21 +110,28 @@ def _get_parameter_mappings(self, simulation_conditions: pd.DataFrame): mapping.map_sim_var[sim_var] = 1.0 return dict(zip(scs, mappings)) - def _get_measurements(self, simulation_conditions: pd.DataFrame): + def _get_measurements( + self, simulation_conditions: pd.DataFrame + ) -> dict[ + tuple[str], + tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray], + ]: """ Set measurements for the model based on the provided simulation conditions. + :param simulation_conditions: Simulation conditions to create parameter mappings for. Same format as returned by :meth:`petab.Problem.get_simulation_conditions_from_measurement_df`. :return: - JAXModel instance with measurements set. + Dictionary mapping simulation conditions to measurements (tuple of pre-equilibrium, dynamic, + post-equilibrium time points; measurements and observable indices). 
""" measurements = dict() for _, simulation_condition in simulation_conditions.iterrows(): query = " & ".join( [f"{k} == '{v}'" for k, v in simulation_condition.items()] ) - m = self.petab_problem.measurement_df.query(query).sort_values( + m = self._petab_problem.measurement_df.query(query).sort_values( by=petab.TIME ) @@ -146,20 +159,21 @@ def _get_measurements(self, simulation_conditions: pd.DataFrame): def _get_nominal_parameter_values(self) -> jnp.ndarray: """ Set the nominal parameter values for the model based on the nominal values in the PEtab problem. + :return: JAXModel instance with parameter values set to the nominal values. """ - if self.petab_problem is None: + if self._petab_problem is None: raise ValueError( "PEtab problem not set, cannot set nominal values." ) return jnp.array( [ petab.scale( - self.petab_problem.parameter_df.loc[ + self._petab_problem.parameter_df.loc[ pval, petab.NOMINAL_VALUE ], - self.petab_problem.parameter_df.loc[ + self._petab_problem.parameter_df.loc[ pval, petab.PARAMETER_SCALE ], ) @@ -171,42 +185,54 @@ def _get_nominal_parameter_values(self) -> jnp.ndarray: def parameter_ids(self) -> list[str]: """ Parameter ids that are estimated in the PEtab problem. Same ordering as values in :attr:`parameters`. + :return: PEtab parameter ids """ - return self.petab_problem.parameter_df[ - self.petab_problem.parameter_df[petab.ESTIMATE] == 1 + return self._petab_problem.parameter_df[ + self._petab_problem.parameter_df[petab.ESTIMATE] == 1 ].index.tolist() def get_petab_parameter_by_id(self, name: str) -> jnp.float_: """ Get the value of a PEtab parameter by name. + :param name: - PEtab parameter id + PEtab parameter id, as returned by :attr:`parameter_ids`. :return: Value of the parameter """ return self.parameters[self.parameter_ids.index(name)] - def _unscale_p( - self, p: jnp.ndarray, pscale: tuple[str, ...] - ) -> jnp.ndarray: + def _unscale( + self, p: jt.Float[jt.Array, "np"], scales: tuple[str, ...] + ) -> jt.Float[jt.Array, "np"]: """ Unscaling of parameters. :param p: Parameter values - :param pscale: - Parameter scaling + :param scales: + Parameter scalings :return: Unscaled parameter values """ return jnp.array( - [jax_unscale(pval, scale) for pval, scale in zip(p, pscale)] + [jax_unscale(pval, scale) for pval, scale in zip(p, scales)] ) - def load_parameters(self, simulation_condition) -> jnp.ndarray: - mapping = self.parameter_mappings[simulation_condition] + def load_parameters( + self, simulation_condition: str + ) -> jt.Float[jt.Array, "np"]: + """ + Load parameters for a simulation condition. + + :param simulation_condition: + Simulation condition to load parameters for. + :return: + Parameters for the simulation condition. + """ + mapping = self._parameter_mappings[simulation_condition] p = jnp.array( [ pval @@ -221,16 +247,31 @@ def load_parameters(self, simulation_condition) -> jnp.ndarray: for pname in self.model.parameter_ids ] ) - return self._unscale_p(p, pscale) + return self._unscale(p, pscale) def run_simulation( self, simulation_condition: tuple[str], solver: diffrax.AbstractSolver, controller: diffrax.AbstractStepSizeController, - max_steps: int, - ): - ts_preeq, ts_dyn, ts_posteq, my, iys = self.measurements[ + max_steps: jnp.int_, + ) -> tuple[jnp.float_, dict]: + """ + Run a simulation for a given simulation condition. + + :param simulation_condition: + Tuple of simulation conditions to run the simulation for. can be a single string (simulation only) or a + tuple of strings (pre-equilibration followed by simulation). 
+ :param solver: + ODE solver to use for simulation + :param controller: + Step size controller to use for simulation + :param max_steps: + Maximum number of steps to take during simulation + :return: + Tuple of log-likelihood and simulation statistics + """ + ts_preeq, ts_dyn, ts_posteq, my, iys = self._measurements[ simulation_condition ] p = self.load_parameters(simulation_condition[0]) @@ -256,7 +297,7 @@ def run_simulation( def run_simulations( problem: JAXProblem, - simulation_conditions: Iterable[tuple] = None, + simulation_conditions: Iterable[tuple], solver: diffrax.AbstractSolver = diffrax.Kvaerno5(), controller: diffrax.AbstractStepSizeController = diffrax.PIDController( rtol=1e-8, @@ -267,6 +308,22 @@ def run_simulations( ), max_steps: int = 2**14, ): + """ + Run simulations for a problem. + + :param problem: + Problem to run simulations for. + :param simulation_conditions: + Simulation conditions to run simulations for. + :param solver: + ODE solver to use for simulation. + :param controller: + Step size controller to use for simulation. + :param max_steps: + Maximum number of steps to take during simulation. + :return: + Overall negative log-likelihood and condition specific results and statistics. + """ results = { sc: problem.run_simulation(sc, solver, controller, max_steps) for sc in simulation_conditions diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index e75d4c6df6..0635aec0aa 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -85,6 +85,7 @@ jax = [ "jax>=0.4.34", "jaxlib>=0.4.34", "diffrax>=0.6.0", + "jaxtyping>=0.2.34", "equinox>=0.11.8", "optimistix>=0.0.9", "interpax>=0.3.3", diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 543f8f0544..0e1c48eb34 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -14,6 +14,8 @@ pysb = pytest.importorskip("pysb") +jax.config.update("jax_enable_x64", True) + def test_conversion(): pysb.SelfExporter.cleanup() # reset pysb diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 8fb5e17851..c25356ed33 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -15,6 +15,7 @@ import pandas as pd import petab.v1 as petab import pytest +import jax from amici.petab.petab_import import import_petab_problem import benchmark_models_petab from collections import defaultdict @@ -39,6 +40,8 @@ from amici.jax.petab import run_simulations, JAXProblem from petab.v1.visualize import plot_problem +jax.config.update("jax_enable_x64", True) + # Enable various debug output debug = False From d94714bce578e475b0104bf58c23bb1293a98ab2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 12:58:12 +0000 Subject: [PATCH 65/80] add runtime typechecks to jax tests --- python/sdist/amici/jax.template.py | 2 +- python/sdist/amici/jax/model.py | 8 ++++---- python/sdist/pyproject.toml | 3 ++- python/tests/test_jax.py | 3 ++- tests/benchmark-models/test_petab_benchmark.py | 3 ++- 5 files changed, 11 insertions(+), 8 deletions(-) diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index b9b37c8402..67a9decf07 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -65,7 +65,7 @@ def _tcl(self, x, pk): return TPL_TOTAL_CL_RET - def y(self, t, x, pk, tcl): + def _y(self, t, x, pk, tcl): TPL_X_SYMS = x TPL_PK_SYMS = pk diff --git a/python/sdist/amici/jax/model.py 
b/python/sdist/amici/jax/model.py index 2534728a96..22f994229d 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -410,16 +410,16 @@ def _sigmays( def simulate_condition( self, p: jt.Float[jt.Array, "np"], - p_preeq: jt.Float[jt.Array, "?np"], + p_preeq: jt.Float[jt.Array, "*np"], ts_preeq: jt.Float[jt.Array, "nt_preeq"], ts_dyn: jt.Float[jt.Array, "nt_dyn"], ts_posteq: jt.Float[jt.Array, "nt_posteq"], - my: jt.Float[jt.Array, "nt_preeq+nt_dyn+nt_posteq"], - iys: jt.Float[jt.Array, "nt_preeq+nt_dyn+nt_posteq"], + my: jt.Float[jt.Array, "nt"], + iys: jt.Int[jt.Array, "nt"], solver: diffrax.AbstractSolver, controller: diffrax.AbstractStepSizeController, adjoint: diffrax.AbstractAdjoint, - max_steps: jnp.int_, + max_steps: int | jnp.int_, ret: str = "llh", ): r""" diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index 0635aec0aa..c2a20fd0f2 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -71,7 +71,8 @@ test = [ # unsupported x86_64 / x86_64h "antimony!=2.14; platform_system=='Darwin' and platform_machine in 'x86_64h'", "scipy", - "pooch" + "pooch", + "beartype", ] vis = [ "matplotlib", diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 0e1c48eb34..d66f258e24 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -8,6 +8,7 @@ import jax import diffrax import numpy as np +from beartype import beartype from amici.pysb_import import pysb2amici from numpy.testing import assert_allclose @@ -158,7 +159,7 @@ def check_fields_jax( diffrax.RecursiveCheckpointAdjoint(), # adjoint 2**8, # max_steps ) - fun = jax_model.simulate_condition + fun = beartype(jax_model.simulate_condition) for output in ["llh", "x0", "x", "y", "res"]: oargs = (*args[:-2], diffrax.DirectAdjoint(), 2**8, output) diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index c25356ed33..132402f3c8 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -39,6 +39,7 @@ ) from amici.jax.petab import run_simulations, JAXProblem from petab.v1.visualize import plot_problem +from beartype import beartype jax.config.update("jax_enable_x64", True) @@ -354,7 +355,7 @@ def test_jax_llh(benchmark_problem): eqx.filter_value_and_grad(run_simulations, has_aux=True) )(jax_problem, simulation_conditions) else: - llh_jax, _ = eqx.filter_jit(run_simulations)( + llh_jax, _ = beartype(eqx.filter_jit(run_simulations))( jax_problem, simulation_conditions ) From 0a9fcdf5c61ae58930af4748c39a22de8153671c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 13:01:52 +0000 Subject: [PATCH 66/80] add coverage from benchmark tests --- .../test_benchmark_collection_models.yml | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test_benchmark_collection_models.yml b/.github/workflows/test_benchmark_collection_models.yml index dd520de16d..201ae88da8 100644 --- a/.github/workflows/test_benchmark_collection_models.yml +++ b/.github/workflows/test_benchmark_collection_models.yml @@ -66,7 +66,21 @@ jobs: env: AMICI_PARALLEL_COMPILE: "" run: | - cd tests/benchmark-models && pytest --durations=10 + cd tests/benchmark-models && pytest \ + --durations=10 + --cov=amici \ + --cov-report=xml:"coverage_py.xml" \ + --cov-append \ + + - name: Codecov Python + if: github.event_name == 'pull_request' || github.repository_owner == 'AMICI-dev' + uses: 
codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage_py.xml + flags: python + fail_ci_if_error: true + verbose: true # collect & upload results - name: Aggregate results From 186805c8f3d891ea7fa621e1b4b7336cce39d3f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Sun, 17 Nov 2024 20:23:55 +0000 Subject: [PATCH 67/80] add api versioning and reenable jit compilation --- python/sdist/amici/de_export.py | 3 +++ python/sdist/amici/jax.template.py | 2 ++ python/sdist/amici/jax/model.py | 12 +++++++++++- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 823f5f8ca1..1bace90510 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -278,6 +278,8 @@ def _prepare_model_folder(self) -> None: @log_execution_time("generating jax code", logger) def _generate_jax_code(self) -> None: + from amici.jax.model import JAXModel + eq_names = ( "xdot", "w", @@ -360,6 +362,7 @@ def jnp_array_str(array) -> str: }, **{ "MODEL_NAME": self.model_name, + "MODEL_API_VERSION": f"'{JAXModel.MODEL_API_VERSION}'", }, } os.makedirs( diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index 67a9decf07..05d82288d5 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -5,6 +5,8 @@ class JAXModel_TPL_MODEL_NAME(JAXModel): + api_version = TPL_MODEL_API_VERSION + def __init__(self): super().__init__() diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index 22f994229d..9335d1a0a7 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -18,6 +18,16 @@ class JAXModel(eqx.Module): classes inheriting from JAXModel. """ + MODEL_API_VERSION = "0.0.1" + api_version: str + + def __init__(self): + if self.api_version != self.MODEL_API_VERSION: + raise ValueError( + "JAXModel API version mismatch, please regenerate the model class." 
+ ) + super().__init__() + @abstractmethod def _xdot( self, @@ -406,7 +416,7 @@ def _sigmays( in_axes=(0, 0, None, None, 0), )(ts, xs, p, tcl, iys) - # @eqx.filter_jit + @eqx.filter_jit def simulate_condition( self, p: jt.Float[jt.Array, "np"], From 250f9dd4f618407965f63cd7cbc276ff18247dec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 18 Nov 2024 11:13:50 +0000 Subject: [PATCH 68/80] review comments --- documentation/conf.py | 1 + documentation/python_modules.rst | 1 + python/sdist/amici/de_export.py | 49 ++++++++++++------- python/sdist/amici/jax/__init__.py | 1 + python/sdist/amici/jax/model.py | 12 +++-- python/sdist/amici/jax/petab.py | 16 +++--- .../benchmark-models/test_petab_benchmark.py | 16 +++--- 7 files changed, 55 insertions(+), 41 deletions(-) diff --git a/documentation/conf.py b/documentation/conf.py index c86a145f9d..4445c62069 100644 --- a/documentation/conf.py +++ b/documentation/conf.py @@ -206,6 +206,7 @@ def install_doxygen(): "numpy": ("https://numpy.org/devdocs/", None), "sympy": ("https://docs.sympy.org/latest/", None), "python": ("https://docs.python.org/3", None), + "jax": ["https://jax.readthedocs.io/en/latest/", None], } # Add notebooks prolog with binder links diff --git a/documentation/python_modules.rst b/documentation/python_modules.rst index 2607447f0d..096dd0735f 100644 --- a/documentation/python_modules.rst +++ b/documentation/python_modules.rst @@ -25,6 +25,7 @@ AMICI Python API amici.petab_objective amici.petab_simulate amici.import_utils + amici.jax amici.de_export amici.de_model amici.de_model_components diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 1bace90510..4865851265 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -21,6 +21,7 @@ TYPE_CHECKING, Literal, ) +from itertools import chain import sympy as sp @@ -300,30 +301,38 @@ def jnp_array_str(array) -> str: return f"jnp.array([{elems}])" + # replaces Heaviside variables with corresponding functions + subs_heaviside = dict( + zip( + self.model.sym("h"), + [sp.Heaviside(x) for x in self.model.eq("root")], + strict=True, + ) + ) + # replaces observables with a generic my variable + subs_observables = dict( + zip( + self.model.sym("my"), + [sp.Symbol("my")] * len(self.model.sym("my")), + strict=True, + ) + ) + tpl_data = { + # assign named variable using corresponding algebraic formula (function body) **{ f"{eq_name.upper()}_EQ": "\n".join( self._code_printer_jax._get_sym_lines( (str(strip_pysb(s)) for s in self.model.sym(eq_name)), self.model.eq(eq_name).subs( - dict( - zip( - list(self.model.sym("h")) - + list(self.model.sym("my")), - [ - sp.Heaviside(x) - for x in self.model.eq("root") - ] - + [sp.Symbol("my")] - * len(self.model.sym("my")), - ) - ) + {**subs_heaviside, **subs_observables} ), indent, ) - )[indent:] + )[indent:] # remove indent for first line for eq_name in eq_names }, + # create jax array from concatenation of named variables **{ f"{eq_name.upper()}_RET": jnp_array_str( strip_pysb(s) for s in self.model.sym(eq_name) @@ -332,6 +341,7 @@ def jnp_array_str(array) -> str: else "jnp.array([])" for eq_name in eq_names }, + # assign named variables from a jax array **{ f"{sym_name.upper()}_SYMS": "".join( str(strip_pysb(s)) + ", " for s in self.model.sym(sym_name) @@ -340,6 +350,7 @@ def jnp_array_str(array) -> str: else "_" for sym_name in sym_names }, + # tuple of variable names (ids as they are unique) **{ f"{sym_name.upper()}_IDS": "".join( f'"{strip_pysb(s)}", ' for s in 
self.model.sym(sym_name) @@ -349,19 +360,19 @@ def jnp_array_str(array) -> str: for sym_name in ("p", "k", "y", "x") }, **{ + # in jax model we do not need to distinguish between p (parameters) and + # k (fixed parameters) so we use a single variable combining both "PK_SYMS": "".join( str(strip_pysb(s)) + ", " - for s in list(self.model.sym("p")) - + list(self.model.sym("k")) + for s in chain(self.model.sym("p"), self.model.sym("k")) ), "PK_IDS": "".join( f'"{strip_pysb(s)}", ' - for s in list(self.model.sym("p")) - + list(self.model.sym("k")) + for s in chain(self.model.sym("p"), self.model.sym("k")) ), - }, - **{ "MODEL_NAME": self.model_name, + # keep track of the API version that the model was generated with so we + # can flag conflicts in the future "MODEL_API_VERSION": f"'{JAXModel.MODEL_API_VERSION}'", }, } diff --git a/python/sdist/amici/jax/__init__.py b/python/sdist/amici/jax/__init__.py index e69de29bb2..7f8575e88e 100644 --- a/python/sdist/amici/jax/__init__.py +++ b/python/sdist/amici/jax/__init__.py @@ -0,0 +1 @@ +"""Interface to facilitate AMICI generated models using JAX""" diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index 9335d1a0a7..ceeea8d817 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -173,6 +173,7 @@ def _llh( ) -> jt.Float[jt.Scalar, ""]: """ Compute the log-likelihood of the observable for the specified observable index. + :param t: time point :param x: @@ -430,10 +431,11 @@ def simulate_condition( controller: diffrax.AbstractStepSizeController, adjoint: diffrax.AbstractAdjoint, max_steps: int | jnp.int_, - ret: str = "llh", + ret: str = "nllh", ): r""" Simulate a condition. + :param p: parameters for simulation ordered according to ids in :ivar parameter_ids: :param p_preeq: @@ -464,8 +466,8 @@ def simulate_condition( maximum number of solver steps :param ret: which output to return. Valid values are - - `llh`: negative log-likelihood (default) - - `llhs`: negative log-likelihoods at each time point + - `nllh`: negative log-likelihood (default) + - `llhs`: log-likelihoods at each time point - `x0`: full initial state vector (after pre-equilibration) - `x0_solver`: reduced initial state vector (after pre-equilibration) - `x`: full state vector @@ -532,9 +534,9 @@ def simulate_condition( x = jnp.concatenate((x_preq, x_dyn, x_posteq), axis=0) llhs = self._llhs(ts, x, p, tcl, my, iys) - llh = -jnp.sum(llhs) + nllh = -jnp.sum(llhs) return { - "llh": llh, + "nllh": nllh, "llhs": llhs, "x": self._x_rdatas(x, tcl), "x_solver": x, diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index aae83f410c..b1ee96e167 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -108,7 +108,7 @@ def _get_parameter_mappings( for sim_var, value in mapping.map_sim_var.items(): if isinstance(value, Number) and not np.isfinite(value): mapping.map_sim_var[sim_var] = 1.0 - return dict(zip(scs, mappings)) + return dict(zip(scs, mappings, strict=True)) def _get_measurements( self, simulation_conditions: pd.DataFrame @@ -117,7 +117,7 @@ def _get_measurements( tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray], ]: """ - Set measurements for the model based on the provided simulation conditions. + Get measurements for the model based on the provided simulation conditions. :param simulation_conditions: Simulation conditions to create parameter mappings for. 
Same format as returned by @@ -156,17 +156,13 @@ def _get_measurements( ) return measurements - def _get_nominal_parameter_values(self) -> jnp.ndarray: + def _get_nominal_parameter_values(self) -> jt.Float[jt.Array, "np"]: """ - Set the nominal parameter values for the model based on the nominal values in the PEtab problem. + Get the nominal parameter values for the model based on the nominal values in the PEtab problem. :return: - JAXModel instance with parameter values set to the nominal values. + jax array with nominal parameter values """ - if self._petab_problem is None: - raise ValueError( - "PEtab problem not set, cannot set nominal values." - ) return jnp.array( [ petab.scale( @@ -306,7 +302,7 @@ def run_simulations( icoeff=0.3, dcoeff=0.0, ), - max_steps: int = 2**14, + max_steps: int = 2**10, ): """ Run simulations for a problem. diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py index 132402f3c8..7a0afc6832 100644 --- a/tests/benchmark-models/test_petab_benchmark.py +++ b/tests/benchmark-models/test_petab_benchmark.py @@ -9,13 +9,10 @@ from pathlib import Path import fiddy import amici -import equinox as eqx -import jax.numpy as jnp import numpy as np import pandas as pd import petab.v1 as petab import pytest -import jax from amici.petab.petab_import import import_petab_problem import benchmark_models_petab from collections import defaultdict @@ -37,11 +34,8 @@ rdatas_to_measurement_df, simulate_petab, ) -from amici.jax.petab import run_simulations, JAXProblem -from petab.v1.visualize import plot_problem -from beartype import beartype -jax.config.update("jax_enable_x64", True) +from petab.v1.visualize import plot_problem # Enable various debug output @@ -267,6 +261,14 @@ def benchmark_problem(request): "ignore:Adjoint sensitivity analysis for models with discontinuous ", ) def test_jax_llh(benchmark_problem): + import jax + import equinox as eqx + import jax.numpy as jnp + from amici.jax.petab import run_simulations, JAXProblem + + jax.config.update("jax_enable_x64", True) + from beartype import beartype + problem_id, petab_problem, amici_model = benchmark_problem if problem_id in ( From dc4992e888baba81d10d7d8629162fcedf793636 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 18 Nov 2024 14:01:47 +0000 Subject: [PATCH 69/80] use temporary directories --- python/sdist/amici/jaxcodeprinter.py | 2 ++ python/tests/test_jax.py | 53 ++++++++++++++-------------- 2 files changed, 29 insertions(+), 26 deletions(-) diff --git a/python/sdist/amici/jaxcodeprinter.py b/python/sdist/amici/jaxcodeprinter.py index f2d5b29248..ed9181cc09 100644 --- a/python/sdist/amici/jaxcodeprinter.py +++ b/python/sdist/amici/jaxcodeprinter.py @@ -2,6 +2,7 @@ import re from collections.abc import Iterable +from logging import warning import sympy as sp from sympy.printing.numpy import NumPyPrinter @@ -22,6 +23,7 @@ def doprint(self, expr: sp.Expr, assign_to: str | None = None) -> str: ) from e def _print_AmiciSpline(self, expr: sp.Expr) -> str: + warning("Spline interpolation is support in JAX is untested") # FIXME: untested, where are spline nodes coming from anyways? 
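        # This printer only emits source text; for the emitted expression to be
        # evaluable, the generated module has to provide `interp1d` and `time` at
        # runtime (presumably via the `interpax` dependency added to the `jax` extra).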
return f'interp1d(time, {self.doprint(expr.args[2:])}, kind="cubic")' diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index d66f258e24..d124a6e1be 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -11,6 +11,7 @@ from beartype import beartype from amici.pysb_import import pysb2amici +from amici.testing import TemporaryDirectoryWinSafe from numpy.testing import assert_allclose pysb = pytest.importorskip("pysb") @@ -28,17 +29,17 @@ def test_conversion(): pysb.Rule("conv", a(s="a") >> a(s="b"), pysb.Parameter("kcat", 0.05)) pysb.Observable("ab", a(s="b")) - outdir = model.name - pysb2amici(model, outdir, verbose=True, observables=["ab"]) + with TemporaryDirectoryWinSafe(prefix=model.name) as outdir: + pysb2amici(model, outdir, verbose=True, observables=["ab"]) - model_module = amici.import_model_module( - module_name=model.name, module_path=outdir - ) + model_module = amici.import_model_module( + module_name=model.name, module_path=outdir + ) - ts = tuple(np.linspace(0, 1, 10)) - p = jnp.stack((1.0, 0.1), axis=-1) - k = tuple() - _test_model(model_module, ts, p, k) + ts = tuple(np.linspace(0, 1, 10)) + p = jnp.stack((1.0, 0.1), axis=-1) + k = tuple() + _test_model(model_module, ts, p, k) @pytest.mark.filterwarnings( @@ -74,23 +75,23 @@ def test_dimerization(): pysb.Observable("a_obs", a()) pysb.Observable("b_obs", b()) - outdir = model.name - pysb2amici( - model, - outdir, - verbose=True, - observables=["a_obs", "b_obs"], - constant_parameters=["ksyn_a", "ksyn_b"], - ) - - model_module = amici.import_model_module( - module_name=model.name, module_path=outdir - ) - - ts = tuple(np.linspace(0, 1, 10)) - p = jnp.stack((5, 0.5, 0.5, 0.5), axis=-1) - k = (0.5, 5) - _test_model(model_module, ts, p, k) + with TemporaryDirectoryWinSafe(prefix=model.name) as outdir: + pysb2amici( + model, + outdir, + verbose=True, + observables=["a_obs", "b_obs"], + constant_parameters=["ksyn_a", "ksyn_b"], + ) + + model_module = amici.import_model_module( + module_name=model.name, module_path=outdir + ) + + ts = tuple(np.linspace(0, 1, 10)) + p = jnp.stack((5, 0.5, 0.5, 0.5), axis=-1) + k = (0.5, 5) + _test_model(model_module, ts, p, k) def _test_model(model_module, ts, p, k): From d547509d0fee609eb8eed97e850266b61632c8dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 18 Nov 2024 15:58:02 +0000 Subject: [PATCH 70/80] fix doc --- python/sdist/amici/jax/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/python/sdist/amici/jax/__init__.py b/python/sdist/amici/jax/__init__.py index 7f8575e88e..e14d231e1e 100644 --- a/python/sdist/amici/jax/__init__.py +++ b/python/sdist/amici/jax/__init__.py @@ -1 +1,6 @@ """Interface to facilitate AMICI generated models using JAX""" + +from amici.jax.petab import JAXProblem, run_simulations +from amici.jax.model import JAXModel + +__all__ = ["JAXModel", "JAXProblem", "run_simulations"] From 82bfe311f7bbc47b42d19cbb6bbfa80245cdf7e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 18 Nov 2024 15:58:14 +0000 Subject: [PATCH 71/80] Update test_jax.py --- python/tests/test_jax.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index d124a6e1be..1ccd388257 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -162,7 +162,7 @@ def check_fields_jax( ) fun = beartype(jax_model.simulate_condition) - for output in ["llh", "x0", "x", "y", "res"]: + for output in ["nllh", "x0", "x", "y", "res"]: oargs = 
(*args[:-2], diffrax.DirectAdjoint(), 2**8, output) if sensi_order == amici.SensitivityOrder.none: r_jax[output] = fun(p, *oargs)[0] From a0108034f79935beeb84b6c5a397c4ad3935de35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Mon, 18 Nov 2024 15:59:24 +0000 Subject: [PATCH 72/80] don't generate code if jax/diffrax not available --- python/sdist/amici/de_export.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/python/sdist/amici/de_export.py b/python/sdist/amici/de_export.py index 4865851265..416dec5694 100644 --- a/python/sdist/amici/de_export.py +++ b/python/sdist/amici/de_export.py @@ -279,7 +279,13 @@ def _prepare_model_folder(self) -> None: @log_execution_time("generating jax code", logger) def _generate_jax_code(self) -> None: - from amici.jax.model import JAXModel + try: + from amici.jax.model import JAXModel + except ImportError: + logger.warning( + "Could not import JAXModel. JAX code will not be generated." + ) + return eq_names = ( "xdot", From f7c2c10e4424417948c688e6ea8e99a2bb18fa18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 09:54:45 +0000 Subject: [PATCH 73/80] add example --- documentation/ExampleJaxPEtab.ipynb | 1 + documentation/python_examples.rst | 1 + .../example_jax_petab/ExampleJaxPEtab.ipynb | 1171 +++++++++++++++++ python/sdist/amici/jax.template.py | 2 +- python/sdist/amici/jax/model.py | 29 +- python/sdist/amici/jax/petab.py | 24 +- 6 files changed, 1210 insertions(+), 18 deletions(-) create mode 120000 documentation/ExampleJaxPEtab.ipynb create mode 100644 python/examples/example_jax_petab/ExampleJaxPEtab.ipynb diff --git a/documentation/ExampleJaxPEtab.ipynb b/documentation/ExampleJaxPEtab.ipynb new file mode 120000 index 0000000000..b3f3b4e18e --- /dev/null +++ b/documentation/ExampleJaxPEtab.ipynb @@ -0,0 +1 @@ +./python/examples/example_jax_petab/ExampleJaxPEtab.ipynb \ No newline at end of file diff --git a/documentation/python_examples.rst b/documentation/python_examples.rst index 286ebf3ffd..fd1163690e 100644 --- a/documentation/python_examples.rst +++ b/documentation/python_examples.rst @@ -17,5 +17,6 @@ Various example notebooks. example_errors.ipynb example_large_models/example_performance_optimization.ipynb ExampleJax.ipynb + ExampleJaxPEtab.ipynb ExampleSplines.ipynb ExampleSplinesSwameye2003.ipynb diff --git a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb new file mode 100644 index 0000000000..3515567706 --- /dev/null +++ b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb @@ -0,0 +1,1171 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d4d2bc5c", + "metadata": {}, + "source": [ + "# Simulating AMICI models using JAX\n", + "\n", + "## Overview\n", + "\n", + "This guide demonstrates how to use AMICI to export models in a format compatible with the [JAX](https://jax.readthedocs.io/en/latest/) ecosystem, enabling simulations with the [diffrax](https://docs.kidger.site/diffrax/) library. " + ] + }, + { + "cell_type": "markdown", + "id": "fb2fe897", + "metadata": {}, + "source": [ + "## Preparation\n", + "\n", + "To begin, we will import a model using [PEtab](https://petab.readthedocs.io). For this demonstration, we will utilize the [Benchmark Collection](https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab), which provides a diverse set of models. 
For more information on importing PEtab models, refer to the corresponding [PEtab notebook](https://amici.readthedocs.io/en/latest/petab.html).\n", + "\n", + "In this tutorial, we will import the Böhm model from the Benchmark Collection. Using [amici.petab_import](https://amici.readthedocs.io/en/latest/generated/amici.petab_import.html#amici.petab_import.import_petab_problem), we will load the PEtab problem. To create a [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) instead of a standard AMICI model, we set the `jax` parameter to `True`. As we won't use the corresponding AMICI model, we set the `compile_` to False.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "6ada3fb8", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:53.712145Z", + "start_time": "2024-11-19T09:50:47.191184Z" + } + }, + "outputs": [], + "source": [ + "from amici.petab.petab_import import import_petab_problem\n", + "import petab.v1 as petab\n", + "\n", + "# Define the model name and YAML file location\n", + "model_name = \"Boehm_JProteomeRes2014\"\n", + "yaml_url = (\n", + " f\"https://raw.githubusercontent.com/Benchmarking-Initiative/Benchmark-Models-PEtab/\"\n", + " f\"master/Benchmark-Models/{model_name}/{model_name}.yaml\"\n", + ")\n", + "\n", + "# Load the PEtab problem from the YAML file\n", + "petab_problem = petab.Problem.from_yaml(yaml_url)\n", + "\n", + "# Import the PEtab problem as a JAX-compatible AMICI model\n", + "jax_model = import_petab_problem(\n", + " petab_problem,\n", + " compile_=False, # do not compile regular amici model\n", + " verbose=False, # no text output\n", + " jax=True, # return jax model\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "5258566d99c89ba4", + "metadata": {}, + "source": [ + "## Simulation\n", + "In principle, we can already use this model for simulation using the [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) method. However, this approach can be cumbersome as timepoints, data etc need to be specified manually. Instead, we process the PEtab problem into a [JAXProblem](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem), which enables efficient simulation using [amici.jax.run_simulations]((https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.run_simulations)." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "76c1331372cd51b4", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:56.042924Z", + "start_time": "2024-11-19T09:50:53.718372Z" + } + }, + "outputs": [], + "source": [ + "from amici.jax import JAXProblem, run_simulations\n", + "\n", + "# Create a JAXProblem from the JAX model and PEtab problem\n", + "jax_problem = JAXProblem(jax_model, petab_problem)\n", + "\n", + "# Run simulations and compute the log-likelihood\n", + "llh, results = run_simulations(jax_problem)" + ] + }, + { + "cell_type": "markdown", + "id": "5f8684d76368bd76", + "metadata": {}, + "source": "This simulates the model for all conditions using the nominal parameter values. Simple, right? Now, let’s take a look at the simulation results." 
+ }, + { + "cell_type": "code", + "execution_count": 3, + "id": "2fc284bd3bfb3a62", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:56.141898Z", + "start_time": "2024-11-19T09:50:56.134945Z" + }, + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(Array(nan, dtype=float32),\n", + " {'stats_dyn': {'max_steps': 1024,\n", + " 'num_accepted_steps': Array(778, dtype=int32, weak_type=True),\n", + " 'num_rejected_steps': Array(246, dtype=int32, weak_type=True),\n", + " 'num_steps': Array(1024, dtype=int32, weak_type=True)},\n", + " 'stats_posteq': None,\n", + " 'stats_preeq': None,\n", + " 'ts': Array([ 0. , 0. , 0. , 2.5, 2.5, 2.5, 5. , 5. , 5. ,\n", + " 10. , 10. , 10. , 15. , 15. , 15. , 20. , 20. , 20. ,\n", + " 30. , 30. , 30. , 40. , 40. , 40. , 50. , 50. , 50. ,\n", + " 60. , 60. , 60. , 80. , 80. , 80. , 100. , 100. , 100. ,\n", + " 120. , 120. , 120. , 160. , 160. , 160. , 200. , 200. , 200. ,\n", + " 240. , 240. , 240. ], dtype=float32),\n", + " 'x': Array([[143.8668, 63.7332, 0. , 0. , 0. , 0. ,\n", + " 0. , 0. ],\n", + " [143.8668, 63.7332, 0. , 0. , 0. , 0. ,\n", + " 0. , 0. ],\n", + " [143.8668, 63.7332, 0. , 0. , 0. , 0. ,\n", + " 0. , 0. ],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, 
inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf],\n", + " [ inf, inf, inf, inf, inf, inf,\n", + " inf, inf]], dtype=float32)})" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Define the simulation condition\n", + "simulation_condition = (\"model1_data1\",)\n", + "\n", + "# Access the results for the specified condition\n", + "results[simulation_condition]" + ] + }, + { + "cell_type": "markdown", + "id": "aa46125e508d38d3", + "metadata": {}, + "source": [ + "Unfortunately, the simulation failed! As seen in the output, the simulation broke down after the initial timepoint, indicated by the `inf` values in the state variables `results[simulation_condition][1].x` and the `nan` likelihood value. A closer inspection of this variable provides additional clues about what might have gone wrong.\n", + "\n", + "The issue stems from using single precision, as indicated by the `float32` dtype of state variables. Single precision is generally a [bad idea](https://docs.kidger.site/diffrax/examples/stiff_ode/) for stiff systems like the Böhm model. Let’s retry the simulation with double precision." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "8e5006774534ba3a", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:58.227222Z", + "start_time": "2024-11-19T09:50:56.235939Z" + }, + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{('model1_data1',): (Array(-138.22199834, dtype=float64),\n", + " {'stats_dyn': {'max_steps': 1024,\n", + " 'num_accepted_steps': Array(125, dtype=int64, weak_type=True),\n", + " 'num_rejected_steps': Array(7, dtype=int64, weak_type=True),\n", + " 'num_steps': Array(132, dtype=int64, weak_type=True)},\n", + " 'stats_posteq': None,\n", + " 'stats_preeq': None,\n", + " 'ts': Array([ 0. , 0. , 0. , 2.5, 2.5, 2.5, 5. , 5. , 5. ,\n", + " 10. , 10. , 10. , 15. , 15. , 15. , 20. , 20. , 20. ,\n", + " 30. , 30. , 30. , 40. , 40. , 40. , 50. , 50. , 50. ,\n", + " 60. , 60. , 60. , 80. , 80. , 80. , 100. , 100. , 100. ,\n", + " 120. , 120. , 120. , 160. , 160. , 160. , 200. , 200. , 200. ,\n", + " 240. , 240. , 240. 
], dtype=float64),\n", + " 'x': Array([[1.43866806e+02, 6.37332001e+01, 0.00000000e+00, 0.00000000e+00,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " [1.43866806e+02, 6.37332001e+01, 0.00000000e+00, 0.00000000e+00,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " [1.43866806e+02, 6.37332001e+01, 0.00000000e+00, 0.00000000e+00,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " [5.34614747e+01, 2.88662915e+01, 1.73038463e+01, 5.38666098e-05,\n", + " 1.57043241e-05, 1.12989551e+02, 1.44740461e+00, 2.65965680e+01],\n", + " [5.34614747e+01, 2.88662915e+01, 1.73038463e+01, 5.38666098e-05,\n", + " 1.57043241e-05, 1.12989551e+02, 1.44740461e+00, 2.65965680e+01],\n", + " [5.34614747e+01, 2.88662915e+01, 1.73038463e+01, 5.38666098e-05,\n", + " 1.57043241e-05, 1.12989551e+02, 1.44740461e+00, 2.65965680e+01],\n", + " [3.40645243e+01, 1.96396741e+01, 2.10101056e+01, 2.04431389e-05,\n", + " 6.79533169e-06, 1.36155797e+02, 3.93060446e+00, 3.39422194e+01],\n", + " [3.40645243e+01, 1.96396741e+01, 2.10101056e+01, 2.04431389e-05,\n", + " 6.79533169e-06, 1.36155797e+02, 3.93060446e+00, 3.39422194e+01],\n", + " [3.40645243e+01, 1.96396741e+01, 2.10101056e+01, 2.04431389e-05,\n", + " 6.79533169e-06, 1.36155797e+02, 3.93060446e+00, 3.39422194e+01],\n", + " [2.17740069e+01, 1.28936829e+01, 2.26400305e+01, 7.29828626e-06,\n", + " 2.55916689e-06, 1.49922977e+02, 9.56261350e+00, 3.90845534e+01],\n", + " [2.17740069e+01, 1.28936829e+01, 2.26400305e+01, 7.29828626e-06,\n", + " 2.55916689e-06, 1.49922977e+02, 9.56261350e+00, 3.90845534e+01],\n", + " [2.17740069e+01, 1.28936829e+01, 2.26400305e+01, 7.29828626e-06,\n", + " 2.55916689e-06, 1.49922977e+02, 9.56261350e+00, 3.90845534e+01],\n", + " [1.78289538e+01, 1.02603483e+01, 2.23703281e+01, 4.27571773e-06,\n", + " 1.41605997e-06, 1.53605377e+02, 1.53104054e+01, 4.07264964e+01],\n", + " [1.78289538e+01, 1.02603483e+01, 2.23703281e+01, 4.27571773e-06,\n", + " 1.41605997e-06, 1.53605377e+02, 1.53104054e+01, 4.07264964e+01],\n", + " [1.78289538e+01, 1.02603483e+01, 2.23703281e+01, 4.27571773e-06,\n", + " 1.41605997e-06, 1.53605377e+02, 1.53104054e+01, 4.07264964e+01],\n", + " [1.63397301e+01, 8.95194886e+00, 2.15687556e+01, 3.13802765e-06,\n", + " 9.41897178e-07, 1.54369347e+02, 2.09093940e+01, 4.12091821e+01],\n", + " [1.63397301e+01, 8.95194886e+00, 2.15687556e+01, 3.13802765e-06,\n", + " 9.41897178e-07, 1.54369347e+02, 2.09093940e+01, 4.12091821e+01],\n", + " [1.63397301e+01, 8.95194886e+00, 2.15687556e+01, 3.13802765e-06,\n", + " 9.41897178e-07, 1.54369347e+02, 2.09093940e+01, 4.12091821e+01],\n", + " [1.59598663e+01, 7.84978463e+00, 1.95400559e+01, 2.28580865e-06,\n", + " 5.52965361e-07, 1.52878988e+02, 3.13834269e+01, 4.08423997e+01],\n", + " [1.59598663e+01, 7.84978463e+00, 1.95400559e+01, 2.28580865e-06,\n", + " 5.52965361e-07, 1.52878988e+02, 3.13834269e+01, 4.08423997e+01],\n", + " [1.59598663e+01, 7.84978463e+00, 1.95400559e+01, 2.28580865e-06,\n", + " 5.52965361e-07, 1.52878988e+02, 3.13834269e+01, 4.08423997e+01],\n", + " [1.68960409e+01, 7.57954992e+00, 1.74766781e+01, 1.95598628e-06,\n", + " 3.93623013e-07, 1.49923893e+02, 4.08004734e+01, 3.97639408e+01],\n", + " [1.68960409e+01, 7.57954992e+00, 1.74766781e+01, 1.95598628e-06,\n", + " 3.93623013e-07, 1.49923893e+02, 4.08004734e+01, 3.97639408e+01],\n", + " [1.68960409e+01, 7.57954992e+00, 1.74766781e+01, 1.95598628e-06,\n", + " 3.93623013e-07, 1.49923893e+02, 4.08004734e+01, 3.97639408e+01],\n", + " 
[1.83667585e+01, 7.66955396e+00, 1.55594015e+01, 1.76473276e-06,\n", + " 3.07719966e-07, 1.46418868e+02, 4.91998176e+01, 3.84066930e+01],\n", + " [1.83667585e+01, 7.66955396e+00, 1.55594015e+01, 1.76473276e-06,\n", + " 3.07719966e-07, 1.46418868e+02, 4.91998176e+01, 3.84066930e+01],\n", + " [1.83667585e+01, 7.66955396e+00, 1.55594015e+01, 1.76473276e-06,\n", + " 3.07719966e-07, 1.46418868e+02, 4.91998176e+01, 3.84066930e+01],\n", + " [2.01288255e+01, 7.95104827e+00, 1.38272785e+01, 1.61833093e-06,\n", + " 2.52512177e-07, 1.42637837e+02, 5.66687226e+01, 3.69287741e+01],\n", + " [2.01288255e+01, 7.95104827e+00, 1.38272785e+01, 1.61833093e-06,\n", + " 2.52512177e-07, 1.42637837e+02, 5.66687226e+01, 3.69287741e+01],\n", + " [2.01288255e+01, 7.95104827e+00, 1.38272785e+01, 1.61833093e-06,\n", + " 2.52512177e-07, 1.42637837e+02, 5.66687226e+01, 3.69287741e+01],\n", + " [2.42069672e+01, 8.82343809e+00, 1.09015504e+01, 1.36440625e-06,\n", + " 1.81275253e-07, 1.34584160e+02, 6.91907904e+01, 3.38618223e+01],\n", + " [2.42069672e+01, 8.82343809e+00, 1.09015504e+01, 1.36440625e-06,\n", + " 1.81275253e-07, 1.34584160e+02, 6.91907904e+01, 3.38618223e+01],\n", + " [2.42069672e+01, 8.82343809e+00, 1.09015504e+01, 1.36440625e-06,\n", + " 1.81275253e-07, 1.34584160e+02, 6.91907904e+01, 3.38618223e+01],\n", + " [2.88236929e+01, 9.92100237e+00, 8.58815552e+00, 1.12770626e-06,\n", + " 1.33599425e-07, 1.26069389e+02, 7.90544164e+01, 3.08213014e+01],\n", + " [2.88236929e+01, 9.92100237e+00, 8.58815552e+00, 1.12770626e-06,\n", + " 1.33599425e-07, 1.26069389e+02, 7.90544164e+01, 3.08213014e+01],\n", + " [2.88236929e+01, 9.92100237e+00, 8.58815552e+00, 1.12770626e-06,\n", + " 1.33599425e-07, 1.26069389e+02, 7.90544164e+01, 3.08213014e+01],\n", + " [3.38427746e+01, 1.11365012e+01, 6.75633027e+00, 9.06279023e-07,\n", + " 9.81352036e-08, 1.17230823e+02, 8.68156402e+01, 2.78994196e+01],\n", + " [3.38427746e+01, 1.11365012e+01, 6.75633027e+00, 9.06279023e-07,\n", + " 9.81352036e-08, 1.17230823e+02, 8.68156402e+01, 2.78994196e+01],\n", + " [3.38427746e+01, 1.11365012e+01, 6.75633027e+00, 9.06279023e-07,\n", + " 9.81352036e-08, 1.17230823e+02, 8.68156402e+01, 2.78994196e+01],\n", + " [4.45767678e+01, 1.36929100e+01, 4.13936161e+00, 5.34332520e-07,\n", + " 5.04178629e-08, 9.91750041e+01, 9.76743159e+01, 2.25642862e+01],\n", + " [4.45767678e+01, 1.36929100e+01, 4.13936161e+00, 5.34332520e-07,\n", + " 5.04178629e-08, 9.91750041e+01, 9.76743159e+01, 2.25642862e+01],\n", + " [4.45767678e+01, 1.36929100e+01, 4.13936161e+00, 5.34332520e-07,\n", + " 5.04178629e-08, 9.91750041e+01, 9.76743159e+01, 2.25642862e+01],\n", + " [5.53512751e+01, 1.61684905e+01, 2.47997315e+00, 2.79973425e-07,\n", + " 2.38894456e-08, 8.17101310e+01, 1.04245916e+02, 1.80088542e+01],\n", + " [5.53512751e+01, 1.61684905e+01, 2.47997315e+00, 2.79973425e-07,\n", + " 2.38894456e-08, 8.17101310e+01, 1.04245916e+02, 1.80088542e+01],\n", + " [5.53512751e+01, 1.61684905e+01, 2.47997315e+00, 2.79973425e-07,\n", + " 2.38894456e-08, 8.17101310e+01, 1.04245916e+02, 1.80088542e+01],\n", + " [6.52754860e+01, 1.83796881e+01, 1.44531833e+00, 1.32320205e-07,\n", + " 1.04906457e-08, 6.59469727e+01, 1.08115837e+02, 1.42437160e+01],\n", + " [6.52754860e+01, 1.83796881e+01, 1.44531833e+00, 1.32320205e-07,\n", + " 1.04906457e-08, 6.59469727e+01, 1.08115837e+02, 1.42437160e+01],\n", + " [6.52754860e+01, 1.83796881e+01, 1.44531833e+00, 1.32320205e-07,\n", + " 1.04906457e-08, 6.59469727e+01, 1.08115837e+02, 1.42437160e+01]], dtype=float64)})}" + ] + }, + "execution_count": 4, + 
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import jax\n", + "\n", + "# Enable double precision in JAX\n", + "jax.config.update(\"jax_enable_x64\", True)\n", + "\n", + "# Re-run simulations with double precision\n", + "llh, results = run_simulations(jax_problem)\n", + "\n", + "results" + ] + }, + { + "cell_type": "markdown", + "id": "fea37568206351f7", + "metadata": {}, + "source": "Success! The simulation completed successfully, and we can now plot the resulting state trajectories." + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "95c75d098d3a1822", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:58.490052Z", + "start_time": "2024-11-19T09:50:58.305876Z" + }, + "scrolled": true + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsAAAAIjCAYAAAAN/63DAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAD+cUlEQVR4nOzdd1iTZ/fA8W8We4migAouREERRWW4cO9tHa1trVa7tO1rWzut47W1te1bW7Xtr4qzdVTrqKPuraCC4t6CE9yIyApJfn8gKSmooEAY53NdXJInT57nJA+B451zn1thMBgMCCGEEEIIUUYozR2AEEIIIYQQRUkSYCGEEEIIUaZIAiyEEEIIIcoUSYCFEEIIIUSZIgmwEEIIIYQoUyQBFkIIIYQQZYokwEIIIYQQokyRBFgIIYQQQpQpkgALIYQQQogyRRJgIUSJN2XKFOrUqYNerzd3KLmKjY1FoVAwd+7cfD92+/btKBQKtm/fXuBxPU5oaCihoaFFes7CFBQUxJgxY8wdhhCimJAEWAhRoiUmJvL111/z4YcfolRm/kp72mSzpDh9+jT/+c9/CAkJwcrKCoVCQWxsrLnDAiA5OZnx48c/U8KelJTEuHHj6NSpE87Ozo+9nqGhoQwZMuSJx/zwww+ZMWMG8fHxTx2XEKL0kARYCFGizZ49m4yMDAYNGmTuUIpMeHg4P/74I/fv36du3brmDsdEcnIyEyZMeKYE+NatW0ycOJGTJ0/SoEGDAomrZ8+eODg48NNPPxXI8YQQJZskwEKIEm3OnDn06NEDKysrc4dSZHr06EFCQgJHjx7lhRdeMHc4Bc7NzY24uDguXrzIN998UyDHVCqV9OvXj/nz52MwGArkmEKIkksSYCFEiRUTE8ORI0do167dY/cbP348CoWCM2fOMHjwYBwdHXFxcWHs2LEYDAYuX75sHCF0dXXlu+++y3GMGzduMGzYMCpVqoSVlRUNGjRg3rx5OfZLSEhgyJAhODo64uTkxMsvv0xCQkKucZ06dYp+/frh7OyMlZUVjRs35q+//nri83Z2dsbe3v6J++XVr7/+Ss2aNbG2tqZp06bs2rUrxz7p6el8/vnnBAQE4OjoiK2tLS1atGDbtm3GfWJjY3FxcQFgwoQJKBQKFAoF48ePB+DIkSMMGTKEGjVqYGVlhaurK0OHDuX27dsm57K0tMTV1bXAnl+W9u3bc/HiRaKjowv82EKIkkUSYCFEibV3714AGjVqlKf9BwwYgF6v56uvviIwMJBJkyYxdepU2rdvT+XKlfn666+pVasW77//Pjt37jQ+LiUlhdDQUBYsWMALL7zAN998g6OjI0OGDOGHH34w7mcwGOjZsycLFixg8ODBTJo0iStXrvDyyy/niOX48eMEBQVx8uRJPvroI7777jtsbW3p1asXK1aseMZXJu/CwsJ47bXXcHV1ZcqUKTRr1owePXpw+fJlk/0SExOZNWsWoaGhfP3114wfP56bN2/SsWNHY0Lp4uLCzz//DEDv3r1ZsGABCxYsoE+fPgBs2rSJCxcu8MorrzBt2jQGDhzI4sWL6dKlS5GMygYEBACwZ8+eQj+XEKKYMwghRAn12WefGQDD/fv3H7vfuHHjDIBhxIgRxm0ZGRmGKlWqGBQKheGrr74ybr97967B2tra8PLLLxu3TZ061QAYfvvtN+O29PR0Q3BwsMHOzs6QmJhoMBgMhpUrVxoAw5QpU0zO06JFCwNgmDNnjnF727ZtDfXr1zekpqYat+n1ekNISIjBy8vLuG3btm0GwLBt27Zcn9s333xjAAwxMTGPfQ1yk56ebqhYsaLB39/fkJaWZtz+66+/GgBDq1atTJ5H9n0MhszXqlKlSoahQ4cat928edMAGMaNG5fjfMnJyTm2LVq0yAAYdu7cmWuMBw4cyPHaPQsLCwvDG2+8USDHEkKUXDICLIQosW7fvo1arcbOzi5P+7/66qvG71UqFY0bN8ZgMDBs2DDjdicnJ7y9vblw4YJx27p163B1dTWZaKfRaHj77bdJSkpix44dxv3UajVvvPGGyXlGjRplEsedO3fYunUr/fv35/79+9y6dYtbt25x+/ZtOnbsyNmzZ7l69Wr+XoynEBkZyY0bN3j99dexsLAwbs8q4chOpVIZ99Hr9dy5c4eMjAwaN27MwYMH83Q+a2tr4/epqancunWLoKAggDwf41mVK1eOW7duFcm5hBDFl9rcAQghRFHx8PAwue3o6IiVlRUVKlTIsT17XerFixfx8vIytlnLktWB4eLFi8Z/3dzcciTk3t7eJrfPnTuHwWBg7NixjB07NtdYb9y4QeXKlfPx7PIvK24vLy+T7RqNhho1auTYf968eXz33XecOnUKrVZr3F69evU8ne/OnTtMmDCBxYsXc+PGDZP77t27l9/wn4rBYEChUBTJuYQQxZckwEKIEqt8+fJkZGRw//79PE0KU6lUedoGFGpNataCHe+//z4dO3bMdZ9atWoV2vmfxm+//caQIUPo1asXH3zwARUrVkSlUjF58mTOnz+fp2P079+fvXv38sEHH+Dv74+dnR16vZ5OnToV2SImCQkJOf7DI4QoeyQBFkKUWHXq1AEyu0H4+fkV2nk8PT05cuQIer3eZBT41KlTxvuz/t2yZQtJSUkmo8CnT582OV7W6KpGo3liB4vClBX32bNnadOmjXG7VqslJibGpAfvsmXLqFGjBsuXLzcZQR03bpzJMR81unr37l22
bNnChAkT+Pzzz43bz549WyDPJS+uXr1Kenp6seudLIQoelIDLIQosYKDg4HMWtbC1KVLF+Lj41myZIlxW0ZGBtOmTcPOzo5WrVoZ98vIyDB2QgDQ6XRMmzbN5HgVK1YkNDSU//u//yMuLi7H+W7evFlIz8RU48aNcXFx4ZdffiE9Pd24fe7cuTlat2WNlGcfGd+3bx/h4eEm+9nY2ADk6fEAU6dOfZankC9RUVEAhISEFNk5hRDFk4wACyFKrBo1alCvXj02b97M0KFDC+08I0aM4P/+7/8YMmQIUVFRVKtWjWXLlrFnzx6mTp1qLL/o3r07zZo146OPPiI2NhYfHx+WL1+ea33rjBkzaN68OfXr12f48OHUqFGD69evEx4ezpUrVzh8+PAj47l3754xqc5q6TV9+nScnJxwcnJi5MiReXpeGo2GSZMm8dprr9GmTRsGDBhATEwMc+bMyVED3K1bN5YvX07v3r3p2rUrMTEx/PLLL/j4+JCUlGTcz9raGh8fH5YsWULt2rVxdnamXr161KtXj5YtWzJlyhS0Wi2VK1dm48aNxMTE5Brb9OnTSUhI4Nq1awCsXr2aK1euADBq1Kgck/SyCw0NZceOHTmS7U2bNuHh4UHDhg3z9PoIIUoxM3agEEKIZ/a///3PYGdnl2uLrSxZbdBu3rxpsv3ll1822Nra5ti/VatWBl9fX5Nt169fN7zyyiuGChUqGCwsLAz169fPtTXX7du3DS+++KLBwcHB4OjoaHjxxRcNhw4dyrWV1/nz5w0vvfSSwdXV1aDRaAyVK1c2dOvWzbBs2TLjPrm1QYuJiTEAuX55eno++sV6hJ9++slQvXp1g6WlpaFx48aGnTt3Glq1amXSBk2v1xu+/PJLg6enp8HS0tLQsGFDw5o1awwvv/xyjnPu3bvXEBAQYLCwsDBpiXblyhVD7969DU5OTgZHR0fDc889Z7h27VqubdM8PT0f+Ryf1PItICDA4OrqarJNp9MZ3NzcDJ999lm+Xx8hROmjMBhkTUghRMl17949atSowZQpU0zamYmy6f79+zg7OzN16lTeeust4/aVK1fy/PPPc/78edzc3MwYoRCiOJAaYCFEiebo6MiYMWP45ptviqyTgCi+du7cSeXKlRk+fLjJ9q+//pqRI0dK8iuEAEBGgIUQohS6c+eOycS2f1OpVLi4uBRhREIIUXxIAiyEEKVQ1kSwR/H09CQ2NrboAhJCiGJEEmAhhCiFoqKiuHv37iPvt7a2plmzZkUYkRBCFB+SAAshhBBCiDJFJsEJIYQQQogyRRbCAPR6PdeuXcPe3v6Ry3gKIYQQQgjzMRgM3L9/H3d3d5Nl6Z+GJMDAtWvXqFq1qrnDEEIIIYQQT3D58mWqVKnyTMeQBBiMy5hevnwZBweHQj+fVqtl48aNdOjQAY1GU+jnE4VPrmnpJNe19JFrWjrJdS19crumiYmJVK1a1Zi3PQuzJsA7d+7km2++ISoqiri4OFasWEGvXr1M9jl58iQffvghO3bsICMjAx8fH/788088PDwASE1N5b333mPx4sWkpaXRsWNHfvrpJypVqpTnOLLKHhwcHIosAbaxscHBwUHeqKWEXNPSSa5r6SPXtHSS61r6PO6aFkS5qlknwT148IAGDRowY8aMXO8/f/48zZs3p06dOmzfvp0jR44wduxYrKysjPv85z//YfXq1SxdupQdO3Zw7do1+vTpU1RPQQghhBBClDBmHQHu3LkznTt3fuT9n376KV26dGHKlCnGbTVr1jR+f+/ePcLCwli4cCFt2rQBYM6cOdStW5eIiAiCgoJyPW5aWhppaWnG24mJiUDm/za0Wu0zPae8yDpHUZxLFA25pqWTXNfSR65p6STXtfTJ7ZoW5PUtNn2AFQqFSQmEXq/H0dGRMWPGsHv3bg4dOkT16tX5+OOPjfts3bqVtm3bcvfuXZycnIzH8vT05N133+U///lPrucaP348EyZMyLF94cKF2NjYFPRTE0IIIYQQzyg5OZnnn3+ee/fuPXPJarGdBHfjxg2SkpL46quvmDRpEl9//TXr16+nT58+bNu2jVatWhEfH4+FhYVJ8gtQqVIl4uPjH3nsjz/+mNGjRxtvZxVVd+jQochqgDdt2kT79u2lVqmUkGtaOsl1LX3kmpZOuV1XnU5HRkYGxWScTzyBQqFApVKhUqlQKBS5XtOsT+wLQrFNgPV6PQA9e/Y0juT6+/uzd+9efvnlF1q1avXUx7a0tMTS0jLHdo1GU6S/EIv6fKLwyTUtneS6lj5yTUunrOualJTElStXJPktgWxsbHBzczO+P7O/VwvyPVtsE+AKFSqgVqvx8fEx2V63bl12794NgKurK+np6SQkJJiMAl+/fh1XV9eiDFcIIYQQxYBOp+PKlSvY2Njg4uIiC1yVEAaDgfT0dG7evElMTAzVqlUr1PMV2wTYwsKCJk2acPr0aZPtZ86cwdPTE4CAgAA0Gg1btmyhb9++AJw+fZpLly4RHBxc5DELIYQQwry0Wi0GgwEXFxesra3NHY7IB2trazQaDRcvXiz0CY1mTYCTkpI4d+6c8XZMTAzR0dE4Ozvj4eHBBx98wIABA2jZsiWtW7dm/fr1rF69mu3btwPg6OjIsGHDGD16NM7Ozjg4ODBq1CiCg4Mf2QFCCCGEEKWfjPyWTFlLHBd2+YpZE+DIyEhat25tvJ01Me3ll19m7ty59O7dm19++YXJkyfz9ttv4+3tzZ9//knz5s2Nj/n+++9RKpX07dvXZCEMIYQQQgghcmPWBDg0NPSJGf7QoUMZOnToI++3srJixowZj1xMQwghhBBCiOzMuhKcEEIIIYQQRU0SYCGEEEKIYuDmzZu88cYbeHh4YGlpiaurKx07duSLL75AoVA89itrftSVK1ewsLCgXr16xuOOHz/+iY9/1H516tTJNdbJkyejUqn45ptvCv11KQySAAshhBBCFAN9+/bl0KFDzJs3jzNnzvDXX38RGhpK/fr1iYuLM37179+fTp06mWwLCQkBYO7cufTv35/ExET27dsHwPvvv2+yb5UqVZg4caLJtiy+vr4m27Naz/7b7NmzGTNmDLNnzy78F6YQFNs2aEIIIYQQz8pgMJCi1Znl3NYaVZ67USQkJLBr1y62b99uXOzL09OTpk2b5jyutTVpaWk51jwwGAzMmTOHn376iSpVqhAWFkZgYCB2dnbY2dkZ91OpVNjb2+e6ZoJarX7iWgo7duwgJSWFiRMnMn/+fPbu3WtMwEsKSYCFEEIIUWqlaHX4fL7BLOc+MbEjNhZ5S7WyktSVK1cSFBSU64q1T7Jt2zaSk5Np164dlStXJiQkhO+//x5bW9s8H+Ps2bO4u7tjZWVFcHAwkydPxsPDw2SfsLAwBg0ahEajYdCgQYSFhZW4BFhKIIQQQgghzEytVjN37lzmzZuHk5MTzZo145NPPuHIkSN5PkZYWBgDBw5EpVJRr149atSowdKlS/P8+MDAQObOncv69ev5+eefiYmJoUWLFty/f9+4T2JiIsuWLWPw4MEADB4
8mD/++IOkpKS8P9liQEaARZ4l30vg1uWLKFUqKlSthlW2j1OEEEKI4shao+LExI5mO3d+9O3bl65du7Jr1y4iIiL4+++/mTJlCrNmzWLIkCGPfWxCQgLLly83qdkdPHgwYWFhT3xsls6dOxu/9/PzIzAwEE9PT/744w+GDRsGwKJFi6hZsyYNGjQAwN/fH09PT5YsWWLcpySQBFjkoE1L5faVy9y6FMvNS7HcuhTLrcsXSb6XYLKfXTlnKnhUo3xVTyo8/CpfpSoaSyvzBC6EEEL8i0KhyHMZQnFgZWVF+/btad++PWPHjuXVV19l3LhxT0xiFy5cSGpqKoGBgcZtBoMBvV7PmTNnqF27dr5jcXJyonbt2iar9oaFhXH8+HHU6n9eU71ez+zZsyUBFiVHWnIyl45Fc/NiLLcux3Lr0kXuxl+D3BYoUShwquiKTpfB/Vs3Sbp7h6S7d4g9fNB0n0qu/yTED/8t51YZlVp+3IQQQoj88PHxYeXKlU/cLywsjPfeey9Hovzmm28ye/Zsvvrqq3yfOykpifPnz/Piiy8CcPToUSIjI9m+fTvOzs7G/e7cuUNoaCinTp16ZNu04kYykjLsRuwFVkyZSNLtWznus7Z3oIJHNVw8qlHBoxoVPDypUMUTjVXm6G5a8gNuX7nErcsXuXXp4sN/Y0m5n0hCfBwJ8XGcOxBhPJ5Spca5chWTxNjFwxOHChVRKKUUXQghRNl2+/ZtnnvuOYYOHYqfnx/29vZERkYyZcoUevbs+djHRkdHc/DgQX7//fccCeigQYOYOHEikyZNMhm1zc37779P9+7d8fT05Nq1a4wbNw6VSsWgQYOAzCS7adOmtGzZMsdjmzRpQlhYWInpCywJcBl14eAB1vwwBW1qCvblXfCo55eZnHpUw8WzOjaOTo9t3WJpY4t77bq4165rsj2rTjgrIc78/hLa1JTM25diTfbXWFpRvqqHMTF28axO5To+qNSawnjaQgghRLFkZ2dHYGAg33//PefPn0er1VK1alWGDx/OJ5988tjHhoWF4ePjk+voa+/evRk5ciTr1q2jR48ejz3OlStXGDRoELdv38bFxYXmzZsTERGBi4sL6enp/Pbbb3z44Ye5PrZv37589913fPnll2g0xf9vuCTAZdDBv1ezfd5MDAY9HvX86P6fTwpsQpuNoxMejk541Gtg3GYwGLh/62aOxPjO1cto01KJP3eG+HNnjPtb2dpRq2kI3iEt8PD1Q6nK3yQCIYQQoqSxtLRk8uTJTJ48+Yn7zp071+T2tGnTHrmvq6srOp1pH+TY2Nhc9128ePEjj2NhYcGtWzk/Mc4yZswYxowZ88j7ixtJgMsQvU7Htnkzid6wBoB6rTvQ7tU3C702V6FQ4OBSEQeXitRo1MQknrvx17htTIwvcu3MSR4k3OXYto0c27YRawdHagc2o05ISyrX8ZFyCSGEEEI8M0mAy4j0lGTW/DCFmEORALR4fghNevTN8wo1hUGpUlG+clXKV65K7aDmAOj1Oq6ePM7p8F2cidhDSuI9Dm9ax+FN67BzLk/toObUCWmJa63aZo1dCCGEECWXJMBlQOKtm6z8egI3L8WitrCky8j38Aosniu2KJUqqvr6UdXXj9ZDXuPyscOcCt/Fuf3hJN25zcF1qzi4bhUOLpXwDmlBnZCWuHhWl2RYCCGEEHkmCXApF3/+LCunTORBwl1sncrR64OxuNbKfy9Ac1Cp1VTzD6CafwAZr77FxSMHObVnJ+cj95F48zoHVi3jwKpllHOrjHdIS+qEtKB8FY8nH1gIIYQQZZokwKXY2QPhrPvxWzLS06jgUY3eH36OQ4WK5g7rqag1GmoGBFIzIBBtWioxhyI5tXcnMQcjuRt3lYg/FxHx5yIqeFSjTkhLvINb4OTqZu6whRBCCFEMSQJcChkMBiLXrGDn73PAYKCafwDd3vkQSxsbc4dWIDSWVtQOak7toOakpyRzPnIfp/buJPbwIW5dimX3pVh2L55PpRpe1AlpQe3gFjhUcDF32EIIIYQoJiQBLmV0GRlsnf0LR7asB6BBh660GTKi1LYSs7C2oW6L1tRt0ZrUpCTOHtjL6b27uHTsMNcvnOX6hbPs+G027t4+eAe3wDu4ObZO5cwdthBCCCHMSBLgUiZi+ZLM5FehoPVLr9Kwc48yM0HMys6O+q07UL91B5LvJXBm315O793JlVPHuXb6BNdOn2DHgjBqNQ2mQbvOVPWtX2ZeGyGEEEL8QxLgUsRgMHBy9zYA2r/6Fn7tOpk5IvOxcXTCv0MX/Dt04f6dW5wJ38OpPduJP3+WM+G7OBO+i3JulWnQvjM+rdpibWdv7pCFEEIIUUQkAS5Fbl2K5d71eNQaC+o2DzV3OMWGvXMFArr2JKBrT27EXuDI5r85sWs7d+Ousn3+LHYtmod3cAsatO+Mm1cdGRUWQgghSjlZVqsUOXcgAgAPP380VlZmjqZ4qlitBu1efYvXf5lHu1ffwsWzOjqtlhM7t7Jo7AcsGDOK6I3rSEtONneoQgghypibN2/yxhtv4OHhgaWlJa6urnTs2JEvvvgChULx2K/t27cDcOXKFSwsLKhXr57xuOPHj3/i4x+1X506dUxirFatmvE+lUqFu7s7w4YN4+7du0X2OhUEGQEuRbISYK8mwWaOpPizsLahQfvO+LXrRPy5Mxze9Den9+7k5qVYtoT9xM7fZlO3eSh+7TtTqXpNc4crhBCiDOjbty/p6enMmzePGjVqcP36dbZs2YKvry9xcXHG/d555x0SExOZM2eOcZuzszMAc+fOpX///uzcuZN9+/YRGBjI+++/z+uvv27ct0mTJowYMYLhw4fniMHX15fNmzcbb6vVOVPFiRMnMnz4cHQ6HWfOnGHEiBG8/fbbLFiwoEBeh6IgCXApkXjzBjdiz6NQKKkR0NTc4ZQYCoUCNy9v3Ly8CX3pVU7s2srhTX9z5+pljmxZz5Et63GtVZsG7TrjHdICjaWMrAshRIliMIDWTJ/qaWwgj2V1CQkJ7Nq1i+3bt9OqVSsAPD09ado05990a2tr0tLScHV1NdluMBiYM2cOP/30E1WqVCEsLIzAwEDs7Oyws7Mz7qdSqbC3t8/xeMhMeHPbnl32x1auXJmXX36ZRYsW5el5FheSAJcS5w6EA1C5jg82Do5mjqZksrKzo1HnHjTs1J2rJ48TvWkdZ/ftJf7cGeLPnWH7/Fn4tGpDg3adZcU5IYQoKbTJ8KW7ec79yTWwsM3TrllJ6sqVKwkKCsLS0jLfp9u2bRvJycm0a9eOypUrExISwvfff4+tbd5iADh79izu7u5YWVkRHBzM5MmT8fB49N+8q1evsnr1agIDA/MdrzlJDXApkVX+UKtJkJkjKfkUCgVVfOrR7Z0xvPbzXFo8PwTHSq6kJT/g0N+rmfvemywZ/xEn9+wgQ6s1d7hCCCFKAbVazdy5c5k3bx5OTk40a9aMTz75hCNHjuT5GGFhYQwcOBCVSkW9evWoUaMGS5cuzfPjAwMDmTt3LuvXr+fnn38mJiaGFi1acP
/+fZP9PvzwQ+zs7LC2tqZKlSooFAr+97//5fk8xYGMAJcCKfcTuXLyOCAJcEGzcXSiac9+NOneh4tHozm86W/OR+3jysljXDl5DGt7B+q1bk/dVm3NHaoQQojcaGwyR2LNde586Nu3L127dmXXrl1ERETw999/M2XKFGbNmsWQIUMe+9iEhASWL1/O7t27jdsGDx5MWFjYEx+bpXPnzsbv/fz8CAwMxNPTkz/++INhw4YZ7/vggw8YMmQIBoOBy5cv88knn9C1a1d27tyJqoQsvCUJcClw4eABDAY9Lp7Vcaz4+Lod8XQUSiXVGjSiWoNG3L9zi2NbN3Fk6waSbt/iwF9/cuCvP7F2rUyMmwtejYNQKOXDFSGEKBYUijyXIRQHVlZWtG/fnvbt2zN27FheffVVxo0b98QkduHChaSmppqUIhgMBvR6PWfOnKF27dr5jsXJyYnatWtz7tw5k+0VKlSgVq1aAHh5eTF16lSCg4PZtm0b7dq1y/d5zEH+SpcCZ/dn1v/K6G/RsHeuQHC/QQyfFkbPD8ZS3T8AFApS4q+y+rsvmTP6dQ5tWEN6aoq5QxVCCFHC+fj48ODBgyfuFxYWxnvvvUd0dLTx6/Dhw7Ro0YLZs2c/1bmTkpI4f/48bm5uj90va9Q3JaXk/N2TEeASTpuWysUjhwCoJe3PipRSpaJW40BqNQ7k1tUrrJr5E8kXz3E37hpbZ//CnsULqN+2Iw07dsPBpaK5wxVCCFGM3b59m+eee46hQ4fi5+eHvb09kZGRTJkyhZ49ez72sdHR0Rw8eJDff/89R9/eQYMGMXHiRCZNmpRrS7Ps3n//fbp3746npyfXrl1j3LhxqFQqBg0aZLLf/fv3iY+PN5ZAjBkzBhcXF0JCQp7uyZuBjACXcLFHDpGRnoaDSyVcPKubO5wyy7FiJSo0DGToj7NoO/QNyrlVJi35AZGrlzNr1Kus/t9krp46gcFgMHeoQgghiiE7OzsCAwP5/vvvadmyJfXq1WPs2LEMHz6c6dOnP/axYWFh+Pj45Eh+AXr37s2NGzdYt27dE2O4cuUKgwYNwtvbm/79+1O+fHkiIiJwcXEx2e/zzz/Hzc0Nd3d3unXrhq2tLRs3bqR8+fL5e9JmJCPAJdz5bN0fZAlf87Owssa/Y1catO9MTHQUUetWceloNGf27eHMvj1UquFFQNee1A5qhkqtMXe4QgghiglLS0smT57M5MmTn7jv3LlzTW5Pmzbtkfu6urqi0+lMtsXGxua67+LFi5947kc9tqSRBLgE0+t0nI/aD0j9b3GjUCqp0agJNRo14dalWA7+/Rcndm3j+oWzrJv2LTt/m41/x27Ub9tR+jYLIYQQRUxKIEqwKyePk5p0Hyt7Byp7+5g7HPEIFTyq0eG1txnx01yaDXgR23LOJN29w+7F85n55its/HUaty5fNHeYQgghRJkhI8BF7O6DdN76PYobt5R06fJsxzoXmdn9oWajpihLSN+9sszGwZGgPgNo0qMPp8N3E7V2JTdiznN0ywaObtmAp19DGnXpQfUGAdJGTQghhChEkgAXMQOw98IdQIle//QTogwGwz+rvzWV7g8liUqtwadFa+o2D+Xq6RMcXLeKc/sjuHjkEBePHKKcexUade6Bb8s2aKyszB2uEEIIUepIAlzENKp/Jqpp9Qbyv9J3phsx57l/6yZqS0s8/fwLJDZRtBQKBVXq+FKlji/3blzn0IY1HN2ygbvXrrAl7Cd2L56HX9tO+HfshkMFlycfUAghhBB5IglwEdOo/vloOz1D/9THOReZOfpbza8RGounTaNFceFYsRKhLw4jpN8gjm3fwqG//yLhehwH/vqTyDUr8ApsRuOuvXDz8jZ3qEIIIUSJJwlwEcueAGt1z5AAZ2t/JkoPC2sbGnXujn/HLsQciuTgulVcOnaEM+G7OBO+iyo+9Wjaox/V/AOk7Z0QQgjxlCQBLmIqpQKVUoFOb3jqBDghPo5bl2IzW20FNC3gCEVxoFSqqBkQSM2AQG5ejCFq7SpO7t7OlRPHuHLiGC6e1WnSsx/eQc1lAqQQQgiRTzLV3Ayy6oC1uqebBHfuQGb3h6o+9bC2sy+wuETx5OJZnU5vvsur02YR0LUXGksrbl6MYd2P3zD73REc2rAGbVqqucMUQgghSgxJgM3A4mEZxNOOAGfV/9ZsLN0fyhL78hUIfelVhv80h2b9B2Pt4Mi9G9fZOvsXZo4cRsSfi0lNSjJ3mEIIIUSxZ9YEeOfOnXTv3h13d3cUCgUrV6585L6vv/46CoWCqVOnmmy/c+cOL7zwAg4ODjg5OTFs2DCSinkSoHmGBDj5XgJXT58EoFaTwAKNS5QM1nb2BPUdyPDpYbQZ+joOLpVISbzHnj9+49c3h7B9/izu375l7jCFEEIUgkWLFqFSqXjrrbee6vFz585FoVAYv+zs7AgICGD58uUFHGnxZtYE+MGDBzRo0IAZM2Y8dr8VK1YQERGBu7t7jvteeOEFjh8/zqZNm1izZg07d+5kxIgRhRVygXiWEojzUfvBYKBSjVo4VKhY0KGJEkRjaUXDjt0Y9sOvdHn7A1w8qqFNSyVq7UpmjXqV9T9P5faVy+YOUwghRAEKCwtjzJgxLFq0iNTUpyt/c3BwIC4ujri4OA4dOkTHjh3p378/p0+fLuBoiy+zJsCdO3dm0qRJ9O7d+5H7XL16lVGjRvH777+j0WhM7jt58iTr169n1qxZBAYG0rx5c6ZNm8bixYu5du1aYYf/1LJGgNOfYgQ4q/63VmPp/iAyKVUq6jZrxYtTptHno/FU8amHXpfB8e2bmfveG6z6dhLXzpwyd5hCCGEWBoOBZG2yWb4MhrwPdIWGhjJy5EhGjhyJo6MjFSpUYOzYsSbHiImJYe/evXz00UfUrl07x6jt3LlzcXJyYuXKlXh5eWFlZUXHjh25fNl0MEShUODq6oqrqyteXl5MmjQJpVLJkSNHnu3FLkGKdRcIvV7Piy++yAcffICvr2+O+8PDw3FycqJx48bGbe3atUOpVLJv375HJtZpaWmkpaUZbycmJgKg1WrRarUF/CxyyhoBTk3L3/nSU1K4eDQagGoNGxdJrCJvsq6Fua9JlXoNqFKvAXHnThO1egUXovZx7kAE5w5EULmOLwHde+Pp10haqOVRcbmuouDINS2dsl9XnU6HwWBAr9ej1+tJ1iYTvNg8c2bCB4Zjo7HJ8/7z5s1j6NChREREEBkZyeuvv06VKlUYPnw4ALNnz6ZLly7Y29vzwgsvEBYWxsCBA42P1+v1JCcn88UXXzB37lwsLCwYOXIkAwcOZNeuXcZ9sv+r0+mYP38+AP7+/sbt5qLX6zEYDGRkZACm79WCfN8W6wT466+/Rq1W8/bbb+d6f3x8PBUrmpYBqNVqnJ2diY+Pf+RxJ0+ezIQJE3Js37hxIzY2ef9BfVqpySpAQfj+SO6eyfv/DpMuXUCn1aKxcyDiyDEUR48XXpDiqWzatMncIRgpvf3wcPXg7snD3I89x9VTx7l66jgWTs6U82mAnUcNFEqZB5sXx
em6ioIh17R02rRpE2q1GldXV5KSkkhPTyclI8Vs8dy/f58MdUae9s3IyKBy5cqMHz8ehUJB9+7diYqK4vvvv2fAgAHo9XrmzJnDlClTSExMpEuXLrz//vscPXoUT09PAFJTU9FqtUyePNk4cDht2jQCAwPZtm0bAQEBpKamcu/ePRwcHABISUlBo9EwdepUXFxcjIOC5pKenk5KSgp79+4FTN+rycnJBXaeYpsAR0VF8cMPP3Dw4MECH636+OOPGT16tPF2YmIiVatWpUOHDsYfiMI082I415Lv49fAn3a+bnl+3IafviceqN+yNc27di28AEW+abVaNm3aRPv27XOU6pjf89y/fYvoDWs4tnUD6Ql3uL53Gylnj9Owc098WrVFYymrCeameF9X8TTkmpZO2a+rTqfj8uXL2NnZYWVlhb3BnvCB4WaJy1ptneccRq1WExwcjKOjo3Fbq1atmDFjBra2tmzevJmUlBT69u2LRqPBwcGBdu3asXTpUiZOnAiAlZUVarWa0NBQlA8HOBo3boyTkxOXLl2idevWma+JvT2RkZFAZlK5ZcsWRo8eTeXKlenevXsBvwr5k5qairW1NSEhIezcudPkvVqQyXmxTYB37drFjRs38PDwMG7T6XS89957TJ06ldjYWFxdXblx44bJ4zIyMrhz5w6urq6PPLalpSWWufzB12g0RfIL0UKduXCBXqHM8/l0GVpio6MAqB3YTH5xF1NF9TOUX86ubrR5eTghfQcRvXEtB//+i8SbN9gxfyb7VyyhUeceNOjYVfpKP0Jxva7i6ck1LZ00Gg1KpRKFQoFSqTQmgXYqOzNHljdZcWfJ+l6pVDJnzhzu3LmDra2t8X69Xs/Ro0eZOHGiyfPN/n32Y2X/ql27tvE+f39/Nm3axDfffEPPnj0L8yk+Udb1U6szU9Ts79WCfM8W288/X3zxRY4cOUJ0dLTxy93dnQ8++IANGzYAEBwcTEJCAlFRUcbHbd26Fb1eT2Bg8W0R9jRdIC6fOEZa8gNsHJ1wq+1dWKGJUs7Kzo6gPgMYPmM2bYe+kdlC7X4ie/74jZlvvsL2+TOlhZoQQpjJvn37TG5HRETg5eVFQkICq1atYvHixSZ50aFDh7h79y4bN240PiYjI8M4ugtw+vRpEhISqFu37mPPrVKpSEkxX7lIUTPrCHBSUhLnzp0z3o6JiSE6OhpnZ2c8PDwoX768yf4ajQZXV1e8vTMTwLp169KpUyeGDx/OL7/8glarNRZ759Yyrbh4moUwzh14uPhFQFOUSln6VjwbjYUl/h274teuE6cjdnNg5VJuXoolau0qDq1fS93moTTp0ZfyVaqaO1QhhCgzLl26xOjRo3nttdc4ePAg06ZN47vvvmPBggWUL1+e/v375yip6NKlC2FhYXTq1AnIzJVGjRrFjz/+iFqtZuTIkQQFBdG0aVPjYwwGg3GuVEpKCps2bWLDhg18/vnnRfdkzcysCXBkZCStW7c23s6qy3355ZeZO3duno7x+++/M3LkSNq2bYtSqaRv3778+OOPhRFugcnvQhgGvZ7zD1d/q9VEVn8TBSerhVqdkJbEHj7IgVXLuHziKMd3bOb4js3UbBxE0559ca/9+JEDIYQQz+6ll14iJSWFpk2bolKpeOeddxgxYgQNGjSgd+/eudYT9+3blxdffJFbtzI/vbOxseHDDz/k+eef5+rVq7Ro0YKwsDCTxyQmJuLmljkHydLSEk9PTyZOnMiHH35Y+E+ymDBrAhwaGpqvHnmxsbE5tjk7O7Nw4cICjKrwZZVApOexBCL+wlmS7txGY2WNR70GhRmaKKMUCgXV/QOo7h9A3NnT7F+1jHOREZx/+FWlbj2a9OxLdf/G0kJNCCEKSVY3hp9//tlk++P68/bv35/+/fubbOvTpw99+vTJdf8hQ4YwZMiQZ461pCu2k+BKs/yOAGeVP1T3D0BtYVFocQkB4OblTc/3P+X21ctErl7OiZ3buHLyGFdOHsPFoxpNevTFO6QlSpWU4gghhCiZiu0kuNJMo366BLhWE1n9TRSd8pWr0vH1d3h12iwCuvVGY2XNzUuxrJv+HWHvjODQ+tVo055uGU4hhBDCnCQBNgOLrBKIjCeXQNy5dpU7Vy+jVKmo3rDxE/cXoqDZl69A6IvDGDFjDs0GvIi1gyOJN6+zdc7/MfOtoYT/uYiUpPvmDlMIIUq07du3M3Xq1Gc6xpAhQ0hISCiQeEo7SYDNID8lEHFnTwHgXrsuVrYlo4+hKJ3+3ULNsWJmC7W9f/xubKGWeOumucMUQgghnkhqgM0gPwnwg4S7ADhUcCnUmITIq+wt1M5E7Gb/qmXcvBjzsIXaGuo2by0t1IQQQhRrkgCbQX4Wwki+l5kA2ziVK9SYhMgvpUpFnWat8H5kC7VAmvbsJy3UhBBCFDuSAJtB/kaAEwCwdXQqxIiEeHqPbqG2j/OR+6hcx5fAXs9RzT9AWqgJIYQoFiQBNoP8rASXVQJhKyPAogTIrYXa1VPHWf7VcVxrehHUdxA1GjWRRFgIIYRZySQ4M9Co874QRvK9BEBKIETJYmyhNn0WAV17obawJP78WVZOmchvH73L2QPh+VoERwghhChIkgCbgbEEIkNGgEXpZu9cgdCXXmX49DCa9OiLxtKKG7Hn+evbL1jw4duc2bcHgz5v/bCFEEKIgiIJsBnktQY4Q6sl9WF/VUmARUlm4+hEyxde4dXpYQT27o+FtTU3L8aw+n+TmT9mFKfDd6HX68wdphBCFHuLFi1CpVLx1ltvPdNxUlJScHZ2pkKFCqSlpRVQdCWHJMBmYFwI4wkJcFb5g1Kllh7AolSwcXCk+cCXeHX6bIL6DsTC2oZbly+yZurXzHt/JCd3b5dEWAghHiMsLIwxY8awaNEiUlOffjXOP//8E19fX+rUqcPKlSsLLsASQhJgM/hnBPjxNZDJD8sfbBwdUSjlUonSw9rOnmb9BzN8xmxCnnsBS1tb7ly9zLpp3zJ39Juc2LkVvU4SYSHEszMYDOiTk83ylZ+5DqGhoYwcOZKRI0fi6OhIhQoVGDt2rMkxYmJi2Lt3Lx999BG1a9dm+fLlJseYO3cuTk5OrFy5Ei8vL6ysrOjYsSOXL1/Ocb6wsDAGDx7M4MGDCQsLe/oXuISSLhBmkNcSiAf3pP5XlG5WtnYE9xtEoy49OLR+DVFrV3I37ip/z/gf4csWEdi7P3VbtEalll9VQoinY0hJ4XSjALOc2/tgFAobmzzvP2/ePIYNG8b+/fuJjIxkxIgReHh4MHz4cADmzJlD165dcXR0NCauzz//vMkxkpOT+eKLL5g/fz4WFha8+eabDBw4kD179hj3OX/+POHh4SxfvhyDwcB//vMfLl68iKenZ8E88RJAhhXNIK8LYcgEOFFWWNrYZi6zPD2MFs8PwdregYTrcWz45Qfm/Oc1jmzZgC5Da+4whRCiUFWtWpXvv/8eb29vXnjhBUaNGsX3338PgF6vZ+7cuQwePBiAgQMHsnv3bmJiYkyOodVqmT59OsHBwQQEBDBv3jz27t3L
/v37jfvMnj2bzp07U65cOZydnenYsSNz5swpuidaDMiwihnkdQQ4+eEiGDaOkgCLssHC2oamPfvRsGM3ojetI3L1cu7duM6mX6cRsXwxgb2ewze0PWqNxtyhCiFKCIW1Nd4Ho8x27vwICgoy6ZMeHBzMd999h06nY/PmzTx48IAuXboAUKFCBdq3b8/s2bP573//a3yMWq2mSZMmxtt16tTBycmJkydP0rRpU3Q6HfPmzeOHH34w7jN48GDef/99Pv/8c5RlpORSEmAzsFDnrQ2alECIskpjZUWT7n3w79CFI5s3cOCvZdy/dZPNs34iYvkSmvbsR/02HVFbWJg7VCFEMadQKPJVhlBchYWFcefOHayzJdV6vZ4jR44wYcKEPCeuGzZs4OrVqwwYMMBku06nY8uWLbRv375A4y6uykaaX8xoVHlbCOOfEginwg5JiGJJY2lFQNeeDJs2i9ZDXsOunDNJd26zdc7/MevtVzm4bhXa9LLXvkcIUTrt27fP5HZERAReXl4kJCSwatUqFi9eTHR0tPHr0KFD3L17l40bNxofk5GRQWRkpPH26dOnSUhIoG7dukBmIj1w4ECT40RHRzNw4MAyNRlORoDNIM+T4B6WQMgIsCjrNBaWNOrcHb+2HTm2fTP7Vy7l/u2bbJs3k30rl9Kkex8atO+CxsrK3KEKIcRTu3TpEqNHj+a1117j4MGDTJs2je+++44FCxZQvnx5+vfvn2Mp+S5duhAWFkanTp0A0Gg0jBo1ih9//BG1Ws3IkSMJCgqiadOm3Lx5k9WrV/PXX39Rr149k+O89NJL9O7dmzt37uDs7Fxkz9lcZATYDCzyXAOc1QbNqbBDEqJEUFtY4N+hC8N+/JX2w0fi4FKR5HsJ7PhtNjNHDWP/qmWkp6aYO0whhHgqL730EikpKTRt2pS33nqLd955hxEjRjB79mx69+6dI/kF6Nu3L3/99Re3bt0CwMbGhg8//JDnn3+eZs2aYWdnx5IlSwCYP38+tra2tG3bNsdx2rZti7W1Nb/99lvhPsliQkaAzUC6QAjxbFRqDX7tOuEb2o4Tu7ayb8Uf3Lsez66FczmwejmNu/bCv2M3LEtB3Z8QouzQaDRMnTqVn3/+2WT7kSNHHvmY/v37079/f5Ntffr0oU+fPjn2fe+993jvvfdyPY6FhQV37959iqhLJkmAzSCrBOJxK8Glp6agTctc4UUSYCFyp1Krqd+6Az4t2nBqzw4ili8mIT6O3YvnE7l6OY269qRhp+6ykqIQQggTkgCbQV5qgLNaoKktLdFY5a+NihBljUqtxrdVW+o2D+X03p2EL1/C3WtX2PvH70StWUmjLj1o1LknVnaSCAshhJAE2CzyUgKRvfwht5ofIUROSpWKui1a492sJWfCdxOxfAm3r1wifNkiotaupGGnHgR07Ym1vYO5QxVCCBPbt29/5mMMGTKEIUOGPPNxygJJgM0gawRYpzeg0xtQKXMmuMYewLIIhhD5plSqqNOsFd7BLTi7fy/hfy7m1qVY9q1YwsG//6Jhx64EdOuNjYOjuUMVQghhBpIAm0HWQhiQWQahUqpy7CMT4IR4dgqlktpBzfFqGsK5yAjC/1zMzdgL7F+1jIPrV+PfoSuNu/WW95kQQpQxkgCbQdYIMGQmwFaanAmwtEATouAolEq8moZQq0kwFw7uJ3zZYq5fOEvk6uVEb1hLg/adaNy9L3blSn/vSyGEEJIAm4UmW8nDo+qAZQRYiIKnUCioGRBIjUZNiYmOJGLZYuLOnSZq7SqiN67Dr20nmvTsi71zBXOHKoQQohBJAmwGSqUCpcKA3qB4ZCeIB/cSAEmAhSgMCoWCGg2bUN2/MRePHCJ82SKunTnJofWrObL5b+q16UjTnn2xlhp8IYQolSQBNhO1AtINkJ6RewJsLIGQBFiIQqNQKKjWoBGefg25dOww4csWcfXUcQ5vXMvRLRvwadkGrYOURQghRGkjCbCZPOyE9sjFMB487ANs6+RUNAEJUYYpFAo86/vjWd+fyyeOEr5sEZePH+HYto2gVLItKYHgfgOlNEIIIUoJ5ZN3EYUhax5cbiUQBoNBaoCFMJOqPvXp//mXDBj/FVV9/UCv5+iW9YS9PZxtc381vjeFEKIohYaGolAojF+VKlXiueee4+LFi/k6zvbt202OY21tja+vL7/++mshRV48SQJsJuqHI8DajJyT4FIfJKHXZQBgIzWIQphFlbr16P3xBCq37Ya7tw86rZaDf//FrFGvsuO32SQn3jN3iEKIMmb48OHExcVx7do1Vq1axeXLlxk8ePBTHev06dPExcVx4sQJXnvtNd544w22bNlSwBEXX5IAm8njSiCy6n8tbW1RazRFGZYQ4l+sK7nR97NJ9P30v7jV8iYjPY3I1cuZNepVdi9eQGpSkrlDFEI8hsFgQJumM8uXwfDoFV//LTQ0lJEjRzJy5EgcHR2pUKECY8eONTmGjY0Nrq6uuLm5ERQUxMiRIzl48KDx/qzR3bVr1+Ln54eVlRVBQUEcO3Ysx/kqVqyIq6sr1atX5+2336Z69eomxyrtpAbYTNSPKYEw1v/K6K8QxYJCoaCaX0M86/sTcyiSPX/8xo2Y8+xbsYToDWsI6NqLRl16YmljY+5QhRD/kpGu59d3dpjl3CN+aIXGMmev/0eZN28ew4YNY//+/URGRjJixAg8PDwYPnx4jn3v3LnDH3/8QWBgYI77PvjgA3744QdcXV355JNP6N69O2fOnEGTy6CawWBgw4YNXLp0KddjlVaSAJuJsQQitwT4ntT/ClEcKRQKajRqQvWGjTl3IJy9f/zOrcsX2bv0dw7+/ReNu/ehUafuaKyszB2qEKIEqlq1Kt9//z0KhQJvb2+OHj3K999/b0yAf/rpJ2bNmoXBYCA5OZnatWuzYcOGHMcZN24c7du3BzKT6ipVqrBixQr69+9v3KdKlSoApKWlodfrmThxIi1btiyCZ1k8SAJsJqrHJMDSAk2I4k2hUGSuLNc4iNMRuwlfupA7166we9E8otaupGnPfjTo0AWNhaW5QxWizFNbKBnxQyuznTs/goKCUCj+WSwrODiY7777Dp1OB8ALL7zAp59+CsD169f58ssv6dChA1FRUdjb25s8LouzszPe3t6cPHnS5Fy7du3C3t6etLQ09u/fz8iRI3F2duaNN97I9/MsiSQBNpOsLhDpuUyCkw4QQpQMCqWSOiEtqR3UjFO7dxC+bBEJ1+PYsSCMyDUrCOz1HPXbdpJafiHMSKFQ5KsMoThzdHSkVq1aANSqVYuwsDDc3NxYsmQJr776ar6OVb16dZwetlr19fVl3759fPHFF2UmAZZJcGaiVmQmvrnXAEsCLERJolSq8GnZhiH/+5kOr72NfQUXHty9w9Y5/8fsd0ZwZPN6dBkZ5g5TCFHM7du3z+R2REQEXl5eqFS5J/BZ21NSUnI8Lsvdu3c5c+YMdevWfey5VSpVjuOUZjICbCbGLhC5rAQnCbAQJZNKraZ+mw74tGzN0a2b2Ld8Mfdv32TTzOnsX7WUoL6
D8GnRGuUj/pgJIcq2S5cuMXr0aF577TUOHjzItGnT+O6774z3JycnEx8fD2SWQPz3v//FysqKDh06mBxn4sSJlC9fnkqVKvHpp59SoUIFevXqZbLPjRs3SE1NNZZALFiwgH79+hX6cywuJAE2k8d1gTDWADs6FWFEQoiColJr8O/QBd/QthzZtJ79q5Zy78Z1Nvw8lf0rlxLcbxDeIS1QKiURFkL846WXXiIlJYWmTZuiUql45513GDFihPH+mTNnMnPmTADKlSuHn58f69atw9vb2+Q4X331Fe+88w5nz57F39+f1atXY2FhYbJP1mPUajVVq1bltddeY/z48YX7BIsRSYDN5HGT4B7cSwBkBFiIkk5jYUlA1574te1I9Ma17P/rT+7GXWXdtG/Zt+IPQvq/gFeTYBRKqUYTQoBGo2Hq1Kn8/PPPOe7bvn17no/TvHnzXHv/Qma/4fz0Jy6t5LeumfyzEIbpD6FeryMlMRGQBFiI0kJjZUWTHn0ZPm0WzQa8iKWtLbevXGL1/yaz4ON3OR+1T/4gCSFEETJrArxz5066d++Ou7s7CoWClStXGu/TarV8+OGH1K9fH1tbW9zd3XnppZe4du2ayTHu3LnDCy+8gIODA05OTgwbNoykErAy06NKIFISEzEY9CgUSqwdHMwQmRCisFhY2xDUZwCvTgsjqO9ALKytuRl7gZVT/svCz94jNjpKEmEhhCgCZk2AHzx4QIMGDZgxY0aO+5KTkzl48CBjx47l4MGDLF++nNOnT9OjRw+T/V544QWOHz/Opk2bWLNmDTt37jSplymujAth/GsSXNYEOGsHB6kPFKKUsrK1o1n/wbw6LYwmPfuhtrQk/twZ/pw8jsXjPuTSsSPmDlEIUcS2b9/O1KlTn+kYWeUNWe3NxKOZtQa4c+fOdO7cOdf7HB0d2bRpk8m26dOn07RpUy5duoSHhwcnT55k/fr1HDhwgMaNGwMwbdo0unTpwrfffou7u3uhP4enpXrECLB0gBCi7LC2d6Dl80MI6NKTA38t4/DGv7l2+gRL//sJVX39aNZ/MJXr+Jg7TCGEKHVK1CS4e/fuoVAojP+zCQ8Px8nJyZj8ArRr1w6lUsm+ffvo3bt3rsdJS0sjLS3NeDvxYc2tVqtFq9UW3hN4SKvVGmuAU7UZJudMvH0LAGsHpyKJRRSMrGsl16x0KarramFrR7NBQ2jQqQeRfy3j2NZNXD5+hMXjxuBRvyHB/QZRqaZXocZQVsh7tXTKfl11Oh0GgwG9Xo9en3OiuSje9Ho9BoOBjIe907O/VwvyfVtiEuDU1FQ+/PBDBg0ahMPD2tj4+HgqVqxosp9arcbZ2dnYJy83kydPZsKECTm2b9y4ERsbm4IN/BHUiswh4DPnLrAu45xx+93j0Zn/JiWxbt26IolFFJx/f2ohSociva4uVaja7TnuHjtE4oXTXDp6iEtHD2FT2YPyfo2xLFe+6GIpxeS9Wjpt2rQJtVqNq6srSUlJpKenmzskkU/p6emkpKSwd+9ewPS9mpycXGDnKREJsFarpX///hgMhlxbg+TXxx9/zOjRo423ExMTqVq1Kh06dDAm14VJq9WybvYWACpX9aRLl39WZ9l5J57bh6G2bz2adelS6LGIgqHVatm0aRPt27dHI8velhpmva7P9efejXj2r/iDU7t3kHz1EslXL1GraTCBfQZSvopH0cZTSsh7tXTKfl11Oh2XL1/Gzs4OKysrc4cm8ik1NRVra2tCQkLYuXOnyXs16xP7glDsE+Cs5PfixYts3brVJEF1dXXlxo0bJvtnZGRw584dXF1dH3lMS0tLLC0tc2zXaDRF9gsxaylknQGTc6Yk3gPA3rm8/HIugYryZ0gUHXNd1wqVq9Jl5HsE9RlA+LJFnNq7k3P7wzl3III6IS0J7vc8zu6Vizyu0kDeq6WTRqNBqVSiUChQKpUopcd2iZN1/dTqzBQ1+3u1IN+zxfonIyv5PXv2LJs3b6Z8edOP/oKDg0lISCAqKsq4bevWrej1egIDA4s63Hz5pw+waX2ScRU4mQQnhHjI2b0KXd/+gJenTMOraQgYDJzas4O5o99g/U9TuXfj0SVfQgghcjLrCHBSUhLnzv1T/xoTE0N0dDTOzs64ubnRr18/Dh48yJo1a9DpdMa6XmdnZywsLKhbty6dOnVi+PDh/PLLL2i1WkaOHMnAgQOLdQcIyN4FwrTnp7ELhKMkwEIIUxU8qtHjvU+4HnOevX/8xoWDBzi+YzMnd2+jXmh7AvsMwKGCi7nDFEKIYs+sI8CRkZE0bNiQhg0bAjB69GgaNmzI559/ztWrV/nrr7+4cuUK/v7+uLm5Gb+yCqMBfv/9d+rUqUPbtm3p0qULzZs359dffzXXU8qzR/YBvidt0IQQj1epek16fziO5yd9h6dfQ/Q6HUe2rGf2O8PZMvsXku7eMXeIQogSLDw8HJVKRdeuXZ/5WHXq1MHS0vKxzQnMwawjwE9ajzovKyI5OzuzcOHCggyrSOS2ElxGejppDx4AkgALIZ7Mzcubfp/+lyunjrN3yW9cPnGU6A1rOLZ1Iw06dKFpz37YODqZO0whRAkTFhbGqFGjCAsL49q1a0/9qfru3btJSUmhX79+zJs3jw8//LCAI316xboGuDTLrQY4OTEh8z61GktbWzNEJYQoiarU8aX/uMk8N/YL3GvXJUObTtTalcwa9Sp7liwg9UHxXx5eiMJiMBjQpqaa5Ss/S5uHhoby9ttvM2bMGJydnXF1dWX8+PEAxMbGolAoiI6ONu6fkJCAQqFg+/btxm3Hjx+nW7duODg4YG9vT4sWLTh//jwAQ4YMoVevXkyYMAEXFxccHBx4/fXXc7SKS0pKYsmSJbzxxht07dqVuXPnmty/fft2FAoFa9euxc/PDysrK4KCgjh27FiO5xQWFsbzzz/Piy++yOzZs/P8WhSFYt8ForQylkBkS4Cz6n9tHMuhUCjMEZYQogTzqNeAqr5+xB4+yJ4lv3H9wlkili8hesNamvTsR8OO3dBIWyhRxmSkpfHjy/3Mcu635y3L13tu3rx5jB49mn379hEeHs6QIUNo1qwZXl5PXgjn6tWrtGzZktDQUGPXrD179hgXlADYsmULVlZWbN++ndjYWF555RXKly/PF198Ydznjz/+oE6dOnh7ezN48GDeffddPv744xx5yQcffMAPP/yAq6srn3zyCd27d+fMmTPGTg33799n6dKl7Nu3jzp16nDv3j127dpFixYt8vx6FCYZATaT3CbBPUhIAMBW1vAWQjwlhUJBdf8AXvjyf/R47xPKV/Eg9UESuxbOJeyd4RzasAZdhqyCJkRx5Ofnx7hx4/Dy8uKll16icePGbNmyJU+PnTFjBo6OjixevJjGjRtTu3ZtXnnlFby9vY37WFhYMHv2bHx9fenatSsTJ07kxx9/NFkxLywsjMGDBwPQqVMn7t27x44dO3Kcb9y4cbRv35769eszb948rl+/zooVK4z3L168GC8vL3x9fVGpVAwcOJCwsLCnfWkKnIwAm4mxBCLbJD
hpgSaEKCgKhQKvpiHUbBzIqd072Lv0d+7duM7W2b8QuXoFwf0G4dOiNUqVytyhClGo1JaWvD1vmdnOnR9+fn4mt93c3HKsd/Ao0dHRtGjR4rG9chs0aGCy4m1wcDBJSUlcvnwZT09PTp8+zf79+42JrFqtZsCAAYSFhREaGmpyrODgYOP3zs7OeHt7c/LkSeO22bNnGxNpgMGDB9OqVSumTZuGvb19np5TYZIE2EweVwIhE+CEEAVFqVTh07IN3iEtOLp1ExHLF5N48zobfp7KgVXLaDZgMF5NQ1DIggGilFIoFCWm9OffyatCoUCv1xsX9MheU6zVmn6SY21t/cznDwsLIyMjw2TSm8FgwNLSkunTp+Po6Jin45w4cYKIiAj2799vMvFNp9OxePFihg8f/syxPiv5jWcmKmXmD3G6JMBCiCKgUmvw79CFYT/8SssXXsHKzp47166w+vuv+O2T/xATHZWvCTtCiKLj4pLZ3zsuLs64LfuEOMgcPd61a1eOxDi7w4cPk5KSYrwdERGBnZ0dVatWJSMjg/nz5/Pdd98RHR1t/Dp8+DDu7u4sWrTI5FgRERHG7+/evcuZM2eoW7cukJlIt2zZksOHD5sca/To0cWmDEISYDN57AiwLIIhhCgkGksrmvToy6vTZhHcbxAaK2tuxJxn+eRxLBn/EVdO5pzJLYQwL2tra4KCgvjqq684efIkO3bs4LPPPjPZZ+TIkSQmJjJw4EAiIyM5e/YsCxYs4PTp08Z90tPTGTZsGCdOnGDdunWMGzeOkSNHolQqWbNmDXfv3mXYsGHUq1fP5Ktv3745EteJEyeyZcsWjh07xpAhQ6hQoQK9evVCq9WyYMECBg0alOM4r776Kvv27eP48eNF8ro9jiTAZqIyLoTxz4hL8r0EAGxkEpwQopBZ2tgS8twLvDptFgHdeqPSaLh66jhLxn/En5PHcf3CuScfRAhRZGbPnk1GRgYBAQG8++67TJo0yeT+8uXLs3XrVpKSkmjVqhUBAQHMnDnTpKyibdu2eHl50bJlSwYMGECPHj2MrdbCwsJo165drmUOffv2JTIykiNHjhi3ffXVV7zzzjsEBAQQHx/P6tWrsbCw4K+//uL27dv07t07x3Hq1q1L3bp1i8UosNQAm0luC2EYV4GTEWAhRBGxcXAk9MVhBHTpScTyxRzbtonY6Chio6OoHdiMkP6DKV+lqrnDFKLUy97PN8vKlSuN39etW9dkJVzIuWCYn58fGzZseOx5JkyYwIQJE3JsX7169SMf07RpU+O5suJs3rx5rr1/+/bti06ne+SxTpw48dj4ioqMAJvJvxfCMBgMUgMshDAb+/IVaD98JEP+9zN1m4eCQsGZfXuY9/5brP9pKvduXDd3iEIIUWAkATaTf48Aa1NTyEhLA6QEQghhPuVc3eky6n1enjKNWk2CMBj0HN+xmdnvvsaW2b8Y/6MuhBAlmZRAmImxBvjhQhhZf1Q0VtZYWD17KxMhhHgWFTyq0fP9z4g7e5rdSxZw6Wg00RvWcGzbJhp27k6THn2xtjN/L08hRN78e0njpxUaGloqOsbICLCZZHWB0OkN6PQGUpOSALAuBs2hhRAii5uXN899Nonnxn6Bm5c3GelpHFi1jFkjhxHx52LSU5LNHaIQQuSbJMBmosr2ymt1euPSpCr1o1dwEUIIc/Go14BB//2WXmPGUsGjGukpyez54zdmvT2cqLWryEhPN3eIQgiRZ1ICYSZZI8CQORFO/3DGpCxLKoQorhQKBTUDAqnRsAmnwnex94/fSIiPY/v8mUSuXUFw30H4tmqLSi1/WoQQxZuMAJuJMlsCrM3Qo8/IyNwuCbAQophTKJXUbdaKId/9TPsRo7ArX4Gk27fY9Os05r3/Jqf27MCg1z/5QEIIYSaSAJuJUgHqh1mwVmcw9sxTqmTkRAhRMqjUavzadmTY1F8JfWk41vYO3I27xtofv2HBh29zPmpfqZgsI4QofSQBNiONKisB1qPXPRwBVssIsBCiZFFbWBDQtSevTptFs/6DsbC24ealWFZO+S+Lxr7PpWNHnnwQIYQoQpIAm5HFw2bA2WuAVTICLIQooSysbQjqO5BXp4fRpGc/1BaWxJ09zdL/fsLSSZ8Rd+60uUMUQuRBeHg4KpWKrl27PtXjt2/fjkKhMH5ZW1vj6+vLr7/+WsCRPj1JgM1I87AVhFYmwQkhShFrO3taPj+EYT/OxL9jN5QqNZeORrPw0/dY9e0kbl2KNXeIQojHCAsLY9SoUezcuZNr16499XFOnz5NXFwcJ06c4LXXXuONN95gy5YtBRjp05ME2IyMCXCG4Z8EWGZPCyFKCbtyzrQd+jpDp/4fvq3aoVAoOXcggnljRrFu2rckxMeZO0Qhio3Q0FDefvttxowZg7OzM66urowfPx6A2NhYFAoF0dHRxv0TEhJQKBRs377duO348eN069YNBwcH7O3tadGiBefPnwdgyJAh9OrViwkTJuDi4oKDgwOvv/466f9qYZiUlMSSJUt444036Nq1a44FNLJGd9euXYufnx9WVlYEBQVx7NixHM+pYsWKuLq6Ur16dd5++22qV6/OwYMHC+T1elaSAJtRVg1wuk6HTrpACCFKKceKlej05ru8/O0Magc2A4OBk7u3M2f062yaOZ37d26ZO0RRihkMBvTpOrN85XcS6Lx587C1tWXfvn1MmTKFiRMnsmnTpjw99urVq7Rs2RJLS0u2bt1KVFQUQ4cOJeNhfgGwZcsWTp48yfbt21m0aBHLly9nwoQJJsf5448/qFOnDt7e3gwePJjZs2fn+jw++OADvvvuOw4cOICLiwvdu3dHq9XmGpvBYGD9+vVcunSJwMDAfLwihUeGG80oawQ4PcOASmqAhRClXPkqVek++mOuXzjH7iULiI2O4sjm9ZzYsZUGHbvStGc/bBwczR2mKGUMWj3XPt9rlnO7TwxBYZH3gS0/Pz/GjRsHgJeXF9OnT2fLli14eXk98bEzZszA0dGRxYsXo9FkLqpVu3Ztk30sLCyYPXs2NjY2+Pr6MnHiRD744AP++9//olRm5iRhYWEMHjwYgE6dOnHv3j127NhBaGioybHGjRtH+/btgczEvUqVKqxYsYL+/fsb96lSpQoAaWlp6PV6Jk6cSMuWLfP8ehQmybbMKHsNsKVORoCFEGVDpRq16PvxBK6cOMbuJfO5euoEUWtWcGTzegK69qJxt15Y2tiaO0whipyfn5/JbTc3N27cuJGnx0ZHR9OiRQtj8pubBg0aYGNjY7wdHBxMUlISly9fxtPTk9OnT7N//35WrFgBgFqtZsCAAYSFheVIgIODg43fOzs74+3tzcmTJ0322bVrF/b29qSlpbF//35GjhyJs7Mzb7zxRp6eU2GSBNiMLLK3QZMSCCFEGVPFpx4Dxn9N7OGD7F40nxux54n4cxHRG9bQtGc//Dt2RWNpZe4wRQmn0ChxnxhitnPnx7+TV4VCgV6vN47OZi9F+He5gbW19VNG+Y+wsDAyMjJwd3c3bjMYDFhaWjJ9+nQc7B0wZ
GQucqNL1qJLTEOhUaG0zj2drF69Ok5OTgD4+vqyb98+vvjii2KRAEsNsBllHwGWhTCEEGWRQqGgun8Agyd/T/f/fISzexVSk+6z8/c5hL0zguiN69Bl5F5XKEReKBQKlBYqs3wpFIonB5gHLi4uAMTF/TNxNPuEOMgcPd61a9cj63ABDh8+TEpKivF2REQEdnZ2VK1aFa1Wy/z58/n26ylEhR8gas8BInfuI3L7PtwrubHg57loryWRcTcVgPBtu9ElpqNPyeDu3bucOXOGunXrPvZ5qFQqk/ObkyTAZvTPJDiDLIQhhCjTFEoltYOa8/K3M+j4xrs4uFTkwd07bAn7iTn/eZ0TO7ei1+vMHaYQZmFtbU1QUBBfffUVJ0+eZMeOHXz22Wcm+4wcOZLExEQGDhxIZGQkZ8+eZcGCBZw+fTpz5FhvID09naEvv8LRA4dZ/cdKxo39nDdeeY2M+GRWzlvG3bt3eanbIOq61aJu5Vr4eNTGt7o3vTr3YO6ieQ/PlJm7fPHj12w/sJvj504yZMgQKlSoQK9evUxiunHjBvHx8Vy8eJGlS5eyYMECevbsWQSv2JPJcKMZZS2Eoc3I3gdYLokQouxSqlTUC21HnWatOLplPRHLl3DvxnX+nvE/9q9aRrP+g6nVNLjARtaEKClmz57NsGHDCAgIwNvbmylTptChQwcMBgOGDD3l7JzYtHYDH37yEa1atUKlVNGgnh9NvPzRXk1Cn5JB65BW1KxcjdYd25CWnk7/nv0Y+85HoDcwd8l82jQPxcm5HKgUKFTKh/8q6DfwOb77eSonb55HXSGzLOnrb6Yw+qMPOHv2LP7+/qxevRoLCwuTmL29vYHMWuKqVavy2muvGVu7mZtkW2aU20IYKqkBFkII1BoNDTt1p15oew5tWMOBVcu4feUSf/3vSyrV8KL5wBfx9GsoibAoNbL3882yYsUK0BvQp+mo7VmLXRu2Y9AZQKfHoDOQdu0+6Axo4x8A4Fu5NmvmLc/9BApAoWDcJ58zfuw4UClRZEt016xfC0pFru+poJYhxvrjrDibN2+ea+9fyOxpnN8WcEVNEmAzynUlOFkIQwghjDRWVjTt2Q+/dp2IWrOCqLWruH7hLH9++TlVfOrRfMBLVK7jY+4whci3rLKE7AmtQWeAjGzf6/R5P6AxoVWAOtv3D7crbTQo01VoXGyefKwyQLItM8qqAU7LkC4QQgjxOFa2djQb8CINO3Vn38qlHN64lisnjrF43BhqNGpCswEvUrFaDXOHKQTw7+TWgOFhgotOjyHj4Xa9HvI6SKrKmdAq1P98/6iRW/FokgCb0T8jwAbpAiGEEHlg4+hE65eHE9C1JxF/LubY9s1cOHiACwcP4B3cgpD+L+DsXsXcYYpS7NHJrWmim/fkNrMMIXtya7qtYJLbfy9p/LRKQnlDXki2ZUamJRCZI8BSAyyEEE/mUKEiHV57myY9+rLnj985vXcnp8N3cSZiD76hbQnuNwiHChXNHaYoYZ6U3JJVmpDXBPDfiaxxYlnBJrci/yQBNiPThTAyR4AVkgALIUSelXOrTLd3xtC0Zz/2/PEbF6L2c2zbJk7u2oZf+84E9uqPrVM5c4cpigmDPnsim1mO8NTJrTJbQqs27Zpg/F6S22JLEmAzyhoBTpcRYCGEeCYVq9Wg95jPuXbmJLsXzefyiaMc+ns1R7duJKBLTxp364OVnZ25wxSFSKmDjBsppCemoE/ToUtKJyOZZ0hus9fbSnJb2kgCbEbGEoiMbDXA0gVCCCGemnvtujz3+ZdcOnqY3YvnEX/+LPtW/EH0xrU06d6X+u06mztE8YwMOj0ZN1NIv5qENu4B2mtJpMc9oGGKM7f3HybDXoG+tS26JC16dS5J6r+TW5PvHya3SkluSzvJtszIQp29BCKrC4RcEiGEeBYKhQJPP3886jfgXGQEexYv4PaVS+xePJ+odauw9fIho317NBqNuUMVT6BP1xmTXO21B6RfS0J7/QFk5D6Sq7BSoSpviUKjRGmtRmVtkXNimSS3AkmAzcpkEpw+qwuElEAIIURBUCgUeDUJpmZAU07t2cnepb9z73o8KVHhLHj/HMHPDcK3ZVv5vVtM6JLS0V57gDYuifRrmUlvxq2UXLspKCxVaNxtsXCzQ+Nuh6KiJVsid9Cpexd0Oh1JMTGoHS1RWVkW/RMRJYIkwGZkWgP8cCU4KYEQQogCpVSq8GnRGu/gFhzevJ6di+dz//ZNNv7yIwdW/UmzAYOpHdgMhVJp7lDLBIPBgO5uWmbpwsORXe21JHSJ6bnur7S3wMLdFo17ZrJr4W6LqpyVyUiuVqtFL/+PEfkg2ZYZZS2EkS4LYQghRKFTqdXUb9uRiw/ScFcbiFy9nLtxV1kz9WtcqtWg+cAXqe7fWCY3FSCDTo/2RsrDEoaHI7txSRhSdTl3VoC6vDWah8muhbsdGjdbVPYWRR94GTVkyBDmzZtnvO3s7EyTJk2YMmUKfn5+eT5ObGws1atXN97WaDR4eHgwZMgQPv3002LxHpME2IxyXQpZaoCFEKJQKdVqGnXpgn/7LkStXUnU2hXcjL3Aiq8m4O7tQ4uBL1HFp565wyxx9Gk6tPEPJ6VlTVB7VL2uSoHG1RaNmy0WlTMTXY2bLUpL+Rtobp06dWLOnDkAxMfH89lnn9GtWzcuXbqU72Nt3rwZX19f0tLS2L17N6+++ipubm4MGzasoMPON/m8x4xMV4J7OAKslhFgIYQoCpY2NoQ89zzDfpxF4+59UGssuHb6BEsmfMSyL8YSf/6suUMstnRJ6aSeuUvi9svcXniS+G8juTZ+Lzd/PkzCqvMkR15HezUJMgworFRYVHfErpk75Z6rTcV3GlF5YgiVRjXEuV9t7ILdsazmWOaT39DQUN5++23GjBmDs7Mzrq6ujB8/HsgcUVUoFERHRxv3T0hIQKFQsH37duO248eP061bNxwcHLC3t6dFixacP38eyBzd7dWrFxMmTMDFxQUHBwdef/110tNNS08sLS1xdXXF1dUVf39/PvroIy5fvszNmzdNYlm8eDEhISFYWVlRr149duzYkeM5lS9fHldXVzw9PXnhhRdo1qwZBw8eLNgX7imV7Z82M8ttIQylUhJgIYQoSjYOjrQaPJSALj2JWL6Eo1s3cPHIIS4eOYRX0xCaDRhM+Soe5g7TLAwGA7o7qcZJadq4zE4M+kfV6zpYGEsXjPW6zlZm/cjbYDCg1WrNcm6NRpOv5z5v3jxGjx7Nvn37CA8PZ8iQITRr1gwvL68nPvbq1au0bNmS0NBQtm7dioODA3v27CHjYYklwJYtW7CysmL79u3ExsbyyiuvUL58eb744otcj5mUlMRvv/1GrVq1KF++vMl9H3zwAVOnTsXHx4f//e9/dO/enZiYmBz7ZYmMjCQqKoqXXnopz69HYZIE2IyMk+AyZCEMIYQwNzvn8rR79U0ad+9D+NLfObF7O2f37+XsgXB8WrQm5Lnncazoau4wC41Bp0d7Pfmf3roPJ6gZ0vJYr+tui8qu+NXrarVavvzyS7Oc
+5NPPsHCIu+viZ+fH+PGjQPAy8uL6dOns2XLljwlwDNmzMDR0ZHFixcbW/zVrl3bZB8LCwtmz56NjY0Nvr6+TJw4kQ8++ID//ve/KB9OAl2zZg12DxeNefDgAW5ubqxZs8Z4f5aRI0fSt29fAH7++WfWr19PWFgYY8aMMe4TEhKCUqkkPT0drVbLiBEjJAEG2LlzJ9988w1RUVHExcWxYsUKevXqZbzfYDAwbtw4Zs6cSUJCAs2aNePnn382+UG4c+cOo0aNYvXq1SiVSvr27csPP/xgvHjFWa41wNIFQgghzMqpkiudR75Hk5792LPkN84dCOfEzq2c2rOT+m06ENRnAHbOuY9ylRT6NB3auGy9deMeoI1/ALo81Ou626FxtUVpKQM2Be3fE83c3Ny4ceNGnh4bHR1NixYtHtvfukGDBtjY2BhvBwcHk5SUxOXLl/H09ASgdevW/PzzzwDcvXuXn376ic6dO7N//37jPlmPzaJWq2ncuDEnT540Od+SJUuoW7cuWq2WY8eOMWrUKMqVK8dXX32Vp+dUmMyabT148IAGDRowdOhQ+vTpk+P+KVOm8OOPPzJv3jyqV6/O2LFj6dixIydOnMDKygqAF154gbi4ODZt2oRWq+WVV15hxIgRLFy4sKifTr79sxCGIdskOPmFIoQQxUGFqp70fP9T4s+dYfeSBVw8cojDm9ZxfPtm/Dt1o2nPfljbO5g7zCfS3U83li5kLSiRcfsR/XWtVGjc7EzajmkqWmeukFZCaTQaPvnkE7Od+1n2VygU6PV64+irIdtSzv8u67C2tn7KKE3Z2tpSq1Yt4+1Zs2bh6OjIzJkzmTRpUr6OVbVqVeOx6taty/nz5xk7dizjx4835nHmYtYEuHPnznTunPuylAaDgalTp/LZZ5/Rs2dPAObPn0+lSpVYuXIlAwcO5OTJk6xfv54DBw7QuHFjAKZNm0aXLl349ttvcXd3L7Ln8jRMR4BlJTghhCiOXGvVpt+n/+Xy8SPsWjyfuDOniFy9nCOb19O4W28CuvbEwtrmyQcqZP/U6/7TWzf92gP093Ov11U5WDxMcm0fljDYoSpnWSxaVBUkhUKRrzKE4sjFxQWAuLg4GjZsCGAyIQ4yR4/nzZuHVqt9ZOJ9+PBhUlJSjMlyREQEdnZ2VK1a9ZHnVigUKJVKUlJSTLZHRETQsmVLADIyMoiKimLkyJGPfR4qlYqMjAzS09PLdgL8ODExMcTHx9OuXTvjNkdHRwIDAwkPD2fgwIGEh4fj5ORkTH4B2rVrh1KpZN++ffTu3TvXY6elpZGWlma8nZiYCGT+b6ooCuWzzqEw6AFIz9Che1ikbiDn/+pE8Zd1zeTalS5yXUufZ7mmrrXr0m/sl8RGRxG+9HduXYpl79LfObh+NY2798GvXSfUFkWz8pghQ0/GzRQy4h6gjUsmI/4BGXHJj6zXVZW3Qu1mi8bVBrW7bWYJg51pkmQAkwlTJUn266rT6TAYDOj1evR6vZkjy5+suLPfNhgMWFpaEhQUxFdffYWnpyc3btzgs88+AzA+zzfffJNp06YxYMAAPvroIxwdHYmIiKBp06Z4e3tjMBhIT09n6NChfPrpp8TGxjJu3Djeeust43EMBgOpqalcu3YNyCyBmDFjBklJSXTt2tXkNZ0xYwY1a9akbt26TJ06lbt37zJkyBCTfW7evMm1a9fIyMjg6NGj/PDDD7Ru3Ro7O7tHXpusOLJ+FrO/Vwvyd3GxTYDj4+MBqFSpksn2SpUqGe+Lj4+nYsWKJver1WqcnZ2N++Rm8uTJTJgwIcf2jRs3mtTGFLZDUQcANQn3H5CSnAzAnr17sTx5ushiEAVr06ZN5g5BFAK5rqXPs15Tx2btUFW9wJ0jkaTeT2T3wrmEr1iKc72GONT0LtBV5ZQ6sH6gxuaBCpsHamySVVglq1Aaco7U6hUGUmx0JNtmkGKrI9lWR4pNxj+rpCUBZx5+lUKbNm1CrVbj6upKUlJSjhZfxVnWyGjWoFzWNq1WS2JiIlOnTmXUqFE0adKEWrVqMWHCBPr06UNycjKJiYloNBpWrlzJuHHjaN26NSqVinr16tGgQQMSExPRarW0bNkSDw8PWrVqRXp6On379uU///mPyUDghg0bqFy5MgD29vZ4eXkxd+5cGjVqRGJiIklJSQCMHTuWyZMnc/ToUWrUqMHChQuxsLAw2adDhw5A5shvpUqVaN++PZ999pnJc/y39PR0UlJS2Lt3L2D6Xk1+mCsVhGKbABemjz/+mNGjRxtvJyYmUrVqVTp06ICDQ+HXc2m1WjZt2kSz4CC+PxaJxsIKjVqFDmgVGopz5Ud/FCGKp6xr2r59+3zXfIniS65r6VPQ11Sv03Fy1zb2rVhC0u1b3Dywm7SLZ2naqz91mrXK97wOXVI6GXHJZFzLnJSWEZeM7k7qI+t1jaO6braZXy5WJbpe92llv646nY7Lly9jZ2dn9o/Z82Pnzp05tq1evdr4fZMmTYiIiDC5X6czHfEPCQl55H/uNBoNarWayZMnM3ny5Fz3+e233/jtt98eG2dWk4FGjRqxf//+XPepV69ejtjyKjU1FWtra0JCQti5c6fJe/VxiXN+FdsE2NU1s9XM9evXcXNzM26/fv06/v7+xn3+PTsyIyODO3fuGB+fG0tLSywtc35MpdFoivSPnI1lZk2SVv/PJDgLKyv5Q1uCFfXPkCgacl1LnwK7phoN/u07U69VW45sWc++FX+QeOM6m3+dRuRfywjqM5C6zUNzJMIGfbZ63Wxtx/T3c/+It6zU6z4rjUaDUqk01q3+u3VXWaZQKIyvy7PIenxhvb5Z10/9sCtW9vdqQf4eLrYJcPXq1XF1dWXLli3GhDcxMZF9+/bxxhtvAJktOBISEoiKiiIgIACArVu3otfrCQwMNFfoeabJ6gKRIQthCCFESaa2sKBR5x7Ub92B6I1rOfDXnyTEx7H+p+/Zv2IZIe36U7mSNxlxKcak95H9dStYGxeR0DxcVKI49tcVoiQzawKclJTEuXPnjLdjYmKIjo7G2dkZDw8P3n33XSZNmoSXl5exDZq7u7uxV3DdunXp1KkTw4cP55dffkGr1TJy5EgGDhxY7DtAQLaFMHR6WQpZCCFKAZVCjZ9fO7wcA7geeRrttQfYq51R7VaRwHnTndWZ/XWzFpHQuGUmu0oL+TsgCt7cuXML5DjVqlUzacdWUpk1AY6MjKR169bG21l1uS+//DJz585lzJgxPHjwgBEjRpCQkEDz5s1Zv369SU3P77//zsiRI2nbtq1xIYwff/yxyJ/L0zC2QcvQYXg4G1IlbdCEEKJEyUhII/XEbVKO3yIt5h48nNxuiw1oMidWp+tTuZt2nYT0G2htM6jRPphabYJQauR3vhDmYNZ3Xmho6GP/F6FQKJg4cSITJ0585D7Ozs4lYtGL3GhUD2u3DP+0ApE+wEIIUbwZDAYybiSTcvw2KSduo72SZHK/ytHCWLqQVa+bYZnB9b9Xc3LtPtLuPOD47J1U2FyNkH7PU6tJUIF2jRBCPJlkW2Zk8XA
EWJk9AZYSCCGEKHYMegPpl++TcuI2qcdvk3Er26IACrDwcMDatzzWPuVRV8i5IpcaCO43iIaduxO1dhUH163i1qVY/vrfl7hUq0FIv+ep2ThQJrUVoNLwMX1ZlNUfuLDfC5IAm1FWCYQSGQEWQojixpChJ+18AiknMkd6TTo0qBRY1XLCyrc81nXLo7LP2yQ1K1s7mvV/gUZdehC1ZiUH//6Lm7EXWPXtJCrVqEVwv+ep0aiJJMLPQKPRoFAouHnzJi4uLvJalhBZC3XcvHkTpVJZ6J13JNsyo6wSiOwjwKp89owUQghRcPRpGaSevkvK8duknrpj0qlBYanCqo4z1r7lsapdDqXV0/8Jtbazp/nAFx8mwis4tH4N1y+cY+WUibjW9CLkuReo5h8gydtTUKlUVKlShStXrhAbG2vucEQ+2djY4OHhISPApZlCoUCjUqDMyBruV0odmBBCFDHd/XRST94h5fgtUs8lgO6fj86V9hqsfTJLGyxrOqFQF+zvaBsHR1o8P4SAbr058NefRG9cS/z5syz/ajxuXt6EPPcCnn4NJRHOJzs7O7y8vGQZ8xJGpVKhVqtRKBSFfu0kATYzjUppLIGQ+l8hhCgaGXdSST19nZQTt0m/mGiy0pq6vBVWvhWw9i2PRVV7FMrCTz5tHBxpNXgojR8mwoc3riPu7Gn+/PJz3L19CHnueTzqNZBEOB9UKpV8qioeSRJgM9OolBgelkDIIhhCCFE4DAYD2msPeHD0BnUPO3A7PNrkfk1lu8xJbL7lUVe0MVuiaetUjtCXXqVx9z4cWLWMw5v/5trpEyyb9BlV6tYj5LnnqerrZ5bYhChNJAE2M41Kic4gI8BCCFHQDDoD6RfvZbYrO34bXUIaADaoQQmW1R2x9imPlW8F1E6WZo7WlF05Z1oPGUGTHn3Zv2oZRzb/zZWTx/hj4idU9fUj5LnnqVK3nrnDFKLEkgTYzCxUCtKySiCkA4QQQjwTg1ZH6tmEzElsJ2+jT84w3qfQKLGo5cgp7WWaPtcKS0cbM0aaN3bO5Wnzyms06dGXfSuXcnTLBi4fP8KS40fwqO9PyHMvUNm7rrnDFKLEkYzLzDRqJemGrFXgZARYCCHyS5+sJeX0XVKP3SL1zF0M2mytJW3Uxs4Nll7l0Cn03Fl3HqVN4bZYKmj25SvQbtgbNO3Zl33L/+DY9k1cOhrNpaPRVGvQiJDnXsDNy9vcYQpRYkgCbGYWKiWpxklwcjmEECIvdPfSMvvzHr9N2oV7oP9nFpvK0TKzVZlveSyrOaJQ/VPPq8uWHJdEDhUq0n7ESJr2eo6I5Us4vmMzsYcPEnv4INUbNibkuRdwrell7jCFKPYk4zIzjUpp7AOslBFgIYR4JG3W8sPHb+VYflhdyca4Epumsl2p75bgWLESHV9/m8CHifCJXVuJORRJzKFIagQ0JaTf81SqUcvcYQpRbEkCbGYadfYEWC6HEEJkMegNpF+5T+rxzJXYMm7msvywT+ZIryaX5YfLAidXNzq9+S6BvZ8j4s/FnNy9gwtR+7kQtZ9aTYII7vc8FavVMHeYQhQ7knGZmYVKYewDLDXAQoiyzpChJ+3CvX+WH05M/+dOlQLLmk7Gkd68Lj9cFpRzq0znke8R2GcA4csWcWrvTs4diODcgQi8AkMI7vc8Lh7VzB2mEMWGJMBmZlICITXAQogySJ+mI/XMnX+WH0791/LD3uUya3q9nZ9p+eGywNm9Cl3f/oCgh4nw6YjdnN23l7P79lI7uAUh/QZRvoqHucMUwuzy/Ztk/fr12NnZ0bx5cwBmzJjBzJkz8fHxYcaMGZQrV67AgyzNTBJgWQhDCFFG6JKylh++Teq5u5CRbflhO42xtMGqEJYfLgvKV/Gg27sfEnQpMxE+s28PZ8J3cSZiN3VCWhLUdyDlK1c1d5hCmE2+E+APPviAr7/+GoCjR4/y3nvvMXr0aLZt28bo0aOZM2dOgQdZmslSyEKIsiLjTqpxEtu/lx9WlbcyljZYeDgUyfLDZUEFj2p0H/0xNy/GEL5sEWf37+XUnh2c3ruLOs1aEtR3EM7ulc0dphBFLt8JcExMDD4+PgD8+eefdOvWjS+//JKDBw/SpUuXAg+wtLNQK2QSnBCiVDIYDGjjHpD6sF2ZNu6Byf2aynZY+zxcfriS+ZYfLgtcPKvT471PuBF7gb1LF3I+MoKTu7dzas9O6rYIJajvQMq5ups7TCGKTL4zLgsLC5KTkwHYvHkzL730EgDOzs4kJiYWbHRlgLRBE0KUJga9gfSLiZkjvSduo7uT+s+diszlh60ejvSqy1mZL9AyqmK1GvT64DOuXzjH3qW/c+HgAU7s3MrJ3dvxadmGoD4Dcarkau4whSh0+U6AmzdvzujRo2nWrBn79+9nyZIlAJw5c4YqVaoUeIClnUW2EgiVTIITQpRABq2e1HN3Hy4/fAf9A+0/d6qVWNUul1nTW9cZlW3JWoGttKpUoxa9PxxH3LnThC9dSEx0FMe3b+bkrm34tmpLUJ+BOLhUNHeYQhSafGdc06dP580332TZsmX8/PPPVK6cWTv0999/06lTpwIPsLQz7QMsI8BCiJJBn5JB6umHk9hO38GQ/s8KawprNdZZyw/XLofSQn63FVdutbzp8/EErp05yd6lC7l45BBHt27k+I6t1GvdjsDeA3Co4GLuMIUocPlOgD08PFizZk2O7d9//32BBFTWWKiUqJAaYCFE8adLzLb88Pl/Lz9sgdXDel7L6o4oVNK5oSRxr12Xfp/+l6unTrB36e9cOnaYI5vXc3z7Zuq37UjTXs9h71zB3GEKUWCeKuM6f/48c+bM4fz58/zwww9UrFiRv//+Gw8PD3x9fQs6xlJNo1LICLAQotjS3sxcfjj1+G3SL983uU9d8eHyw75lY/nhsqByHR+eG/sFV04cY8/S37hy4hjRG9ZydOtG/Np1omnP57Ar52zuMIV4ZvlOgHfs2EHnzp1p1qwZO3fu5IsvvqBixYocPnyYsLAwli1bVhhxllrZJ8FJDbAQwtwMBgPaK0kPJ7HdIuNGisn9Fh72mYtS+JRH42JjpihFYaviU48B477i0rEj7F36G1dPneDQ36s5unkDDTp0pkmPftg6Sd9/UXLlO+P66KOPmDRpEqNHj8be3t64vU2bNkyfPr1AgysLsvcBVshCGEIIMzDo/ll+OPX4bXS5LT/s83D5YQdZfrgs8ajnR1Xfr7l09DB7lv5G3JlTRK1dxeFN6/Hv2JUm3ftg4+hk7jCFyLd8J8BHjx5l4cKFObZXrFiRW7duFUhQZYmFOvsIsCTAQoiioU/XkXYms3NDysk7GFIzjPcpLP61/LC1fDpVlikUCjz9/PGo34CLhw+yd+lC4s6dJnL1cqI3rqVhx2407t4HGwdHc4cqRJ7l+7eak5MTcXFxVK9e3WT7oUOHjB0hRN6Z1gDLHxkhROHRPdCSejJzElvq2QTI+Kdzg9L2X8sPa2QSmzClUCio5h+AZ4NGxERHsvePhVy/cJYDf/1J9MZ1NOzUjcbdemNt72DuUI
V4onxnXAMHDuTDDz9k6dKlKBQK9Ho9e/bs4f333zcuiiHyzmQpZJkEJ4QoYBl3s5Yfvk167D3T5YedrTJLG+rJ8sMi7xQKBTUaNqG6f2MuHNzP3j8WciP2PPtXLiV6wxoade5BQNfeWNnZmTtUIR4p3wnwl19+yVtvvUXVqlXR6XT4+Pig0+l4/vnn+eyzzwojxlItewmEUibBCSEKgC4pnQcH4kk5egvttX8tP+xmm1na4FsBjassPyyenkKhoGZAIDUaNeVcZAThSxdy82IMEcuXcPDv1QR07UmjLj2xspVEWBQ/T7UU8syZMxk7dizHjh0jKSmJhg0b4uXlVRjxlXrZR4BVMgIshHgG2hvJJO2+yoOD1yHj4VCvAiyqOWa2K/Mpj9pZlh8WBUuhUODVJJhaAYGcPRBO+NKF3Lp8kfBlizj4918EdO1Fo849sbSRriGi+HjqIUcPDw88PDwKMpYyyUKllBpgIcRTMxgMpF24R9Kuq6SeumPcrqlqj11T18zlh+2kc4MofAqlktqBzfBqEsyZfXsIX7aI21cusfeP3zm47i8ad+tNw07dsLCWRFiYX74zrqFDhz72/tmzZz91MGWRRiVLIQsh8s+g05Ny5Bb3d19FezUpc6MCrHzKY9+iMhaeDlLeIMxCoVTiHdwCr8AQTofvJnzZIu5eu8LuxfOJXLuSJt374N+xKxZW1uYOVZRh+U6A7969a3Jbq9Vy7NgxEhISaNOmTYEFVlZoVIp/lkKWGmAhxBPoUzN4sC+epL1X0d3L7Ner0CixCaiEXfPKaCpIUiGKB6VSRd1mrfAObs6pPTuJ+HMRd+OusWvhXCLXrKBJj774d+iCxlLKckTRy3fGtWLFihzb9Ho9b7zxBjVr1iyQoMoSTfZJcLIQhhDiETLuppK05xoPDsRjSNMBoLTTYBfijm2gGypbjZkjFCJ3SqUKnxatqRPSkpO7txP+5yLuXY9n52+ziVy9nKY9++HXvjMaC0tzhyrKkAIZclQqlYwePZrQ0FDGjBlTEIcsM0xqgGUhDCHEv6Rfvs/9XVdIOXaLhx8Woa5kg32Lytj4V0Shln69omRQqlT4tmpLnWatOLFrKxF/LiHx5nW2z5/FgaxEuG0n1BZSsy4KX4F95n7+/HkyMjKevKMwYdoFQkoghBBg0BtIPXmH+7uukB6baNxuWcsJ+5ZVsPRykvpeUWKp1Grqt+6AT4s2HN+xhYjli7l/6ybb5v7KgVXLaNq7P/XbdEStkU81ROHJd8Y1evRok9sGg4G4uDjWrl3Lyy+/XGCBlRWmK8HJCLAQZZk+XUfywesk7b5Gxq2UzI0qBTYNXLBrXhkLd+mnKkoPlVqNX9uO+LZqw7Ftm4lYsYSk27fYOvsX9q9aRlDv/tRr3R6VWhJhUfDynQAfOnTI5LZSqcTFxYXvvvvuiR0iRE4mXSBkEpwQZZLufjpJ4dd4EBGHPjnzkzSFlRq7IFfsQtxROUhtpCi9VGoNDdp3xje0Hce2bmTfw0R486yf2LdyKUF9BuDbqh0q+RspClC+f5q2bdtWGHGUWZZqWQpZiLJKe/0B93ddJfnQDdBlLlyhcrbCvpk7No1dUVrK7wRRdqg1Gvw7dqVe6/Yc2bKe/SuXcv/WTTb9Op39K5cS2GcAPi3aSCIsCoT8FJlZ9hFgqQEWovQzGAyknU/IXLji9D9tJS087LFrURlr3woolFLfK8outYUFjTr3oH7bjhzZ9Df7Vy3j3o3rbPzlR/avWEpQ34HUbR4qg0bimeQp42rYsGGeJ1wcPHjwmQIqazTZR4ClC4QQpZYhQ0/ykZsk7bqKNu5B5kYFWPuUx65lFSw9HcwboBDFjMbCkoCuvfBr24noTes48NefJFyPY/1P37NvxRKC+g6iTrOW0kJUPJU8JcC9evUq5DDKruyT4BTyJhai1NEna0naH0/S3mvoE7MtXNG4EvbNK6MuLwtXCPE4GisrmnTvQ4P2nYnesJYDq5dzN+4af0//jn3LlxDUbxA1GgeaO0xRwuQpAR43blxhx1FmZe8DjEL6eQpRWmTcSSVp91UeRMZjSH/4KY+9BruQytgFuqK0kZntQuSHhZU1TXv2w79DFw6tX0Pk6uXcuXaFdT9+g3Plqmg8aqHLyEAj7dNEHkjRqZlpVErjUsh6hYwAC1HSpV1KJGnX1cyFKzLntaFxtcGueRVs/F1k4QohnpGFtQ2Bvfvj37EbB/9eRdTaldy5ehmuXmb+mWMEdO1J/TYdsLC2MXeoohjL929inU7Ht99+S9OmTXF1dcXZ2dnkqyDpdDrGjh1L9erVsba2pmbNmvz3v//FYDAY9zEYDHz++ee4ublhbW1Nu3btOHv2bIHGUZiyT4LTywiwECWSQW8g5dgtbvx8mJs/HSblaGbya+nlRIWh9aj4TiNsG1eS5FeIAmRpY0Nw30G8Oi2MoH7Po7Ky5v7tm2yfP4tf33yFnQvnknTntrnDFMVUvkeAJ0yYwKxZs3jvvff47LPP+PTTT4mNjWXlypV8/vnnBRrc119/zc8//8y8efPw9fUlMjKSV155BUdHR95++20ApkyZwo8//si8efOoXr06Y8eOpWPHjpw4cQIrK6sCjacwaFQKFJIAC1Ei6dN1JEdd5/7uq+hup2ZuVCmw8a+IfYvKaFxtzRugEGWAla0dTXs9xw2VJdVsrTj091/cjbvKgVXLiFqzkrotQmnSvQ/lq3iYO1RRjOQ7Af7999+ZOXMmXbt2Zfz48QwaNIiaNWvi5+dHRESEMTEtCHv37qVnz5507doVgGrVqrFo0SL2798PZI7+Tp06lc8++4yePXsCMH/+fCpVqsTKlSsZOHBggcVSWBQKRbYSCGl9JERJoEvMXLgiKSIOQ8rDhSus1dgFuWEX7I7KwcLMEQpR9ihVauq16YB/+86cj9rPgdXLuXb6BMe3b+b49s3UaNSExt37UKVuPVlKXOQ/AY6Pj6d+/foA2NnZce/ePQC6devG2LFjCzS4kJAQfv31V86cOUPt2rU5fPgwu3fv5n//+x8AMTExxMfH065dO+NjHB0dCQwMJDw8/JEJcFpaGmlpacbbiYmJAGi1WrRabYE+h9xknSPr36w2aGkZhiI5vyh4/76monT493XVxieTvDeO1CO3/lm4opwlNs3csG7ogsIi87+zevk5KLbkvVo6/fu6evoH4OkfQNzZUxxcu5LzUfu5cPAAFw4eoFKNWjTq2ouajYOkl3Axltt7tSDft/lOgKtUqUJcXBweHh7UrFmTjRs30qhRIw4cOIClZcEu1/nRRx+RmJhInTp1UKlU6HQ6vvjiC1544QUgMxkHqFSpksnjKlWqZLwvN5MnT2bChAk5tm/cuBEbm6Irmt+0aROAsQZ434FILl08V2TnFwUv65qKUsQAEUu3UemaNY73/pldnmSv5bpbKgnOd+B2HGw2Y4wi3+S9Wjrldl0Vtf3wcPUg4dRR7l84y/UL5/h72reobe1xqlMfh5q1Uaqlc0Rxlf2aJicnF9hx850A9+7dmy1bthAYGMioUaMYPHgwYWFhX
Lp0if/85z8FFhjAH3/8we+//87ChQvx9fUlOjqad999F3d3d15++eWnPu7HH3/M6NGjjbcTExOpWrUqHTp0wMGh8JvRa7VaNm3aRPv27dFoNJxaNAeAhk2CaORbo9DPLwrev6+pKPkMegMPDl7n5qZz2CQ//FWpAEsfZ2yauVGpqj01zRuieAryXi2d8npdk+8lcGTTOo5s/pvUpPvcitpL0ukj+LXrjF/7Ltg4OhVd0OKxcrumWZ/YF4Q8J8DTp09n8ODBfPXVV8ZtAwYMwMPDg/DwcLy8vOjevXuBBQbwwQcf8NFHHxlLGerXr8/FixeZPHkyL7/8Mq6urgBcv34dNzc34+OuX7+Ov7//I49raWmZ62i1RqMp0l+IWefLKoFApZZfyCVcUf8MicKRej6Be2suoI17gA1qFBZKbBu7YtfMXRauKCXkvVo6Pem6OlZwocWglwnqPYBjOzYTtXYl967Hs3/lUg6uXYVPqzY07tabcm6VizBq8TjZr2lBvmfznAB/+umnjBkzht69ezNs2DDatGkDQHBwMMHBwQUWUHbJyckolaadEVQqFXp9ZsJYvXp1XF1d2bJlizHhTUxMZN++fbzxxhuFElNBMxgMxhIIHVKUL4Q5ZdxO4d66GFKOZ7ZOUlipuFLxPv6DW2LpIImvEKWFxsqKhh270aB9Z87uCydy9Z/Enz/Lkc3rObJlA7UaB9GkRx/ca9c1d6iikOQ5AY6Pj2fp0qXMmTOH9u3b4+HhwdChQxkyZAhVq1YtlOC6d+/OF198gYeHB76+vhw6dIj//e9/DB06FMjsoPDuu+8yadIkvLy8jG3Q3N3dS8zyzYaHyTyANv9tmYUQBUCfmkHitssk7b6aOblNAbaBbtiEuhO5YxNKa1kzSIjSSKlU4R3cnNpBzbhy8hiRq5dz4eABzh0I59yBcNxr16Vxjz7UCghEoZS/0aVJnn+rW1tb89JLL/HSSy9x4cIF5s6dS1hYGBMmTKBdu3YMGzaMXr16Fejw9LRp0xg7dixvvvkmN27cwN3dnddee82k3/CYMWN48OABI0aMICEhgebNm7N+/foS0QMYQKfL+Od7g7y5hChKBr2BB5HxJG68iD4pc3axpZcTTl1roHG1lU4BQpQRCoWCqj71qepTn9tXLhG5ZgUnd23j2pmT/PXtF5Rzq0zjbr3xadkGtYW0OSwNnirjqlGjBhMnTiQmJoa///6b8uXLM2TIECpXLtiaGXt7e6ZOncrFixdJSUnh/PnzTJo0CYtsP3wKhYKJEycSHx9Pamoqmzdvpnbt2gUaR2Ey6HTG7zMMUgIhRFFJPZ/AjWmHSFh+Dn2SFnUFa8q/7EOFofVkAQshyrDyVTzo+Po7vDp9Nk179sPSxpa7cVfZNHM6M0cOJeLPxaTcL7jJWMI8nulzPYVCgVqtRqFQYDBID9unocueAEsNsBCFLuN2CgnrYkg11vmqcWjngV2QmyxVLIQwsivnTIvnhxDYuz9Ht24kau0q7t++yZ4/fmPfqqXUb92BgK49cazoau5QxVN4qgT48uXLzJkzh7lz53Lp0iVatmzJzJkz6du3b0HHV+rpM/4pgUjXP2ZHIcQzeVSdr0N7T1S20g1ACJE7C2sbArr2wr9jN86E7+LA6uXcvBjDofWrid6wFq+gZjTp3gfXml7mDlXkQ54T4PT0dJYvX87s2bPZunUrbm5uvPzyywwdOpQaNaR37dPSPxwB1qEkQxJgIQrck+p8hRAiL1RqNXVbtKZO81AuHT3MgdV/cvHIIc6E7+JM+C6q+tSncY8+VPdvLEstlwB5ToBdXV1JTk6mW7durF69mo4dO+ZoUSbyT/9wEpxeoSRdMmAhClT2fr4A6grWOHargZV3OfkDJYR4KgqFAk8/fzz9/LkRe4HINSs4vXcnl08c5fKJo5Sv4kHj7n2o27wVKllhrtjKcwL82Wef8eKLL+Li4lKY8ZQ5uozMEWC9QolWJwmwEAVB6nyFEEWhYrUadBn5Hs0HvsTBdas4smUDt69cYsPPU9mzeD4NO/fAr10nrGztzB2q+Jc8J8DZlw4WBcc4AowkwEI8qxx1vsqHdb7tpM5XCFF4HCq4EPrSqwT1HciRzes5+PdfJN29w66Fc9m3Ygn123SkUZeeOFSQQcTiQrq7m1lWDbBeoSRdEmAhnsoj63y71UBTSep8hRBFw8rWjqY9+xHQtScnd+8gcvVybl+5RNTalRxavxrvkJY07tabitVk7pS5SQJsZlldIPQo0WYYzByNECWP1PkKIYoblVpDvdB2+LZqS0x0JJF/LefyiaOc3LWNk7u24enXkCbd++JRv4H8njITSYDNTK+XGmAhnobU+QohijuFQkGNhk2o0bAJ8efPcmD1cs5G7OHikUNcPHIIl2o1aNKtN7WDW6BSS0pWlJ761U5PTycmJoaaNWuilov21PQyCU6IfNGnZpC49TJJe6TOVwhRcrjW9KL7ux+ScD2eg+tWcXTbRm7GXmDd9O/YtXg+AV16Ur9NByysbcwdapmQ72GS5ORkhg0bho2NDb6+vly6dAmAUaNG8dVXXxV4gKWd7uEkOB1SAyzE4xj0BpL2xxH/bSRJO6+AzoCllxOV3mlEuZ61JPkVQpQITpVcafPKa4yYMYdm/Qdj4+jE/Vs32T5/Fr++9Qq7Fs4l6e4dc4dZ6uU7Af744485fPgw27dvx8rKyri9Xbt2LFmypECDKwuyT4KTEWAhcpd6PoEbPx4iYfk59Ela1BWsKT/ElwpD68kkNyFEiWRt70BQ34EMnz6b9sNHUs6tMmkPHrB/1TJmjRzKhl9+4PaVS+YOs9TKd+3CypUrWbJkCUFBQSaF276+vpw/f75AgysLZCEMIR5N6nyFEKWd2sICv3adqN+mA+ei9hH513KunTnJsW2bOLZtEzUaNaFx9z5UqVtPJswVoHwnwDdv3qRixYo5tj948EAuzFMw1gCjRK+TLhBCgNT5CiHKHoVSiVeTYLyaBHP19EkiVy/nXGQEFw4e4MLBA7jW9KJx9754BQajVKrMHW6Jl+8EuHHjxqxdu5ZRo0YBGJPeWbNmERwcXLDRlQG67CPAUgIhyjjp5yuEEFDZuy6VvT/lzrWrRK1dwfEdW4g/f5Y1U7/CsZIrAV17US+0HRpLqycfTOQq3wnwl19+SefOnTlx4gQZGRn88MMPnDhxgr1797Jjx47CiLFUM9YAo0QrJRCiDEs9n8C91RfQxks/XyGEAHB2r0z74SNp1n8whzasIXrDWu5dj2fr7F/Yu3Qh/h260rBTN2wcHM0daomT7yK65s2bEx0dTUZGBvXr12fjxo1UrFiR8PBwAgICCiPGUs24EIZMghNlVMbtFG4tOMGtmUfRxj9AYaXGsVsNKv2nEdZ1nCX5FUKUeTaOTjTrP5gRM+bQZujrOFZyJfV+IhF/LmLmm6+wedYM7sZdNXeYJcpTNfCtWbMmM2fOLOhYyiTThTCkBliUHVLnK4QQ+aOxsqJhx240aN+Zs/vCiVz9J/Hnz3J4098c3ryeWo2DaNKjD+6165o71GIv
3wmwSqUiLi4ux0S427dvU7FiRXQPP9IXeZN9EpzUAIuyQOp8hRDi2SiVKryDm1M7qBlXTh4jcvVyLhw8wLkD4Zw7EI67tw9NuvehZkBTFErpmJObfCfABkPuo5RpaWlYWFg8c0BlTfY2aFICIUo7qfMVQoiCo1AoqOpTn6o+9bl95RKRa1Zwctc2rp0+warTJyjnVpnG3Xrj07INasnRTOQ5Af7xxx+BzBd71qxZ2NnZGe/T6XTs3LmTOnXqFHyEpZxOFsIQZUDGrYf9fE/8q59vsBsKlYxOCCHEsypfxYOOr79DswEvcujvvzi86W/uxl1l08zp7Pnjt8zSiY5dsbazN3eoxUKeE+Dvv/8eyBwB/uWXX1Cp/ulBZ2FhQbVq1fjll18KPsJSLmsSnA5ZCEOUPgatnsTNF7m/W+p8hRCiKNiVc6bF80MI7N2fo1s3ErV2Ffdv32TPH7+xb9VS6rfuQEDXnjhWdDV3qGaV5wQ4JiYGgNatW7N8+XLKlStXaEGVJaZLIcskOFF6aK8/4M6i08ZyB8va5XDqWl3qfIUQoghYWNsQ0LUX/h27cSZ8FwdWL+fmxRgOrV9N9P+3d+fxcZ3l3f8/Z/bRrJJGqyVb3vcl+57YieMsEBIStiSFJKXhARIKhBYanpZA2h8U2ofS0gCFQgjQLKQEspDFxrETkjiJYzveLXmTZVvWNtLMaPblnN8foznSaLFlW9Joud6v17w0OmeWIx1p9NU9133dr/yReRdfxgUfuo2KWXMKfagFcdo1wBs2bBiN45iycjXAmowAi0lC0zQim1sIPn8ILaVicJopvnUutoXS0kwIIcaa0WRi4RWrWHD5Spp2bmfz87/jyI5t1G/6M/Wb/kzt4mVccNOt1K04b0q9Rp9RG7Rjx47x3HPP0dTURDKZzNv3/e9/f0QObKrI1QBnpAZYTAJqNEXXM/uJ7crW+lrnFVPy0XkYXTL5QgghCklRFGYsW8GMZStoazzEey/8nvq3Xufo7h0c3b2D0prpnH/TrSy8/CqMpslfonbaAXj9+vV86EMfYtasWezbt48lS5bQ2NiIpmmce+65o3GMk5oshCEmi0RjkM4n6skEE2BU8FxXh/PyaSiGqTOiIIQQE0F53SxuvP8rXP6JT7H1xWfZsf4V/MeaeOXHP+DNJ3/FOTd8iOXX3oC1aPKWrJ329OsHH3yQv/mbv2Hnzp3YbDZ+97vfcfToUa666io++tGPjsYxTmr6QhhIDbCYmLSMRuhPR2j/rx1kgglMpTbKP7cc15U1En6FEGIcc/vKWPmpv+IzP3qUK+64G0dxCeGuTv78+C/56efvZuOvf06oo73QhzkqTnsEeO/evTzxxBPZO5tMxGIxnE4nDz/8MDfffDOf+9znRvwgJzN9IQxFFsIQE086EKfzyXqSjSEAis4tx3vzbAzWM6quEkIIUQA2h5MLb/4I533gZva+8RrvPf8M/mNNbHnh92x76TkWXHol5990K2UzZhb6UEfMaf+Vcjgcet1vVVUVBw8eZPHixQB0dHSM7NFNAf0XwtA0bUoVoYuJK7qzg67f7UeLp1GsRopvmUPROeWnvqMQQohxyWgys2TlahZfdQ2H33+P9557hqN7drLnzxvY8+cNzFh2DhfcdBvTly6f8FnltAPwxRdfzBtvvMHChQu58cYb+cpXvsLOnTt55plnuPjii0fjGCc1vQ0aBjQNMqqGyTixf6jE5KYmMwT/eIjIOy0AmGtdlH5iPqZSe4GPTAghxEhQFIVZ51zArHMuoOXgfjY//wz7336TIzu2cWTHNsrqZnHBTbcy7+LLMZom5jt+p33U3//+9wmHwwB861vfIhwO89RTTzF37lzpAHEGMn0mwQEkMyomWRlLjFPJExE6n9hHui0KCriuqsF97QxZzU0IISapytlzuelLXyPQ2sLWF59l54a1tDce4sUf/it/fuIxLrjpVs65/qZCH+ZpO+0APGvWLP26w+GQ1d/OUt+FMABSaQ2kY5QYZzRNI/L2CQJ/PARpDYPLTMnH5mObKwviCCHEVOCtqOTqe/4Pl3zkdravfZFtr7xAd0c7Tfv2TMgAfNrDNrNmzcLv9w/YHggE8sKxGB69BpjeEWAhxpNMJIX/V3sIPHsQ0hq2BSVUfPFcCb9CCDEFRRUrJ2Zdzt4r/5q3q67mYNVFhT6kM3LaI8CNjY364g19JRIJjh8/PiIHNZXkRoAVoxFAegGLcSV+MEDnU/WooWS2t++NM3FeWj3hJz8IIYQYHlXV2HE8yMb6NjbUt7PjWAAt17XVNh9T0FbQ4ztTww7Azz33nH79lVdewePx6J9nMhnWr19PXV3diB7cVJCrATYYJACL8UPLqIT+1ET3xqOgganMTsntC7BUOwt9aEIIIUZZVyTJ6/vb2VjfzusN7fgj+av+Lqpys2pBGSvnl3NOrbcwB3mWhh2Ab7nlFiA7M/Cuu+7K22c2m6mrq+P//b//N6IHNxVoPQthKCYJwGJ8SHfG6XxyH8mmbgAcF1TiuWkWBouxwEcmhBBiNKiqxu7mUM8obxvvHw2g9lmby2U1cflcH6vml3PV/DIq3BNz1LevYQdgVc0Gs5kzZ7J582Z8Pt+oHdSkFm7H+IfPc0lrM3AjmZ6FMBRD9lQk07IanCic6PY2up45gJbIoNiMFN86l6JlZYU+LCGEECMsGE3x5wPtbNjXzmsN7XSEE3n7F1S6WDm/nJXzyzhvRjHmSdbt57RrgA8fPjwaxzF1GIwYDqylHEipaX0SnFFqgEUBqckMgWcPEt3SCoBlhpuST8zHVDzx/8sXQgiR7eaz50SIjfXtbKxvY2tTgEyfYV6Hxchlc3ysWlDOVfPKqPZO7t7uww7AmzZtwu/388EPflDf9qtf/YqHHnqISCTCLbfcwg9/+EOsVuuoHOikYXX3Xo+H9ElwRpMJ0hKAxdhLHg9ne/t2xLK9fVfV4r5mBoosyCKEEBNaKJ7izf0d2dDb0EZrKH+Ud265k1ULylk5r4zz60qwmCbXKO/JDDsAP/zww6xcuVIPwDt37uTTn/40d999NwsXLuRf/uVfqK6u5pvf/OZoHevkYDShmR0oqQgkgqi5SXBGI6QhmZYALMaGpmmE32wm+NJhyGgY3RaKPz4f22xvoQ9NCCHEGdA0jYbWMBvq29iwr40tR7pI9xnltZuNXDanVC9tqCkuKuDRFtawA/D777/PP/7jP+qfP/nkk1x00UX87Gc/A6C2tpaHHnpIAvBw2NyQikA8pLeUM5hMkJA+wGJsZMJJup5uIF7fBYBtUSnFt83F6DAX+MiEEEKcjnAizZsHOvTShhPBeN7+WWUOVs4rZ9WCMi6oK8FmlgnNcBoBuKuri4qKCv3z1157jRtuuEH//IILLuDo0aMje3STlc0D3SdQEsFBaoBlEpwYXfH9XdnevuEUmAx4PzgTx0VV0ttXCCEmAE3TONAWZmN9Oxvq29jc2JmXHawmA5fOLu0pbShneunUHeU9mWEH4IqKCg4fPkxtbS3JZJKtW7fyrW99S9/f3d2N2SyjR8OhWT0
<remainder of the base64-encoded PNG data for the simulation figure omitted>",
      "text/plain": [
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", + "\n", + "def plot_simulation(results):\n", + " \"\"\"\n", + " Plot the state trajectories from the simulation results.\n", + "\n", + " Parameters:\n", + " results (dict): Simulation results from run_simulations.\n", + " \"\"\"\n", + " # Extract the simulation results for the specific condition\n", + " sim_results = results[simulation_condition][1]\n", + "\n", + " # Create a new figure for the state trajectories\n", + " plt.figure(figsize=(8, 6))\n", + " for idx in range(sim_results[\"x\"].shape[1]):\n", + " time_points = np.array(sim_results[\"ts\"])\n", + " state_values = np.array(sim_results[\"x\"][:, idx])\n", + " plt.plot(time_points, state_values, label=jax_model.state_ids[idx])\n", + "\n", + " # Add labels, legend, and grid\n", + " plt.xlabel(\"Time\")\n", + " plt.ylabel(\"State Values\")\n", + " plt.title(simulation_condition)\n", + " plt.legend()\n", + " plt.grid(True)\n", + " plt.show()\n", + "\n", + "\n", + "# Plot the simulation results\n", + "plot_simulation(results)" + ] + }, + { + "cell_type": "markdown", + "id": "f57c07211b781ab5", + "metadata": {}, + "source": "`run_simulations` enables users to specify the simulation conditions to be executed. For more complex models, this allows for restricting simulations to a subset of conditions. Since the Böhm model includes only a single condition, we demonstrate this functionality by simulating no condition at all." + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "2f2e1c7023ad261b", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:58.505973Z", + "start_time": "2024-11-19T09:50:58.501775Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llh, results = run_simulations(jax_problem, simulation_conditions=tuple())\n", + "results" + ] + }, + { + "cell_type": "markdown", + "id": "0b729e1b-3c75-4a87-a33b-0a54622609e7", + "metadata": {}, + "source": [ + "## Updating Parameters\n", + "\n", + "As next step, we will update the parameter values used for simulation. However, if we attempt to directly modify the values in `JAXModel.parameters`, we encounter a `FrozenInstanceError`." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "75df1ab9e8a738a0", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:50:58.685750Z", + "start_time": "2024-11-19T09:50:58.575034Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error: cannot assign to field 'parameters'\n" + ] + } + ], + "source": [ + "from dataclasses import FrozenInstanceError\n", + "import jax\n", + "\n", + "# Generate random noise to update the parameters\n", + "noise = (\n", + " jax.random.normal(\n", + " key=jax.random.PRNGKey(0), shape=jax_problem.parameters.shape\n", + " )\n", + " / 10\n", + ")\n", + "\n", + "# Attempt to update the parameters\n", + "try:\n", + " jax_problem.parameters += noise\n", + "except FrozenInstanceError as e:\n", + " print(\"Error:\", e)" + ] + }, + { + "cell_type": "markdown", + "id": "b91941cf707704c3", + "metadata": {}, + "source": [ + "The root cause of this error lies in the fact that, to enable autodiff, direct modifications of attributes are not allowed in [equinox](https://docs.kidger.site/equinox/), which AMICI utilizes under the hood. 
Consequently, attributes of instances like `JAXModel` or `JAXProblem` cannot be updated directly — this is the price we have to pay for autodiff.\n",
    "\n",
    "However, `JAXProblem` provides a convenient method called `update_parameters`. The caveat is that this method creates a new JAXProblem instance instead of modifying the existing one."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "feb125b6-4f84-427c-b870-421a328eee81",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-11-19T09:51:00.631866Z",
     "start_time": "2024-11-19T09:50:58.702698Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "<base64-encoded PNG data of the updated-parameter simulation figure omitted>
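The functional-update pattern described above can be made concrete with a short sketch. This is not part of the patch: it assumes the `jax_problem`, `noise`, and `run_simulations` objects defined in the cells above, that `JAXProblem.update_parameters` accepts the new parameter vector as its argument, and that `JAXProblem` is a regular equinox module so that `equinox.tree_at` applies; the variable names `new_parameters`, `updated_problem`, and `updated_problem_alt` are illustrative only.

import equinox as eqx

# Perturbed parameter vector built from the noise generated above (assumption:
# noise and jax_problem are the objects defined in the preceding cells).
new_parameters = jax_problem.parameters + noise

# Out-of-place update via the convenience method; a new JAXProblem is returned
# and the original instance stays untouched.
updated_problem = jax_problem.update_parameters(new_parameters)

# The generic equinox route does the same thing: tree_at replaces the selected
# leaf and copies the rest of the pytree unchanged.
updated_problem_alt = eqx.tree_at(
    lambda p: p.parameters, jax_problem, new_parameters
)

# Re-run the simulations with the perturbed parameters.
llh, results = run_simulations(updated_problem)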
6vFhUV3VAsF2sQCbDRaGT06NEoinLZ9bGr67nnnmPatGm22/n5+QQGBnLLLbfYkuvaZDQa2fj5KgC8ff0YPrxTpfv3rP6dzA2rCAhuxfDhw2s9HnHjjEYjK1euZPDgweh0MsLTWMh1bXzq+prmnDpJ8oY1HNiwlsLcHPJS9pKXspfmrdsS3n8gYb364eDsXOtxNHYXX1ez2czx48dxcXHB0dHR3qE1SN988w1Tp05l0aJFFBQUXHdJ6Z9//klpaSmjRo3ixx9/ZMaMGdd8TElJCQaDgd69exMfH1/pvVrxiX1NqPcJcEXye/ToUdasWVMpQfXz8yMrK6vS/iaTiezsbPz8/K54TAcHBxwcLq2t1el0dfZLzrYSnEW55Jyl5V0sXDyayS/dBqYu/w+JuiPXtfGpq2vqG9wK3+CJ9B87niO7trN37UrStm3h9OFDnD58iD+/WEzbHr2IHDCYoIgoVGq7Nmdq8HQ6HWq1GpVKhVqtRq1WWz9OL7VPhw6tg0OVJ0PGxsYSFRWFo6Mjn3zyCXq9nkceeYTZs2dz5MgRQkJC2LFjB506dQIgNzeXZs2asXbtWmJjYwHYt28fzzzzDPHx8SiKQqdOnViyZAlt2rRhwoQJ5Obm0rlzZ9555x1KS0u59957efvtt9Hr9bY4CgoK+Pbbb0lKSuL06dN8+umnPP/887b7161bx4ABA/j555957rnnSE1NpVOnTnzyySdERkZWek6LFy/m3nvvJSYmhieeeIJnn332mq9DxfXTli86c/F7tSbfs/U6Aa5Ifg8ePMjatWvx8vKqdH+vXr3Izc1l27ZtdO3aFYA1a9ZgsViIjo62R8hVprnKUsi2HsDSAUIIIRoFtUZD6y7dad2lO0X5eezfsI69a1dw9vhRDmxcz4GN63Hz8SUiZhARMQNx9732R8Wiakylpbw9/m67nPvxpd+jq8Yo9NKlS5k2bRqJiYls3ryZCRMm0KdPH0JDQ6/52JMnT9K/f39iY2NtA4YbN2601dICrF69GkdHR9atW8eRI0d48MEH8fLy4uWXX7bt8+2339K+fXvCwsIYN24cTz75JM8999wlifz06dN566238PPz4/nnn2fEiBGkpqbaktTz58/z3XffkZiYSPv27cnLy2PDhg3069evyq9HbbJrAlxQUMChQ4dst9PT09m5cyeenp74+/tz9913s337dn7++WfMZrOtrtfT0xO9Xk94eDhDhw5l8uTJfPDBBxiNRqZOncqYMWPqdQcIuPpCGNIDWAghGi8nN3e63noHXYbfzunDh9i7diUHNq4n/0wWm7//ks3ff0lQZEciBwymbY9e0g2oCYmKimLWrFkAhIaG8s4777B69eoqJcDvvvsu7u7ufP3117YktF27dpX20ev1LFq0CCcnJyIiIpg7dy7Tp0/n3//+N+ryTx/i4uIYN24cAEOHDiUvL4/169fbRpkrzJo1i8GDBwPWxL1ly5b8+OOPjB49GoCvv/6a0NBQIiIiABgzZgxxcXGSAAMkJSUxYMAA2+2Kutzx48cze/ZsfvrpJwDbcH+Fi4f7v/jiC6ZOncrAgQNRq9WMGjWKt99+u07ivxHaqyyFLMsgCyFE46dSqfBrE4pfm1BiHpjEoa0J7F27kmN7dnJs7y6O7d2Fg5Mz7fvEEBk7iOZtQqW38HXQOjjw+NLv7Xbu6oiKiqp029/f/5JSzyvZuXMn/fr1u2qZQMeOHStN9u/VqxcFBQUcP36c4OBgUlJS2LJlCz/++KM1fq2We+65h7i4uEsS4F69etm+9/T0JCwsjP3799u2LVq0yJZIA4wbN46YmBgWLlyIq6trlZ5TbbJrAhwbG4uiXJoAVrjafRU8PT3r/aIXl3O1EWBZBlkIIZoWnd6B8D4xhPeJIS/rNPvWr2bf+lXkn8li18pf2bXyV7wDg4kcMJjwfgPk90M1qFSqapUh2NNfk1eVSoXFYrGNzl6cF/21JZjBYLjh88fFxWEymSp9iq4oCg4ODrzzzju4V7E0Mzk5mYSEBLZs2cIzzzxj2242m/n666+ZPHnyDcd6o6Ta3k6uVgMsJRBCCNF0ufs2p/ff7uWhtz/hbzNfJrxvLFqdnrPHj7Lu00/48JEH+Gn+K6Rt24LFbLZ3uKIO+Pj4AJCRkWHbtnPnzkr7REVFsWHDhqv2yt21axfFxcW22wkJCbi4uBAYGIjJZOLTTz9l/vz57Ny50/a1a9cuAgIC+OqrryodKyEhwfZ9Tk4OqamphIeHA9ZEun///uzatavSsaZNm0ZcXNx1vw41qV5PgmvMtOrLrwRnNhkpKbR2gZAEWAghmi6VWk1QZEeCIjtSMrGAlE3x7F27ksy0gxzcsomDWzbh7NGMDjEDiYwdhGdAS3uHLGqJwWCgZ8+evPbaa4SEhJCVlcULL7xQaZ+pU6eycOFCxowZw3PPPYe7uzsJCQn06NGDsLAwwNpjd9KkSbzwwgscOXKEWbNmMXXqVNRqNT/99BM5OTlMmjTpkpHeUaNGERcXxyOPPGLbNnfuXLy8vGjevDn/+te/8Pb2ZuTIkRiNRj777DPmzp17SVeIhx56iP/85z/s27fPVhtsLzICbCe2GuC/JMDF5T3uVGo1js4udR2WEEKIesjR2YWOg4dz3ysLGP/GO3S9dSQGN3cKc3PY+r/vWfzPR/hq5nT2rFlBWXHNLRYg6o9FixZhMpno2rUrTz75JC+99FKl+728vFizZg0FBQXExMTQtWtXPv7440plFQMHDiQ0NJT+/ftzzz33cPvttzN79mzAOmo7aNCgy5Y5jBo1iqSkJHbv3m3b9tprr/HEE0/QtWtXMjMzWb58OXq9np9++olz585ddjXe8PBwwsPD68UosIwA24mm/E+Pv5ZAXFz/K/0ghRBC/JV3UCtiH3iIfveO5/D2rexdu5L0nds4lbqfU6n7WbPkQ8J69iNywCBatI+QiXMNxLp16y7ZtmzZMtv34eHhttXRKvx1rlRUVBR//PHHVc8zZ86cy66Gu3z58is+pkePHrZzVcTZt29f9u7de8m+o0aNwnyV0pzk5OSrxldXJAG2kyuOAMsEOCGEEFWg0eoI7dGb0B69KcjJJjl+DXvXrSLn1An2rV/FvvWr8PDzJzJ2MB1ibsbV09veIQtRb0gCbCe2LhB/GQG2tUDzaFbXIQkhhGigXJp50uOOu+l++yhOpR5g37qVHNi0gdzMDP78+lM2fvM5rTp2JnLAYFp3jUYrKxuKJk4SYDu50AfYgqIoto+oimQEWAghxHVSqVS0CAunRVg4A8ZPITVxI3vXruTE/r2k79xG+s5tOLq6Ed43hsjYwfi2am3vkEUdWbJkSY0c51otbBsKSYDtpKINmqKA2aKgLd8gyyALIYSoCTpHRyJiBhIRM5CcjJPlvYVXU5B9jh2/LWfHb8vxDWlD5IDBtO8Tg8HF/osTCFFXJAG2E+1F89vKzBa05bPiistLIAxuHnaISgghRGPUzL8Ffcc8QO/R93F09072rl3Joa0JZKWnsSY9jfWfxdG2W08iBwwm6KaOqNUae4csR
K2SBNhOtBdNyjWaFNBbv68ogXCWHsBCCCFqmFqtIaRTV0I6daUoP48DG9ezd+1KzhxNJ2XzBlI2b8DVy4eI2IFExAzCo7mfvUMWolZIAmwnahWoVNYSiFKzGbBOSLBNgpMEWAghRC1ycnOny7Db6TLsdk6np7F37UoO/LmO8+fOkPDfr0n479cERkQRGTuI0Oje6BwaxnLCQlSFJMB2olKBTqOmzGTBaL5QTF4ok+CEEELUseYhbWge0oaYcRM5lJTA3rUrObpnJ8f37eb4vt2sXvQB7Xv3JyJ2EP6hYdJbWDR4kgDbkb48Aa7oBawoyoU+wDICLIQQoo5p9Xra9+5P+979yT+bRfL6Nexdv4q805nsXv07u1f/jmeLQCIHDKZDvwE4S8tO0UDJUmN2pCvv/FCxGlxZcRFmkwkAg3SBEEIIYUdu3r70HDWGSW9+xOgXX6FDvwFo9Q5knzxO/OeL+PDR8Sx74yUObU2w/e4SjcPmzZvRaDTceuut1/X4devWoVKpbF8Gg4GIiAg++uijGo70+skIsB3py1tBVIwAV0yA0xsM6PQO9gpLCCGEsFGp1QRGRBEYEcXNEx8hZfMG9q5dScbBFNKSEkhLSsDJ3YMO/W8mMnYQXi2D7B2yuEFxcXE89thjxMXFcerUKQICAq7rOCkpKbi5uVFcXMzy5ct59NFHadOmDQMHDqzhiKtPRoDtSFfe+qy0PAG+UP/rYaeIhBBCiCtzcHImauBQ7n1pPhPmv0e3EXfh5O5BUV4uSct/YMlTf+fLF55i9+rfKS0qsne4DUpsbCyPP/44M2bMwNPTEz8/P2bPng3AkSNHUKlU7Ny507Z/bm4uKpWKdevW2bbt27eP2267DTc3N1xdXenXrx9paWkATJgwgZEjRzJnzhx8fHxwc3PjkUceoaysrFIcBQUFfPPNNzz66KPceuutlyygUTG6+8svvxAVFYWjoyM9e/Zk7969lzwnX19f/Pz8CAkJ4fHHHyckJITt27fXyOt1oyQBtiN9eQJcUQJRnFfeA1jKH4QQQtRzXi2DiBk3kSnvLeGO6TNp060nKrWajIMprPzoHT54+H5+e2c+x/ftRrFY7BanoihYysx2+aruimlLly7F2dmZxMRE5s2bx9y5c1m5cmWVHnvy5En69++Pg4MDa9asYdu2bUycOBHTReUpq1evZv/+/axbt46vvvqKH374gTlz5lQ6zrfffkv79u0JCwtj3LhxLFq06LLPY/r06cyfP5+tW7fi4+PDiBEjMBqNl41NURR+//13jh07RnR0dDVekdojJRB2dEkJRH4uID2AhRBCNBwarZa23aJp2y2awtwc9m9Yy561K8k+eZzkDWtJ3rAW9+Z+RMYMokPMQNy8feo0PsVo4dSLm+r0nBUC5vZGpa/6oiJRUVHMmjULgNDQUN555x1Wr15NaGjoNR/77rvv4u7uztdff41OZ22t2q5du0r76PV6Fi1ahJOTExEREcydO5fp06fz73//G7XampPExcUxbtw4AIYOHUpeXh7r168nNja20rFmzZrF4MGDAWvi3rJlS3788UdGjx5t26dly5YAlJaWYrFYmDt3Lv3796/y61GbJAG2I/1fJsEVlY8ASwmEEEKIhsjZoxndRtxF19vuJPNQqrW38Kb15J3OZOO3n7Pxuy8IvqkTkQMG07ZbT7R6vb1DrleioqIq3fb39ycrK6tKj925cyf9+vWzJb+X07FjR5ycnGy3e/XqRUFBAcePHyc4OJiUlBS2bNnCjz/+CIBWq+Wee+4hLi7ukgS4V69etu89PT0JCwtj//79lfbZsGEDrq6ulJaWsmXLFqZOnYqnpyePPvpolZ5TbZIE2I7+OgJsqwGWEgghhBANmEqlwj80DP/QMGLHP8TBLZvZu3Ylx/ft5ujuHRzdvQNHZxfa940lcsBgmoe0qb1YdGoC5vauteNf69zV8dfkVaVSYbFYbKOzF5ci/LXcwGAwXGeUF8TFxWEymSpNelMUBQcHB9555x3cq5mfhISE4OHhAUBERASJiYm8/PLLkgA3dRWT4MpsNcC5ABhkBFgIIUQjoXNwpEO/AXToN4Dc05nsW7+KfetWc/7cGXb+8TM7//gZn+AQIgcMJrxvLAZXtxo9v0qlqlYZQn3k42MtG8nIyKBz584AlSbEgXX0eOnSpRiNxiuOAu/atYvi4mJbspyQkICLiwuBgYGYTCY+/fRT5s+fzy233FLpcSNHjuSrr77ikUcesW1LSEggKMja8SMnJ4fU1FTCw8Ov+jw0Gg3FxcVVf+K1SBJgO6qYBHehBthaAuFc/teSEEII0Zh4NPejz+hx9Lp7LMf27GLvulUc2rqZM0fTWbvkI+I/X0Sbbj2JHDCY4KhOqNUNO3GtKQaDgZ49e/Laa68REhJCVlYWL7zwQqV9pk6dysKFCxkzZgzPPfcc7u7uJCQk0KNHD8LCwgAoKytj0qRJvPDCCxw5coRZs2YxdepU1Go1P/30Ezk5OUyaNOmSkd5Ro0YRFxdXKQGeO3cuXl5eNG/enH/96194e3szcuTISo/LysqipKTEVgLx2Wefcffdd9fOi1RNkgDb0YWFMKwfaRTJMshCCCGaALVaQ6uOXWjVsQvFBec5sHE9e9euJCs9jdSEP0lN+BMXTy8iYgYSETuIZn7X14e2MVm0aBGTJk2ia9euhIWFMW/evEojtV5eXqxZs4bp06cTExODRqOhU6dO9OnTx7bPwIEDCQ0NpX///pSWljJ27Fhbq7W4uDgGDRp02TKHUaNGMW/ePHbv3m3b9tprr/HEE09w8OBBOnXqxPLly9H/paa7IvHWarUEBgby8MMP285nb5IA29GFGmAzcFECLF0ghBBCNBEGF1c6D7mNzkNuI+vIYfatW0Xyn+soyD5H4o/fkvjjt7RoH0HkgMG069kHveON17rWRxf3862wbNky2/fh4eFs2lS5m8Vf25NFRUXxxx9/XPU8c+bMuaT1GcDy5cuv+JgePXrYzlURZ9++fS/b+xesPY2r2wKurkkCbEc6Wx9gBbPJSElhAQAGGQEWQgjRBPm2ao3vhCn0u+9BDm9LZO/alRzZtYOTB/Zx8sA+1iz+kLBe/YgcMJiAdu1RqVT2Dlk0UJIA25FtBNhsoTg/H7AuOWlwcbVnWEIIIYRdaXU62vXsS7uefTmffZbk9WvYu24luZkZ7F27gr1rV9AsoCWRsYPo0P9mHOT3pqgmSYDtqKIGuMxksU2AM7i6oVLLAn1CCCEEgKunN9F3jqbHyL9x8sA+9q5dRUrCBnJOnWDDl0v48+tPCe7YhRIXD8wmI6jkd+jl/HVJ4+vVEMobqkISYDvSX9QGzVhSYt3WSGubhBBCiBuhUqloGR5Jy/BIbn5wCimb/2TvulWcSknmyI4kAOJ2JBIxaCjekV3sHK2o7yQBtiPdRW3QLGbrWt1qrVwSIYQQ4mr0BiduuvkWbrr5FrJPnWD36j/YufoPSgrOsz9+LV1atCIn8xRuHp44urqg0cjvVlGZ/I+wo4oaYKPZgtls7QSh0UjPQyGEEKKq
PANa0mfMA+S6eBIR6M+hbVtBBaayMs6fO0NB9lkcnJxwdHXDwclZJs4JQBJgu9LLCLAQQghRI1RqNa06diUwshOH09JwaeaJpbQUY2kJJYWFlBQWotZoMLi44ujqhs7Bwd4hCzuSbMuOdNrySXBmC2ZTeQIsI8BCCCHEDVGp1Rhc3XD0ccRYWkpJQT7F589jMZspzMulMC8XnYODdR8XV/nd2wRJAmxHlUaAyxfDUEudkhBCCFFjdA4O6Bx8cPH0prSokJLz5yktKsRYWoqx9Aznz53FwdkZg6sbeoOTlEg0EZJt2dGFhTAulEBopARCCCGEqHEqlQpHZxccnV0wm02UFBRQfD4fU2kpJQUFlBQUoNZqMbi4YnB1Q/uXZX2FlaIoYFZQzApYLKBWo3ZoeCPo0izPji4shWzBUj4JTmqAhRBCiNql0WhxdvfAu2UQXi2DcHL3QK3RYDGZKMzN4ezxo5w7eZyi/Dzb7+fGTlEUJowfj0qlsn15eXoxZOAtbFu/BePpIspOFWA8WYAxsxDTmSJM50qwFBovOdaRI0cqHUev19O2bVteeumletNDWLItO6q8FLLUAAshhBB1raJEwtXLi9KiIorP51tLJEpKMJaUcP7sGRycXTC4ujbYEgnFUj5ia7agWCpGcC22kdyK0VxLkYlbYgfx8fz3ATh95jSz3vg3d947ikOJyZUPqlGj0qhQaa/8eqxatYqIiAhKS0v5888/eeihh/D392fSpEm1+XSrREaA7Uh/0UpwlvIEWHoVCiGEEHVPpVLj6OxCM78AfIJCcPXyRqvXoygKJQXnyck4xdljRziffRZTWVmNnz82NpbHH3+cGTNm4OnpiZ+fH7NnzwYujKju3LnTtn9ubi4qlYq1q9dgKTVjKTaye+tObh06HDdXN1xdXOkb3Zv9m3ZjPFXAhHvvZ+QdI5kzcxZ+rQLwDPTl0SenUlpQDGYLlA/MOjg44N/CH/+gFnTu0ZUZ02dw/NQJciwFaH2dOFl6BodAN35Yv5yYkYNw9nUnMjKS9evXX/KcvLy88PPzIzg4mPvuu48+ffqwffv2Gn/trodkW3Z08Upw0gZNCCGEqHmKomA0Xvox/bXonJzRGpwwlZVSUnCekoLzGItNlBSXkJt1Bp2jI46urjg6OV/x01udTletEeOlS5cybdo0EhIS2LxpEw9OnEiv7j1pG9IGAFN+KcazxWBWKMspsG7LLsF0poiTGacYcMvN9O/Vjz++Xo6rqyubtyZiMlrzC1SwduN6HJ0MrFr+B0dPHOehf0zB29+Hl//9MiqNCrWzDrVRi665MwAFBQV8/eO3tG3bFp+WzVGr1ajKc5fp06fz5ptv0qFDB/7zn/8wYsQI0tPT8fLyuuxzS0pKYtu2bTzwwANVfj1qk2RbdnRxDbAshCGEEELUPKPRyCuvvGKXcz///PPo/zKZ7krlCIrRwk3hkTz38NNgttBq0CjeiVrIql9XEDJuovWxxSaUkvKE1nxRLa1GzQeff4K7uztfLv0cvaMDqFV06BEFGhUqjRq1kw69g54lXyzFycmJjnRlbnYm06dP5+XXXkGltuYkP//8My4uLgAUFhbi7+/Pzz//jFpduWhg6tSpjBo1CoD333+f33//nbi4OGbMmGHbp3fv3qjVasrKyjAajUyZMkUSYPGXLhAmGQEWQgghGhNTfilqteVCva1FAcsVJoFZFCLDOoDJYtvk19yPM9lnUTlYcwOVsw6NhwMqjRqt3lqGofU2oPd3Zs/BffSL6Y/Bx/WK8XTs2BEnJyfb7V69elFQUMDx48cJDg4GYMCAAbz/vrUGOCcnh/fee49hw4axZcsW2z4Vj62g1Wrp1q0b+/fvr3S+b775hvDwcIxGI3v37uWxxx6jWbNmvPbaa1V5+WqVZFt2ZBsBloUwhBBCiFqh0+l4/vnnr/vximJNWq0TxS5MGKuYPKaYzChmBZVyaamDpljBorpM+YVKhUqjKh+dVYFGDVo1Di4GtD4Ga5mBRoXGoAMHNQ7e1qRV46xD42IdUTYXWMoPZT2vwWC47ud4MWdnZ9q2bWu7/ckn1pHljz/+mJdeeqlaxwoMDLQdKzw8nLS0NGbOnMns2bNxdHSskXivl0yCsyPdxZPgzLIQhhBCCFHTKtpwXe5Lp9WhVWnRKmo0JhWaUlAXWVCfN6PKNUG2EdXZMlTnjKhzTajzzWgKLWiKQVumQmdWo1fpcNDq0et06HU6tDoNai2gMVNqKabYXECZthTcNGibO6ELcEEX4IzOzxmdjxNaTwNadwdrRwWdGrWDFpVWXal22MfHB4CMjAzbtosnxAFERUWxYcOGq9Y779q1i+LiYtvthIQEXFxcCAwMvOrrp1arKz2u4rEVTCYT27ZtIzw8/KrXQqPRYDKZKKuFSYTVZdcEOD4+nhEjRhAQEIBKpWLZsmWV7lcUhRdffBF/f38MBgODBg3i4MGDlfbJzs7mvvvuw83NDQ8PDyZNmkRBQUEdPovrp5eFMIQQQoiap4BismAps3ZHMBeUYcorxZRdgvFMEcbMQmtP21MFmE4XYjpTjDm7BHNeKZYCI5ZiE0qZ2VqOUFGxoLYmqCpHLWonHWo3PRoPB7ReBrS+Tuj8ndG1cMGxpTsOfq7gqqFUVUyxqYCCwhyyz5zkXMZxCvOybWWPVWUwGOjZsyevvfYa+/fvZ/369bzwwguV9pk6dSr5+fmMGTOGpKQkDh48yGeffUZKSoptn7KyMiZNmkRycjK//vors2bNYurUqZXqe0tLS8nMzCQzM5P9+/fz2GOPUVBQwIgRIyqd79133+XHH3/kwIED/OMf/yAnJ4eJEydW2ufcuXNkZmZy4sQJfvvtN9566y0GDBiAm5tbtZ5/bbBrtlVYWEjHjh2ZOHEid9111yX3z5s3j7fffpulS5cSEhLCzJkzGTJkCMnJybah8/vuu4+MjAxWrlyJ0WjkwQcfZMqUKXz55Zd1/XSqraIGuNQkXSCEEEKI6jAXGq0LMmQVUZpRQJuDLpw7uocySxmmHg4YnYrRaKuwiMVlyhFU6gu3K8oRqtPNQaPT4dLME2ePZhhLSig+n09JYQFmo5GC7GwKsrPRG5wwuLri4OxyyQSzy1m0aBGTJk2ia9euhIWFMW/ePG655Rbb/V5eXqxZs4bp06cTExODRqOhU6dO9OnTx7bPwIEDCQ0NpX///pSWljJ27Fhbq7UKv//+O/7+/gC4urrSvn17vvvuO2JjYyvt99prr/Haa6+xc+dO2rZty08//YS3t3elfQYNGmR9PTQa/P39GT58OC+//HKVX8faZNdsa9iwYQwbNuyy9ymKwptvvskLL7zAHXfcAcCnn35K8+bNWbZsGWPGjGH//v38/vvvbN26lW7dugGwcOFChg8fzv/93/8REBBQZ8/lelTUABvNFswm6QIhhBBC/JWlxITxtHXU1nS6COPpQoyni7AUVP6o3wM9ppxCLK4qwMG68eIEVqOydjrQXJzwqkFFrS1uoVKp0BsM6A0GXC0+lBZal18uKy6mrLiIsuIiVOozOLq4sOL
339A5VK6LvfiT8fDwcDZt2lTp/r+uqhYVFcUff/xx1ZjmzJnDnDlzLnvfkiVLWLJkSZWeW3h4OImJiZe9r1WrVvVmxbcrqbfDjenp6WRmZtr+egBwd3cnOjqazZs3M2bMGDZv3oyHh4ct+QXrXxtqtZrExETuvPPOyx67tLSU0tJS2+38/HzA2irlenoFVlfFOdSKNektM1lsTbUVlapOYhA1q+KaybVrXOS6Nj5yTesvpcyMKasYU1aR9d/T1n8t+VeuF9U0c0Dj64Ta24GUjDQiukehuGkpKjmD1teA9hoTrRQUa7lEHSVrDs4uODi7YDGZKC7vLWw2GinOz6c4Px+NTofB1Q1HF5canxOkKAqKomCxWK6981VUPN5isdzwsa50fEVRMJWXiVz8Xq3J9229TYAzMzMBaN68eaXtzZs3t92XmZmJr69vpfu1Wi2enp62fS7n1VdfvexfPytWrKjUHqS2bdwQD2ixKNZVXgAOHkrj3K+/1lkMomatXLnS3iGIWiDXtfGRa2o/Kgs4FmswFGls/xqKNDiUXvkT0DK9mWKDmWInMyVO5f8azFgqHmIBmsP6Y1vRarX4+flRUFBQLyZbXZFag9bVHY3JiLm0FHNZaXmJxDkKss+h1unRODig1jvUyAi10WjEZDLZBv2uV8U8q8LCwhs+1uWUlZVRXFxsG+2++L1aVFRUY+eptwlwbXruueeYNm2a7XZ+fj6BgYHccsstdVKYbTQaWblyJYMHDeD5pA0A+Pn5cSg9lQ4REXQZPrzWYxA1y3ZNBw9Gp9PZOxxRQ+S6Nj5yTeuOYrZgPldSaTTXlFWE+VzJhYllf6F21qFtbkDj62QdwW3uhNbHgNpw9XTl4utqNps5fvw4Li4udm+1VR2KopSXSJzHWFKMxViGxViGWl2Ig4srBhdXtA4O1338zz//vEbijIyMtC3eVRtKSkowGAz07t2b+Pj4Su/Vmky4620C7OfnB8Dp06dtxdgVtzt16mTbJysrq9LjTCYT2dnZtsdfjoODAw6X+U+k0+nq9Aei00UxWMzWjxF0er38UG7A6vr/kKgbcl0bH7mmNUexKJizS6y1uZlFGLPK63XLl+y9HJVBi665k/XLz9naRaG5k63H7fXS6XTW5XrLW3dVZXJZfeLk5o6TmzsmYxkl589TfD4fs8lEcX4exfl5aPX68hIJ10bbNari+mnLn9/F79WafM/W21cvJCQEPz8/Vq9ebUt48/PzSUxM5NFHHwWsq5Dk5uaybds2unbtCsCaNWuwWCxER0fbK/Qqq+gDDGAyldcFSx9gIYQQ9ZCiKJhzSzGeLsJ0cbJ7uqjS6mUXU+k16Jo7WfvfNndG52dNdNWu+lqbeNYYaHV6XDy9cG7mSVlxMSXlXSRMZWWcP3eW89lncTA4Y3B1w8HJybaMsag6u2ZbBQUFHDp0yHY7PT2dnTt34unpSVBQEE8++SQvvfQSoaGhtjZoAQEBjBw5ErDOQBw6dCiTJ0/mgw8+wGg0MnXqVMaMGVPvO0BA+exQjZoyswWTUVaCE0IIYX+KomA5b7R1W7B1X8gqQim9wkffWjU6X4MtydU2d7aO6Lo7WFuK2UF970JQFSqVCgcnJxycnHA1mykp7yJhLCmhtKiQ0qJC1BoNji6uGFxd0dZQvbA9VUysq+3nYdcEOCkpiQEDBthuV9Tljh8/niVLljBjxgwKCwuZMmUKubm59O3bl99//71STc8XX3zB1KlTGThwIGq1mlGjRvH222/X+XO5XnqtutJSyI31Iw0hhBD1j7nQaB3NPV1kS3aNp4tQiq+wUINGhdbbYF3FzNfJluxqPR3tluj+lU6nQ6VScebMGXx8fBp8Qngxtd4BZy8fTGVllBZaE2BjWRml2efIyz6HVqfHwdkZByfnBpdPKIpCWVkZZ86cQa1W13qJkl1fndjY2Kv+haZSqZg7dy5z58694j6enp4NYtGLK6kogzDLQhhCCCFqia2X7ulCTJlX7qVrowKtl8FavuDnbKvX1XobrL1z6zGNRkPLli05ceKErcNSY6UoCmajEWNJMcay0guTC1Wg1Tmgc3REq29Y5SZOTk4EBQU17hFgcWExDNsIsJRACCGEuE6WMjOmrKLy+lxrna7pdCHmvKv00vV0vDAhrbmztV7XxwmVrn4nulfj4uJCaGhok+r3XFpYyOEdSRzaupmzx47Ytju4uNC2aw/adu+NZ4uW9guwCjQaDVqtFlUdrIkgCbCdVSyHXJEAyyQ4IYQQ16KYLBizii4ku+UjuuacK7cY07jrbbW5tmTX1wm1Q+MceNFoNE1qUMnR0ZHOg4bQedAQzh4/yr71q0mOX0POsSNsPXaErT9+i29IGyJiBhHeNwaDa+23fa3PJNuys7+OAKu1TefNKoQQ4uoUswXTuRJbbW5Fva7pXLF18YfLULvoKo/mln9/rV66ovHwDgwmZtxE+o0dT/rObexbv4q0pC1kpaeRlZ7G+s/iaNOtB5Gxg2nVsUuTnIAv7wY705ePAFvKm0prNNKXUgghmpoLvXQvjOaaThdiPHOVXrqOWltbsYuT3RvtpSsaD7VGQ5uuPWjTtQdF+Xkc2BjPvnWryDqSxsHETRxM3ISzRzPC+w0gMnYQXi2D7B1ynZEE2M4qRoArEmAZARZCiMZLURTMeaXltbkXJbtZRSjGKvbSbW7tviC9dEV1OLm502XYCLoMG0HWkcPsW7+a/RvWUpibQ9LyH0ha/gN+bdsRETOI9r374+jiYu+Qa5UkwHZWMQKsmKUGWAghGou/9tK9ONmtSi9dbfkKafbupSsaJ99WrfFt1Zr+903g8I4k9q1bTfqOrWQeSiXzUCrrPv2Ytt16Ehk7iKCoTqjVjW9wTrItO9PZSiCkC4QQQjRUllITpYfyKE3LpSyjANPpIixFV+ilq1ah9THYShcqWo3Vp166omnQaHWEdu9FaPdeFOXlsv/Pdexdt4qzx46QsnkDKZs34OLpRYf+NxMRMwjPgBb2DrnGSAJsZxUlEIqtBEIuiRBC1HeKomDMKKQkNYfS1BxKj+SD5S+1uhf30r1oRLch9NIVTY+Tuwddbx1Jl+F3kJWeZi2R+HMdBdnn2LLsO7Ys+46AduFExA4krFd/HJyc7B3yDZFsy850msoJcENbuUUIIZoKc6GR0oM5lKRav/66iITWyxGHds3QB7lZR3cbeC9d0TSpVCqat25L89Zt6T9uIoe3b2HfulWk79jGqdT9nErdz9olHxPaoxcRsYMIiohCpW54/88l27Izh4oRYEv5CLCUQAghRL2gmBXKjufbEl7jyYJKPXZVejUObTxwbNcMx3bN0HoZ7BesELVAq9PRLroP7aL7UJCTzf4Na9m7bhXZJ4+z/8917P9zHUGRHfnbzJftHWq1SQJsZxVLIdtKIGQSnBBC2I0pt5TS1BxKUrMpOZSLUlJ5wprO3xmH8oTXIdgNlbbhjXwJcT1cmnnS/fZRdBtxF5lpqexbt4oDG+MJiuxo79CuS7Wzrd9//x0XFxf69u0LwLvvvsvHH39Mhw4dePfdd2
nWrFmNB9mYVdQAY5E2aEIIUdcUo4XS9DzbKK8pq6jS/WonLQ6hzXAMbYZjOw80bg52ilSI+kGlUuHfNgz/tmHEPjDZNom/oal2Ajx9+nRef/11APbs2cNTTz3FtGnTWLt2LdOmTWPx4sU1HmRjptOoQVFAsfZ/1GhlIQwhhKgtiqLgUKymaHMGxrR8Sg/nVe6/qwJ9oKt1hLddM/QtXaUzgxBXoNXrgYa58Eq1E+D09HQ6dOgAwH//+19uu+02XnnlFbZv387w4cNrPMDGTq9Vo75oPUupARZCiJplKTFRmpZLSWoOxSk5ROZ6cH7nUdv9Gje9razBsa0HaicZiBCisat2AqzX6ykqsn5EtGrVKh544AEAPD09yc/Pr9nomgC9Ro1auZAAa6QGWAghbohiudCirCQ1m7Kj5yu1KLOoFBxbe2AI87ROXmvuJCuqCdHEVDvb6tu3L9OmTaNPnz5s2bKFb775BoDU1FRatmxZ4wE2dnqtGs1FCbDUAAshRPWZC8ooPWgd5S05eJkWZd4Ga7LbxpW1KZsZOqIXOp2M9ArRVFU7AX7nnXf4+9//zvfff8/7779PixbWVUF+++03hg4dWuMBNnY6jRo1F2YZSxcIIYS4NluLspTyFmWn/tqiTINDG3ccw6wT2CpalBmNRiyH7BS0EKLeqHa2FRQUxM8//3zJ9gULFtRIQE2NXnuhBEKt0cjHcEIIcQWm3BLrymspOdYWZaWXtiirmLwmLcqEEFdzXcONaWlpLF68mLS0NN566y18fX357bffCAoKIiIioqZjbNQurgGW0V8hhLhAMZopTc+nJCWbkoM5mLKKK91va1HWzjrKq3FrmLPRhRB1r9oZ1/r16xk2bBh9+vQhPj6el19+GV9fX3bt2kVcXBzff/99bcTZaOm0ajRcGAEWQoimSlEUTGeKbT15Sw/ngekvLcqC3Gwrr+lauEiLMiHEdal2Avzss8/y0ksvMW3aNFxdXW3bb775Zt55550aDa4pcLh4BFgrI8BCiKbFUmKi9FCuLek155ZWul/jrreO8oY1w7GNtCgTQtSMamdce/bs4csvv7xku6+vL2fPnq2RoJoSnVZlS4A1kgALIRo5xaJgPFVgS3jLjuVzUSt00KpwCHG3jfJqfaVFmRCi5lU74/Lw8CAjI4OQkJBK23fs2GHrCCGqTq/R2BbCkBIIIURjZC4oo+RgLqUp2ZQczMVSePkWZQ5hzXAIcUetl5+FQojaVe0EeMyYMTzzzDN89913qFQqLBYLGzdu5Omnn7YtiiGqTqe5aARYJsEJIRoBxWyh7Oh5W09e48mCSver9Boc2npcGOX1dLRTpEKIpqraGdcrr7zCP/7xDwIDAzGbzXTo0AGz2cy9997LCy+8UBsxNmrWhTCsrXxkBFgI0VCZsksoOZhDSUoOpWmXaVEW4IxjO08c23mgD5IWZUII+7qupZA//vhjZs6cyd69eykoKKBz586EhobWRnyNnl6jtpVASA2wEKKhsJSZKU3Po7S8ltd05i8typz/0qLMVVqUCSHqj+vOuIKCgggKCqrJWJqkSgthSAIshKinFEXBlFV0oUVZeh6YLlp6TV3eoqy8Y4MuQFqUCSHqr2pnXBMnTrzq/YsWLbruYJoinabySnBCCFFfWIpNlBzKtY3ymvP+2qLMAcewZtaR3rYeqA3yR7wQomGo9k+rnJycSreNRiN79+4lNzeXm2++ucYCayr0WvVFXSDkl4cQwn4Ui4Lx5EUtyo5fqUWZJ45hzdD6GKRFmRCiQap2xvXjjz9ess1isfDoo4/Spk2bGgmqKdFpLkyC02hlBFgIUbfM58ts3RpKD+ZgKTRVul/rY7B1a9BLizIhRCNRI0OOarWaadOmERsby4wZM2rikE2GQ6UaYFnhSAhRuxSThbJj+dakNyUHY0ZhpftVDn9pUdZMWpQJIRqfGvvMPS0tDZPJdO0dRSU6jVoWwhBC1CpTdsmFyWuHclHK/tKirIXLhVHeIFdUGmlRJoRo3KqdAE+bNq3SbUVRyMjI4JdffmH8+PE1FlhTUakLhCTAQogaoFgUSg/lUnIg29qi7OxfW5TprCuvtWuGY6gHGhdpUSaEaFqqnQDv2LGj0m21Wo2Pjw/z58+/ZocIcSnrQhjls0zUkgALIa6fOb+Mwq2ZFG7NxJx7UceGihZlYdaevNKiTAjR1FU7AV67dm1txNFk6TQqWwkEaukCIYSoHsWiUJqWS2FiBsXJ2WCx9uZVO2kxRHpbR3rbeqB2lJ8vQghRQX4i2pn+oj7AqKXuTghRNeaCMoq2ZVGwJQPzuRLbdn2wG849/XGK9Ealk58pQghxOVVKgDt37lzlXo/bt2+/oYCaGpVKhU4lJRBCiGtTFIWy9HwKEjMo3nsWzNbRXpWDBqcuvrhE+6Pzc7ZzlEIIUf9VKQEeOXJkLYfRtOlU1l9iikpGa4QQl7IUGSnckUVhYgamrAsT2nQtXXCJ9sfQ0Uf68wohRDVUKQGeNWtWbcfRpGmpSIDlF5gQwkpRFMqOn6cwMZOiXWfAZP2kSKVX49TJF+cefuhbuto5SiGEaJikBrgeqCiBUKQEQogmz1JqomjHGQoTMyotUqHzc7LW9nbylQltQghxg6r9U9RsNrNgwQK+/fZbjh07RllZWaX7s7Ozayy4puLCCLCUQAjRVJWdKqAwMYOiHWcuLFShVeMU5Y1ztL91gYoqzsUQQghxddVOgOfMmcMnn3zCU089xQsvvMC//vUvjhw5wrJly3jxxRdrI8ZGT1teA2yREgghmhRLmZni3WcoTMyk7Ph523atjwHnaH+cu/iidpIl0oUQoqZVOwH+4osv+Pjjj7n11luZPXs2Y8eOpU2bNkRFRZGQkMDjjz9eG3E2atryPsAWGQEWokkwni6kMDGTwu2nUUrKR3s1KgwRXjhH++PQ2l1Ge4UQohZVO+PKzMzkpptuAsDFxYW8vDwAbrvtNn755ZcaDc5sNjNz5kxCQkIwGAy0adOGf//73yiKYttHURRefPFF/P39MRgMDBo0iIMHD9ZoHLVNU54ASwmEEI2XYrJQtDOLrA93cXrBdgo2nUIpMaPxdMRtaCv8n+uB173hOLbxkORXCCFqWbVHgFu2bElGRgZBQUG0adOGFStW0KVLF7Zu3YqDg0ONBvf666/z/vvvs3TpUiIiIkhKSuLBBx/E3d3dNtI8b9483n77bZYuXUpISAgzZ85kyJAhJCcn4+joWKPx1JaKBNgsCbAQjY7pbDEFWzIp2paJpdBk3agGx3AvXKL9cWjrIcsSCyFEHat2AnznnXeyevVqoqOjeeyxxxg3bhxxcXEcO3aMf/7znzUa3KZNm7jjjju49dZbAWjVqhVfffUVW7ZsAayjv2+++SYvvPACd9xxBwCffvopzZs3Z9myZYwZM6ZG46ktFQmwpfoD8kKIekgxWyhOzqYwMYPSQ7m27Rp3Pc7d/XDu7ofGvWYHDIQQQlRdlRPgd955h3HjxvHaa6/Ztt1zzz0EBQWxefNmQkNDGTFiRI0G17t3bz766CNSU1Np1
64du3bt4s8//+Q///kPAOnp6WRmZjJo0CDbY9zd3YmOjmbz5s1XTIBLS0spLS213c7PzwfAaDRiNBpr9DlcTsU5Kv7VlC+FbLRQJ+cXNe+v11Q0DtW9rubcUoqTsijeloWloPwxKtCHeuDUvTn6UA9UGhUWwCL/V+xC3quNk1zXxudy17Qmr69Kubig9irc3d0xGo3ceeedTJo0iZtvvrnGgrgSi8XC888/z7x589BoNJjNZl5++WWee+45wDpC3KdPH06dOoW/v7/tcaNHj0alUvHNN99c9rizZ89mzpw5l2z/8ssvcXJyqp0ncxWJP/+GV/4J8jvE0qVTaJ2fXwhxAxRwz9XhnemAe64OFdZyBqPOwlnfUs76llLmaLFzkEII0fAVFRVx7733kpeXh5ub2w0dq8ojwJmZmXz33XcsXryYwYMHExQUxMSJE5kwYQKBgYE3FMSVfPvtt3zxxRd8+eWXREREsHPnTp588kkCAgIYP378dR/3ueeeY9q0abbb+fn5BAYGcsstt9zwC1oVRqORlStXMnjwYHQ6Hbv/WAWAf2AQw4cPr/Xzi5r312sqGoerXVdzfhnF28pHe/Mu9EPXt3bD0L05Du2b0VIrZU31jbxXGye5ro3P5a5pxSf2NaHKCbDBYOCBBx7ggQce4PDhwyxZsoS4uDjmzJnDoEGDmDRpEiNHjqzR/3jTp0/n2WeftZUy3HTTTRw9epRXX32V8ePH4+fnB8Dp06crjQCfPn2aTp06XfG4Dg4Ol52wp9Pp6vSNU3E+ta0LhEbeuA1cXf8fEnWj4roqFoXStFwKEzIo3n+O8rcuaictTt2a49zDH523wb7BiiqR92rjJNe18bn4mtbktb2u4YnWrVszd+5c0tPT+e233/Dy8mLChAm0aNGixgID61C3Wl05RI1Gg8Vi/a0TEhKCn58fq1evtt2fn59PYmIivXr1qtFYapO6vAbYJJPghKiXLIVGzq8/Tub8JM7G7aV4nzX51bdyw/OeMPyfi8ZjeGtJfoUQooG4oQXlVSoVWq0WlUqFoig1Xnw+YsQIXn75ZYKCgoiIiGDHjh385z//YeLEibbzP/nkk7z00kuEhoba2qAFBAQwcuTIGo2lNtkSYEVaIQlRn5QdzSck1ZkzW7aD2TpdQuWowblLc5yj/dA1d7ZzhEIIIa7HdSXAx48fZ/HixSxZsoRjx47Rv39/Pv74Y0aNGlWjwS1cuJCZM2fy97//naysLAICAnj44YcrLbk8Y8YMCgsLmTJlCrm5ufTt25fff/+9wfQABlAp1pWgZARYiPqh9Fg++SuOUnooF08cAAVdSxdcov0xdPRBrZdly4UQoiGrcgJcVlbGDz/8wKJFi1izZg3+/v6MHz+eiRMn0rp161oJztXVlTfffJM333zzivuoVCrmzp3L3LlzayWGuqCqaIMmI8BC2JUxs5C8FUcpST5n3aBRccarmLBR3XEK9rBrbEIIIWpOlRNgPz8/ioqKuO2221i+fDlDhgy5pD5XXB+VRUaAhbAn07li8lcdo2hnFiiACpy6NMcpNoBtm1YTGSClDkII0ZhUOQF+4YUXuP/++/Hx8anNeJok2wiwRUaAhahL5vxS8tccp3BLJlisNb6Gm7xxGxyMztdJmuoLIUQjVeUE+OK+uaKGlY8AG5EEWIi6YC40cn79CQo2nQKT9Q9Qh3bNcL8lGH1LVztHJ4QQorbdUBcIUTNUFjMKYLRICYQQtclSaqLgz1Ocjz+BUmr9w1Mf7Ib7kFY4tHa3c3RCCCHqiiTA9UHFCLBMghOiVihGCwWJGZxfexxLobWsQefvjNuQVjiGNUOlkveeEEI0JZIA1wflCXCZ1AALUaMUs0LR9tPkrzqGOa8UAK23AbfBwRhu8kallvecEEI0RdedAJeVlZGenk6bNm3QaiWPvhFKRQKs2DkQIRoJxaJQvOcs+SuPYjpbDIDGXY/bwGCcuvqi0ki5kRBCNGXVzlyLiop47LHHWLp0KQCpqam0bt2axx57jBYtWvDss8/WeJCNmaIoUL60c6mMAAtxQxRFoSQlh/w/jmDMKARA7azFdUAQLtH+qHSS+AohhKD6jWefe+45du3axbp16yqttjZo0CC++eabGg2uKbCYTbbvy6QGWIjrVpqex5kPd3NuyT6MGYWoHDS4DQ7Gb0Z3XPu2kORXCCGETbVHgJctW8Y333xDz549K00ciYiIIC0trUaDawosJrPt+1KLHQMRooEqO1lA3h9HKE3NsW7QqnHpHYBrTEs0zjr7BieEEKJeqnYCfObMGXx9fS/ZXlhYKDOpr4P5ohHgUrO8fkJUlTGriPyVRynec9a6Qa3CuXtz3AYGoXFzsG9wQggh6rVqJ8DdunXjl19+4bHHHgOwJb2ffPIJvXr1qtnomgCL6aISCKkBFuKaTDkl5K8+RtG20xeWLe7ki9ugILReBnuHJ4QQogGodgL8yiuvMGzYMJKTkzGZTLz11lskJyezadMm1q9fXxsxNmoVI8Bm1JSZpQ2EEFdiPl/G+bXHKUjMgPL3imMHL9xvCUbn52zn6IQQQjQk1Z4V0rdvX3bu3InJZOKmm25ixYoV+Pr6snnzZrp27VobMTZqFTXAFpWaMrMUAQvxV5ZiE3l/HCHzja3WpYvNCg5t3PH5e0e8H+ggya8QQohqu64Gvm3atOHjjz+u6ViapIouEBbUlJkkARaigqXMTMGmU5xfdwKlxPo+0QW64j4kGMe2zewcnRBCiIas2gmwRqMhIyPjkolw586dw9fXF7PZfIVHisuxmC+MABtlBFgIFJOFwq2Z5K85huW8ddlibXMn3G8JxrGDl0y2FUIIccOqnQAryuXrVEtLS9Hr9TccUFNjLp8EZ1HJCLBo2hSLQtGOLPJXHcWcY122WOPpiNugIJw6+cqyxUIIIWpMlRPgt99+G7B2ffjkk09wcXGx3Wc2m4mPj6d9+/Y1H2EjV9EFwowak0XBYlFQyy960YQoikLJvnPkrTiKKasIALWrHreBgTh380OllQUshBBC1KwqJ8ALFiwArL+sPvjgAzQaje0+vV5Pq1at+OCDD2o+wkbOfFEJBECZ2YKjWnO1hwjRKCiKQumhXPL+OILxRAEAKoMWt9iWOPcKQK2X94EQQojaUeUEOD09HYABAwbwww8/0KyZTEKpCbZJcOUJsNFswVEnv/hF41Z6NJ/8P45QejgPAJVejUvfFrj2b4na8brm5gohhBBVVu3fNGvXrq2NOJosWw1weUc6qQMWjZkxs5C8P45Qsj/bukGjwqWnP64DAtG4yBwCIYQQdeO6hlpOnDjBTz/9xLFjxygrK6t033/+858aCaypsI0Al5c9GGUxDNEImc4Wk7fqKMW7zlxYva1rc+vqbR6O9g5PCCFEE1PtBHj16tXcfvvttG7dmgMHDhAZGcmRI0dQFIUuXbrURoyNWsVCGKhkBFg0PhWLWBRuyQSL9Y87Q5Q3boOD0fk42Tk6IYQQTVW1E+DnnnuOp59+mjlz5uDq6sp///tffH19ue+++xg6dGhtxNioVYwAK+Uj
wGXSR1k0EiUHc8j5PhVznvVTIsewZrjd0gp9C5drPFIIIYSoXdVOgPfv389XX31lfbBWS3FxMS4uLsydO5c77riDRx99tMaDbMxsC4dUJMAmKYEQDZulzEzer+kUJmQAoPVyxOOuUBzbePx/e38eJ1dZ5v//r1P7XtX7knQ6+x5CNiCggBASQHGBQUR0kHH0Mw74UXHmM+JvRHHmK6POKKODOm6Io8giiMoeI4EBAySB7Eln7Sy9b7Xvdc7vj1q6q7u60510d/VyPR+PenTVqdOn7s5Jd7/7rutcd3EHJoQQQmSMOADb7fZc3W9NTQ3Hjh1j2bJlAHR2do7u6KaBbB9g+rRBE2Kyip300/N4A8muKAD29TW4r5sjLc2EEEJMKCMOwJdccgmvvfYaS5Ys4frrr+eLX/wie/fu5amnnuKSSy4ZizFOadkuEJo+fSpkOWQxGWkJFd+fThJ89QxooHebKPmrhVgWSLtEIYQQE8+IA/B3vvMdgsF00/r77ruPYDDIY489xoIFC6QDxDlQMyUQik4ughOTU7wpSPfjDSTb0qu42VZX4rlhHjqr9PMVQggxMY34N9TcuXNz9+12u6z+dp6yF8HlaoBlBlhMElpKI7D1NP4tp0DV0DmMlHxoAdZlZcUemhBCCDEk3Ug/Ye7cuXR1dQ3Y7vV688KxGJ5sDbCSuwhOArCY+BLtYdp/uAv/5pOgaliXl1H1+dUSfoUQQkwKI54Bbmxs7O1c0EcsFqOpqWlUBjWdZGuAFX12IQwJwGLi0lSN4OvN+F5shKSKYjFQ8oF5WC+sQFGUYg9PCCGEGJZhB+A//OEPufsvvvgibrc79ziVSrFlyxZmz549qoObDnI1wJmL4GQGWExUye4o3U80ED/hB8C8sITSmxagd5uLPDIhhBBiZIYdgD/4wQ8CoCgKt99+e95zRqOR2bNn8x//8R+jOrjpIFsDrJMZYDFBaZpGaHsrvmdOoMVTKCYd7vfOxX5Rtcz6CiGEmJSGHYBVNR3M5syZw/bt2ykvLx+zQU0n2XISnd4AmswAi4kl5Y/R8+QRog09AJhmuyi9eSGGMmuRRyaEEEKcuxHXAJ84cWIsxjFtqckEkKkBTkI8JSvBieLTNI3I7g56fn8MLZIEg4J742wc75qBopNZXyGEEJPbsLtAbNu2jWeeeSZv2y9/+UvmzJlDZWUln/70p4nFYqM+wCknFkDZ9SvmdLwEQCrZZwYYmQEWxZcKJeh+5BDdjzagRZIYZzio+uwqnJfPlPArhBBiShh2AP7617/O/v37c4/37t3LJz/5STZs2MCXvvQl/vjHP3L//fePySCnlHgIw7OfZ8WZX4Om9tYAGyQAi+KLHOii7bs7ieztBJ2Ca8MsKv9+JcYqe7GHJoQQQoyaYZdA7Nq1i3/5l3/JPX700Ue5+OKL+clPfgJAXV0dX/3qV/na17426oOcUiweABQ0iAVyXSAMshSyKCI1msT7x+OEd7YBYKi0UfrhhZhmOos8MiGEEGL0DTsA9/T0UFVVlXv8yiuvcN111+Uer1u3jtOnT4/u6KYiowXNYEVJRiDqzS2EoTPISnCiOKJHvfT89jApbwwUcLx7Ju5r6lGMI14nRwghhJgUhv0brqqqKncBXDwe5+233+aSSy7JPR8IBDAajaM/wqnIkumhHPHmFsLQSwmEGGdqPIX3D8fo/OleUt4Y+lILFf/nAjzXz5HwK4QQYkob9gzw9ddfz5e+9CW++c1v8vTTT2Oz2Xj3u9+de37Pnj3MmzdvTAY55Vg9EGxFifpyJRC5ACwzwGIcxE766XniMMnOCAD2i6txXz8XnVlf5JEJIYQQY2/YAfhf/uVfuPHGG7niiitwOBw8/PDDmEym3PM///nP2bhx45gMcqrRLB4UgKiXVOYiOEMmACdkBliMIS2p4v/TSQKvnAEN9C4TJX+1EMvCkmIPTQghhBg3ww7A5eXlvPrqq/h8PhwOB3p9/kzRE088gcPhGPUBTknZEoiot/ciOJkBFmMs3hyk5/HDJFpDANhWVeK5YS46m5QuCSGEmF5GXOjndrsHhF+A0tLSvBnh0dLU1MTHPvYxysrKsFqtrFixgh07duSe1zSNe++9l5qaGqxWKxs2bODIkSOjPo5Rle0EEfXmFsIwGKULhBgbWkrD/+dTtD+4i0RrCJ3dSNnHllB6yyIJv0IIIaalCX2lS09PD5dddhlGo5Hnn3+eAwcO8B//8R+UlPS+Xfutb32L733ve/zoRz/izTffxG63s2nTJqLRaBFHPjQtE4CJ+nILYRgyFxDKRXBiNCU6wrT/aDf+l05CSsOytIyqL6zGulyWMhdCCDF9jXgp5PH0zW9+k7q6Oh566KHctjlz5uTua5rGAw88wD//8z/zgQ98AEivTldVVcXTTz/NRz7ykXEf87DkukD0oKbSs+bGXAmELIUszp+magT/0ozvhUZIqigWPZ73z8O2qhJFkdXchBBCTG8TOgD/4Q9/YNOmTdx888288sorzJgxg7//+7/nU5/6FAAnTpygtbWVDRs25D7H7XZz8cUXs23btkEDcCwWy1u22e/3A5BIJEgkEmP4FaVpJid6QAv3kEqWAaDT6wGVWCI5LmMQoyt7zibCuUt5Y/ieOkbiRPr/tWmeG9eH5qJ3m0lm2u6J4ZlI51WMDjmnU5Oc16mn0DkdzfM7oQPw8ePH+eEPf8jdd9/Nl7/8ZbZv387//b//F5PJxO23305raytA3gId2cfZ5wq5//77ue+++wZsf+mll7DZbKP7RRQws/sMa4Du5uN4e9JVKOkey/W0d3bz3HPPjfkYxNjYvHlz8V5cg7J2E3Un7ehTCimdxpn6MJ0V3fD6ieKNawoo6nkVY0LO6dQk53Xq6XtOw+HwqB13QgdgVVVZu3Yt3/jGNwBYtWoV+/bt40c/+hG33377OR/3nnvu4e6778499vv91NXVsXHjRlwu13mP+2xShxQ4+d+U2fTYbVbiXli2bAn8bxi70831119y1mOIiSWRSLB582auueaaoiwIkwrE8T99nPhxLwDGWU7KbpxHbZll3McylRT7vIrRJ+d0apLzOvUUOqfZd+xHw4QOwDU1NSxdujRv25IlS3jyyScBqK6uBqCtrY2amprcPm1tbVx44YWDHtdsNmM2mwdsNxqN4/KNozgyZQ8xP1qm64PFYgHCJFKafPNOYuP1f6iv8O4OvL8/ihpOgl7BvXE2jnfPQNFJre9oKcZ5FWNLzunUJOd16ul7Tkfz3E7oLhCXXXYZDQ0NedsOHz5MfX09kL4grrq6mi1btuSe9/v9vPnmm6xfv35cxzoSvV0gehfCMGVOqrRBE8OVCiXoeuQg3b85hBpOYqy1U/XZVTivmCnhVwghhBjChJ4B/sIXvsCll17KN77xDT784Q/z1ltv8eMf/5gf//jHACiKwuc//3n+9V//lQULFjBnzhy+8pWvUFtbywc/+MHiDn4ouYUwfKiZNmgWczoAh+OpYo1KTCKRQ930PHkYNZAAHTjfMwvXe+pQDBP6b1ohhBBiQpjQAXjdunX
87ne/45577uHrX/86c+bM4YEHHuC2227L7fP//t//IxQK8elPfxqv18u73vUuXnjhhUxJwQSVXQgDjVQyDkCpywpAZzCGqmroZAZPFKBGk3ifOU54RxsAhgorpR9ehKnOWeSRCSGEEJPHhA7AAO973/t43/veN+jziqLw9a9/na9//evjOKrzZDCTVEwYtDhqpi1VudOGToGkqtEZilHpnMABXhRF9JiXnicOk/LGQAHHZTNwb6pHMQ5cmVEIIYQQg5vwAXiqShjsGBJx1FS65MFkMlLuMNMeiNHmkwAsemlJFd/zJwi+3gyAvsRM6c0LMc/1FHdgQgghxCQlAbhIEnob1kRPLgDrDHqqXBbaAzFa/VFW4C7yCMVEkAol6PrVQeInfADYL6rG/d456MzyrSuEEEKcK/ktWiRxvQNNS/c6BtDrDVS5LOxt8tHqjxZ5dGIiSLSF6Hz4AKnuKIpZT+kti7AuLSv2sIQQQohJTwJwkSQMNlR6L3TT6Q1Uu9O9idt8EoCnu8ihbrp/cwgtlkJfaqH89qUYq+zFHpYQQggxJUgALpKE3o6q9QZgvcFAtStd99smM8DTlqZpBP+3Cd/zJ0AD0xw3ZR9bgt4ujd2FEEKI0SIBuEgSejupPgE4WwMMSAnENKUlVXp+d5TwznSLM/tF1XjeP096+wohhBCjTAJwkcT1dlStN9ikSyBkBni6SgXjdP3PQeIn/aCA+31zcVxai6JIP2ghhBBitEkALpKEwZYrgVB0OhRFyZVAtEoN8LQSbwnR9fB+Ut4YikVP2UeXYFlYUuxhCSGEEFOWBOAi6VsCoTek6zsrMwHYH00SiaewmmSBg6kusr+L7scOocVVDGUWym5fhrHSVuxhCSGEEFOaBOAiSejtuS4QOn066LosBqxGPZFEilZ/lDnlctX/VKVpGoGtZ/C/1AgamOd7KPvoYnQ2udhNCCGEGGtydU2R9K0B1hnSf4coiiJ1wNOAllDpeawB/4uNoIF9fQ3ldyyT8CuEEEKME5kBLpKEoU8JhL631KHKZeZEZ0gC8BSV8sfp+p8DxE8HQAee98/DcUltsYclhBBCTCsSgIukbx9gXZ8ALBfCTV3xpiBdv9xPyhdHsRoou20JlvmeYg9LCCGEmHYkABdJvO9FcPreSpQqt/QCnorCezvpebwBLaFiqLBSfvsyDOXWYg9LCCGEmJYkABeJpjOQ0qcDkE7X2+tVVoObWjRNI7DlFP4/nQLAvLCEslsXo7PKt54QQghRLPJbuIhUowOAPvlXSiCmEDWeoue3h4ns6QTAcVkt7uvnouhlcQshhBCimCQAF1FvANZy2ypzM8CxooxJjI6UL0bnLw+QaAqCXqHkA/OxX1Rd7GEJIYQQAgnARZUypPv86vsE4GwbtPZAFFXV8sojxOQQPx2g85cHUANxdDYDZR9binmuu9jDEkIIIUSGBOAiSgfgWP4MsNOMokAipdEdjlPuMBdvgGLEons68f3uOCRVDFW29MVupZZiD0sIIYQQfchCGEWkGtJL3upJ5bYZ9TrK7OnQK3XAk4ematSesuJ74igkVSyLS6n8zEoJv0IIIcQEJAG4iFKGTBeIPgEYoNqdDsDSCWJy0FIq/t8epaYpfT4dV8yk7K+XorPIGyxCCCHERCQBuIhUXSYAa8m87blOEBKAJzwtodL1q4NE93ahKhquG+fhuW4OitRuCyGEmOJafBGavZFiD+OcyBRVEWX7AOu1RN72qmwnCCmBmNDUWIquX+4ndswHBoVj8wO8a1VFsYclhBBCjLqUqnG4LcCOkz3saOxmR2MPTd4Id1w2m6/esKzYwxsxCcBFpOrTpQ66wQKwtEKbsNRwgs5f7Cd+KoBi0uP52EL8B18v9rCEEEKIURFNpNh12svOkz1sb+xm58keAtH8d6x1CvjCiUGOMLFJAC4iVZcJwGo8b7uUQExsqWCczp/tI9ESQrEaqPib5SjVFjhY7JEJIYQQ56YrGGPHyZ5c4N3X5COR0vL2sZn0rJ5Vwpr6EtbNLuXCWR4c5skZJSfnqKeIlM4EDAzAVW5ZDnmiSvpidP50L8mOCDqHkYq/XYGx2k4iMTn/AhZCCDH9aJpGY1eY7Y3d6XKGkz0c7wgN2K/SaWbd7FLWzi5hbX0pS2qcGPRT4/IxCcBFlMIIgF7NL3WQGeCJKdkVoeMne0l5Y+jdZsr/djnGCluxhyWEEEIMKZFS2d/sZ0djd66coTMYH7DfgkoHa2eXsm52eoZ3ZokVRZmaF3VLAC4iVZcOwDo1BqoKuvRfVdkA7A0niCZSWIz6oo1RpCXaQnT8dB9qII6hzEL5p1Zg8EiPXyGEEBOPP5rgnVPeXODdddpLNKHm7WPS67hgpjsXeNfUl+CxmYo04vEnAbiIVCUzA6yoEPOBtQQAl9WAxagjmlBp80epL7MXc5jTXvxMgM6f70MNJzFW2yj/5Ar0zunzQ0IIIcTE1uyN5GZ2tzf2cKjVj5ZfvovHZmRtfQlr6tOBd/kM97SeYJMAXESqmv7fqVM0iHhzAVhRFKpdFhq7wrT6JAAXU+yEj85f7EeLpTDWOam4Yxk6m7HYwxJCCDFN5dqRZWp3s+3I+ptVasvV7q6bXcK8Cgc66VGfIwG4iFLJdDsRHRpEvXnPVWUCcFtAWqEVS/RwD13/cwAtoWKa46b8E0vRTdKrXYUQQkxO2XZk2cBbqB2ZXqewtMaVC7xrZ5fkWqqKwuS3eRGpqfR/YL2iQaQn7zlZDKO4Ivs66frNIUhpWBaVUPaxJSjT+K0iIYQQ4yPbjixdv9vD/ubB25GtzVysdmGdB7tM0IyI/GsVkZpKAaBT1HQJRB/VbukEUSyht9vo+e1hUMG6opzSWxahGKZG2xchhBATh6ZpnOgM5a2udrxz6HZk62aXsrh66rQjKxYJwEXUG4ALl0CABODxFtzWjPf3xwCwrami5KYFKFIzJYQQYhTEkyr7m325xSZ2NPbQFRrYjmxhlSN3sdpUb0dWLBKAi0hN9i2B8OY9Vy0lEOPOv/U0/hcaAXBcWov7fXMl/AohhDhn/miCt/usrjZYO7KVdel2ZOkuDdOrHVmxSAAuolRq8Ivgqt3pZZJlBnjsaZqG/6WTBF4+DYDzPXW4NtbLX9tCCCFGZCTtyLKBd7q3IysWCcBFpCYHrwHOlkC0+2NomiZhbIxoqobvmeME/9IMgPu62TivqCvyqIQQQkx0fduRbW9Mz/IO1Y5sXSbwSjuyiUECcBHldYHoNwNc6UwH4HhKpSecoNQub4eMNk3V6HnyCOGdbQB4PjgPxyW1RR6VEEKIiSgST7H7jDcXeN8+NXQ7smzgrZR2ZBOSBOAiyrsIrt8MsMmgo8xuoisUp9UXlQA8yrSkSvdjDUT2doIOSv5qIfbVVcUelhBCiAliOO3I7CY9q6Qd2aQkZ6mIcjXABWaAIV0G0RWK0+aPsrTWNc6jG12pVIp4PE4sFhvwse/9ZDKJ0WjEaDRiMpnO+tFgMKDTjawVjJZI0fWrg0QbekCvUHbrYqzLy8foKxdCCDHRjagd2Zz0zK
60I5vcJAAXUW8XiIE1wJDuBXygxT9hLoSLxWK0tLTg9/uHFWb7fkwmk2d/gXM0ksBsNphQ3/Fi7EhhNZqp+dBSdAucUmcthBDTSLYd2Y7GHnacHLodWfZiNWlHNrVIAC6ioUogoE8v4CK0QtM0ja6uLs6cOZO7tbW1ofW/nHWE9Hp9OoiazZjN5tz97EeDwUAymSQej5NIJAb9mEgkcsfMPg6Hw8MfSLai5A9vwR/AYDBgs9mw2+3Y7faC9/tuM5lM8kNQCCEmiWw7smzgLdiOzKBj5cx0O7J1s0tYPUvakU1lkyoA/9u//Rv33HMPn/vc53jggQcAiEajfPGLX+TRRx8lFouxadMmfvCDH1BVNfHrOXMBGA1iPlBToOtthZLrBTwOM8DRaJSmpibOnDnD6dOnaWpqIhIZeDWry+WitLR00AA7VLjNliyMBlVVhxWUsx9joSi+t5uJRiJE9UkS5XoiiSihUIhkMkkymcTv9+P3+4f1+nq9Pi8gW61W2traeOONN/B4PDidTlwuF06nE6PROCpfsxBCiOHJtiPb0Zjuv9vQFhiyHdm62el2ZGaDtCObLiZNAN6+fTv//d//zQUXXJC3/Qtf+ALPPvssTzzxBG63m7vuuosbb7yR119/vUgjHb5U34UwAKI+sJXmns/2Ah6rAByJRHjjjTc4cOAAHR0dA57X6/XU1tYyc+ZM6urqmDFjBm63e0zGMlI6nQ6TyYTJdPa/ztVYko6f7iPht6BzGqn42xUYq+y55+PxOKFQiFAoRDgcPuv9ZDJJKpUqGJi3bNky4PUtFgtOpzMvFPe9uVwu7HY7er384BVCiJFKqRoHWwL8b6vC5sf38PYpL80F3jmtL7OxJlPKsG52CXPLpR3ZdDYpAnAwGOS2227jJz/5Cf/6r/+a2+7z+fjZz37GI488wlVXXQXAQw89xJIlS3jjjTe45JJLijXkYcnWAOuM6aBL1JsXgHuXQ46N6utmg+8bb7xBLNZ7bI/Hkwu7M2fOpKqqatRmbItFS6h0/fIAidMBdDbDgPAL5IJ0SUnJsI6ZDcx9Q3EgEGDfvn1UVFQQDAYJBAL4/X6SySTRaJRoNFrwj4wsRVGw2+1DBmW3243FYpHSCyHEtBaKJdl12ttbznDKSyCWBPRAK5BuR7as1pULvNKOTPQ3KdLNnXfeyXvf+142bNiQF4B37txJIpFgw4YNuW2LFy9m1qxZbNu2bdAAnL1YKys7i9e/tnSsZF8j1wXCbAcNkoFONGfvIgzltvTpafVFRmVcsViM7du38+abbxKNpv86rqysZP369cyePRuHw5G3v6Zp4/LvMVa0lIbvscPEjvlQTDo8H18Mpabz/poURcHhcOT9eyUSCXp6erjmmmtyJQ+aphGLxQgEArlQnL31fRwMBtE0jWAwSDAYpKWlZdDXNplMuFwuXC4Xbrd7wEen0ykzyaMo+39lMn8fiHxyTiefFl+Ut0952XnKy9unejjUGiSl5tcz2Ex66qwJrrlwLuvmlLFypntAOzI555NLoe/V0TyHEz4AP/roo7z99tts3759wHOtra2YTCY8Hk/e9qqqKlpbWwc95v3338999903YPtLL72EzWY77zEPVyRz0VYkpQMdvPXqZjpcveEnlAAw0BNO8IdnnsNwjp1WUqkUHR0dtLe3k8rUHVssFqqrq/F4PJw6dYpTp06d51czwWhQf8xOeYcZVdE4Mt9LcM+rsGdsX3bz5s3D2s9ms2Gz2aiqqkLTNJLJZO4PsP63bB1ztpVcZ2cnnZ2dgx67b1eM7K3vY71eL7PIIzTc8yomDzmnE5OqQXMYjvsVTgTSt574wJ9XHpPGXKfGHKfGXJdGjS2JXgHiR/E2HOWVhvEfuxgbfb9XR3Sx+1lM6AB8+vRpPve5z7F582YsltF76+Kee+7h7rvvzj32+/3U1dWxceNGXK6x77ebSCTYvHkzRr2eFOAor4TuY1x0wUK0pdfn9tM0ja/t2kI8qbLqsiupKxlZOI/H4+zcuZM33ngj95+mrKyMd7/73SxZsmTE/XMnC03TCD5/knBHa3qRi48s4vIlpWf/xPOQPad9Z4DH4jWydcc+ny/vY/Z+KpU6a1cMg8GQmzUuNJPscrkmfenLaBmP8yrGl5zTiSUYS7LrtI+3T/Ww85SX3ad9hOKpvH10CiypcbJ6VglrZnlYPctDjTs/E8h5nXoKndPhXqg+HBP6t9zOnTtpb29n9erVuW2pVIpXX32V//qv/+LFF18kHo/j9XrzZoHb2tqorq4e9LjZLgX9ZWfOxouqpr/JDdb0hWWGRAD6vX61y8Kp7jBd4RRzK4c3tng8zo4dO3j99dcJhdKNvEtLS7nyyitZvnz5lA2+Wf4tpwhvS78DUHLTQuwXjF9HkLH8P2Q0GrHZbIP+39Y0jVAohM/nywvIfW/BYJBkMklXVxddXV2Dvpbdbsftdg96s9vt02oWebx/NoixJ+e0OJq8EXY0drMz05LsUKufftUMOMwGVs3ysLa+lLWzS0a0upqc16mn7zkdzXM7oQPw1Vdfzd69e/O23XHHHSxevJh/+qd/oq6uDqPRyJYtW7jpppsAaGho4NSpU6xfv74YQx6R3EIYVk96Q6HFMDIBeLi9gE+cOMGTTz5JMBgEoKSkhCuuuIIVK1ZMi9rQ4LZm/JtPAuB+31zsayZ+O7zR0rc2ecaMGQX3ybZ76x+M+94SiUTu4r7m5uaCx9Hr9UMGZLfbLb+EhJjmkimVgy2B9EITJ3t4+2QPLQV+l83wWFk7u4S19SWsqS9lUbUTvXRnEGNsQgdgp9PJ8uXL87bZ7XbKyspy2z/5yU9y9913U1paisvl4rOf/Szr16+f8B0goE8fYFumtVih5ZDdw+8F7PP5ePzxx4lEIng8Hi6//HJWrlw5LYIvQPiddry/PwaA8+pZON9VOAROZwaDgdLSUkpLC5eEaJpGJBIZMiAHAgFSqRTd3d10d3cP+lo2my0vEHs8nryb1Wodqy9TCFEE/miCd0552dmYDry7TnsJ9ytn6NudYW19KWvqS6h2S3cGMf4mdAAeju9+97vodDpuuummvIUwJjpN03IBWJ8pgSDSM2C/atfwegGnUimefPJJIpEINTU1/M3f/M20moGLHOyi+4n0VQ/29TW4Nswq8ogmJ0VRchfo1dTUFNwnmUwSCASGDMnxeJxwOEw4HB60q4XZbM4LxP1DstUqS44KMVFpmsaZnki6lCGzlHChxSacFgOrZ2VmdzPlDDbTpI8eYgqYdP8Lt27dmvfYYrHw4IMP8uCDDxZnQOeqz08Jnd2TvjPUcshn6QW8detWTp06hdls5uabb55W4Td23EfXrw+BCrYLK/DcME+C0xgyGAyUlJQM2jdZ0zSi0WheIPZ6vbmPXq+XUChELBajra2Ntra2gsfJdngZLCDbbDY5z0KMk0RK5UCznx0ne9h5Ml3D21bg99KsUlsu7K6pL2FhpVMWmxAT0qQLwFOFpva+LaSzZYJEgRKIGZ7028SHWwODHuvo0aP87//+LwA33HDDoG9vT0Xxp
iCdD++HpIplcSklNy9EkR+2RaUoClarFavVOugFe/F4PC8QZ2/ZbcFgkHg8Tnt7O+3t7QWPYTQahwzI0+1CPSFGky+SSHdmyCw2sfu0j0giv5zBoFNYNsOdXk64Ph14ZbEJMVlIAC4STVVz9/X2TGAtMAN8ydwy9DqFhrYAp7vD1JXmt0ILBAI89dRTAKxdu3ZAzfRUlugI0/nzfWixFKY5LspuW4yin9odLqYKk8lERUUFFRUVBZ9PJBJDBuRAIEAikaCjo2PQFfYMBsOAuuO+IdnhcEhAFoL0uzanusOZldXSF6sdbh9YzuCyGNK1u7PTtbsrZ3qwmqbHNSZi6pEAXCx9ArAuG4ALzACX2E2sm13CG8e7eelAG59815w+h1B58sknCYfDVFVVsWnTprEe9YSR9Mbo/Ok+1FAC4wwH5bcvQzHKD+Kpwmg0Ul5eTnl5ecHnsz2R+wfkbEjOLkM91KIh2X7Ig4Xk/isjCjFVxJMq+5t9uVZkO0720BkcWM4wu8zGmkwrsrX1JcyrcEg5g5gyJAAXSXYGWNHpULIlEBFfwX2vWVrNG8e72XygNS8Av/rqqzQ2NmI0GqdV3W8qGKfzZ3tJ+WIYKqyU37EMnUX+K08nRqORsrIyysrKCj6fbfd2toA8VD/kbKu3RCLBs88+S2lpaV5QdjgcU76ntpgavOE4b5/qDbu7T3uJJdW8fYx6heWZcoY1me4MFc6B/fKFmCokNRSJpqV/+Oj1BrB40htjPlBToMufydy4tIp/eeYAb53opicUp8Ru4sSJE7zyyisAvO997xt0pmyqUaNJOh/aT7Ijgt5tpvyTy9E7TMUelphgztbuLZVKDRqQvV4vfr8/1+oNYNeuXQOOkQ3I/WeQJSCLYtI0jcaucG6xiZ0nezjSHhywX4nNyJr6ElZn2pFdMNONRd5FE9OIBOAi0VLpAKwzGCC7EAZA1Ae2/F/adaU2Flc7OdQa4M+H2tm0yMOTTz6JpmmsWrWKlStXjuPIi0dLpOh8+ACJpiA6u4Hyv12OwSMXXIiR0+v1Q3aySKVSBAIBOjs7efXVV6mvrycQCOTNIp+tF7IEZDEeYskU+5r87My0Inv7VA+dwfiA/eaW2zP1u+kZ3nkVcpGomN4kABeL1icA641gckA8mK4Dtg2ctdq4tIpDrQE2728lfOBlgsEgFRUVXHfddeM88OLQUipdjxwifsKHYtZT/jcrMFbYzv6JQpwDvV6f6yRRVlbG5ZdfnldilA3Ig80gDzcgDxaOs68tAVn01xOKZ3rvptuR7T7jI96vnMGk17FipjvXmWFNfQllDilnEKIvCcBFkq0Bzq3SZvGkA3CBThAAG5dV870/H6Xt6G6O6U5jMBi4+eabMZmm/tv/mqrR89sjRA92g0FH+e1LMc2QC5RE8fQNr4X0Dcg9PT2DllicrQZZAvL0pmkaxztDuVZkO072cLwjNGC/Ursps7JaOuwunyHlDEKcjQTgIskGYJ0+cwqsHvCfKbgaHMCyWhdLnTFWxE8DcP3111NZWTkeQy0qTdPwPXOc8DvtoIOy2xZjnusp9rCEGFLf8Dp79uwBzw+3BlkC8vQSTaTY1+RjR6Y7w9uneugODSxnmFdhTy8jnOnOMKdcyhmEGCkJwEWSC8CGPjPAULAVGkAkEmEdR1AUSHnqWLVq1dgPcgLw/+kUwb80A1B68yKsSwpf9S/EZDKcGuTRCsjZOuT+H51OZ+87UKIouoKx3IVqO072sPeMj3iqXzmDQcfKme50O7LMDG+Jfeq/8yfEWJMAXCxqny4Q0HshXIESCE3TePrpp1ESEXyqmb8EavmaBvop/gd/4LUmAltOAeB5/zxsq6b+jLcQMD4BWVEUXC5XwXDs8XhwuVzTprXieNA0jWMdwVwrsp0nezjRObCcodyRLWcoZXV9CctnuDAb5A8VIUabBOAi0fpeBAdDzgC/8cYbHD58GL1ez3YW0RZS2XW6hzX1U3fJ49DONnzPHAfAdU09jktrizwiISaOkQbk7Ap6fT+qqorP58PnK9x/HMDhcBQMx9n7ZrNcWDWYaCLFnjM+dpzsZmdjDztP9eANJwbst6DSkevMsLa+hPoym5QzCDEOJAAXSW8NcOYv+0FmgNvb29m8eTMA1157LR1HDZzZ3cxLB9qmbACO7O+k58nDADguq8V5VV2RRyTE5HK2gKyqKsFgcNBw7PV6SSQSBINBgsEgTU1NBY9jsVgGDcdutxubbXqEOU3TaPJG2HXay65TXnae6mFfk49EKn8tYYtRx8qZnlw7stWzSvDYpJxBiGKQAFws/UsgBpkB3rdvH6qqMm/ePNauXUubuYU/7G5m8/427rluyfiNd5xEj3rpeuQQqGBbXYn7vXOnxS9QIcaTTqfD5XLhcrkKPq9pGuFweNBw7PV6iUajRKNRWltbaW1tLXgco9E4ZB3yZO2F7Isk2HMmHXZ3n/Gy67Sv4FLCFU5zrm537exSlta4MBkm39crxFQkAbhINDUF9LkIbpAZ4KNHjwKwfPlyFEXhioUVGPUKxztDHG0PMr9y6rQDi58J0PXLA5DSsCwto+SmhSiy7rwQ405RFOx2O3a7ndrawuVHsVhs0HDs8/kIBoMkEgk6Ojro6OgoeAy9Xn/WOuRiX6gXT6ocbPGng+4pL7vOeAu2IjPoFJbUuFhZ52b1rHQNb12pVf6AF2KCkgBcJLk+wNkaYGvmrco+M8DBYJDm5nQHhPnz5wPgtBhZP6+cVw93sPlA25QJwClfjM6HD6DFU5jnuSm7dTHKVL/KT4hJzGw2U1VVRVVVVcHnE4lErg65fzjue6FeT08PPT2F2z8qioLT6Ry0Dtntdo9qL3RN0zjZFU6XMmRuB5r9AzozAMwqtXFhnYeVdR4urPOwrNYlvXeFmEQkABfJgD7A2RKIPjPAx44dA6C6uhqn05nbfs3SKl493MFLB1r5zJXzxmO4Y0qNp+j8nwOogTiGKhtlH1+KYpS3CYWYzIxGI2VlZZSVFW5dmF0spFA4zn7MXszn9/s5ffp0wePYbLYh65CtVuugY+wOxdl92ss7p73sPp0uZyh0oZrHZmTlzHTQzYbeUmlFJsSkJgG4WLR+F8Fllz8OtIKmgaLkyh+ys79Z1yyp4itP72PXaS/tgSiVTsu4DXu0aZpGz5NHSJwJorMZKP/rpegs8t9SiKmu70Ie9fX1A55XVZVQKDRkHXI8HiccDhMOh3PvlvVnNpvTfY9dLpIGG91xPXvP+PjxgWc55oMoBqD33SaTQceyWhcrZ3pYNcvDypke6cwgxBQkSaNIBswAVy4BnQFC7eA7jeqamZsB7h+Aq90WVs50s/uMjy0H27n1olnjOvbRFHj5NJHdHaBTKL1tCYaywWdrhBDTh06nw+l04nQ6mTlz5oDnNU0jGo0OGo67e7zEohFisRhtbW20tbXlPrc+c1tngRQ6MNlwOF1UlZdSX1NBaYkDjyd9czqljleIqUgCcJEMqAE2WqFmJTTthNNv0VKqJxwOYzabqasb2AbsmqVV7D7j46X9rZM2AEf2deJ/
6SQAng/MwzLPU9wBCSEmDUVRsFqtWK1W9I4STqW87PF52RVws6fZQyCWxEAKuxLHocSxKzEqTClqrSmMcT8OI8QjIfSoEA8S6QrS2NVMY0P+62Q7ZgxVh2wwyK9SISYb+a4tkt6lkPucgrqLMwH4TY50lQMwd+7cgldBb1xWzb+/dJjXj3URiiWxmyfXqYy3hOh+PP2bxr6+BsfFNUUekRBiMgjFkuxr8rErU7O765SXZl90wH4Wo44VM0ryLlSb4bGSTCZ57rnnuP7661EUBb/fP2gdcnbBkOxzJ0+eLDgmh8MxoPY4G5pdLte06YcsxGQyuVLTVNJ/IQyAuovgjR/A6Tc5ql8EDCx/yFpQ6aC+zMbJrjCvHu7guhWTJ0CmgnG6Ht6PFlcxz/fged/kv5BPCDH6UqrG4bYAu/t0ZTjcFkDNX18CRYGFlU5W1rm5sK6ElXVuFlU5MeiHvpjWYDBQWlpKaWnhRYX6Lhgy2KIhfRcMOXPmzKCv0zcQF/posUzeazmEmIwkABeJ1n8hDICZFwEQbjlCk5JeeWmwAKwoCtcsqeKnr51g84G2SROAtaRK168OkvLGMJRZKPuotDsTQqRrelt80fTMbqYzw74mH+F4asC+1S5L3szuipluHGPwLljfBUNmzRpYapZdMKTQzHF2ZjkUCpFMJunu7qa7u3vQ1zKZTEMGZJfLNaot34SY7iQAF0lvCUSfGWD3DHDXcdxnRdM0KioqcLvdgx5j47JqfvraCbYcaieRUjGeZbaj2DRNo+fpo8Qb/ShmPWW3L0NnMxZ7WEKIIghEE+w548vrudsRGLiamt2k54KZHi7MdGS4sM5DtXtizJb2XTBkxowZBfdJJpO5Vm59g3Hfj5FIhHg8PuSiIZBeevpsIVnqkYUYHvlOKRJN63cRXFbdRRzxBQBYsGDBkMdYU19Cqd1EdyjO9sZuLp1XPiZjHS3B15sJ72gDBUo/uhhjpa3YQxJCjINESuVQS4BdZ7y5coZjHUG0fqUMep3C4mpnbmb3wjoP8yoc6CfxipBnK7MAiMfjQwZkn89HPB7PLT/dt6NFf3a7fciA7HQ6i766nhATgQTgYunfBi27eeZFHN1XuP1Zf3qdwlWLK/ntzjNsPtA2oQNw9HAPvmePA+C+fg7WRYP/MhBCTF6apnG6O8I7p3vYfdrHrtM97G/2E0sOXE1tZok1F3TTq6m5sZqmXzgzmUyUl5dTXj74z/BoNDpkQPb7/SSTSUKhEKFQiJaWloLHURQFh8Mx5Eyy3W5Hp5vY7ygKcb4kABfJgD7AGW22RYRoxUiCWQV6X/a3cWkVv915hpf2t3Hv+5ZOyCuNEx1huh45CBrY1lTheFfhtwqFEJOPNxzP1O2mw+7uMz66Q/EB+7kshryZ3ZV1Hsod5iKMeHKyWCxYLJZBl57WNI1IJHLWkKyqKoFAgEAgMOhr9a197h+Qs/els4WY7CQAF4lWqAsEcLQn/QNlDqcweI9D5eIhj/PuBRVYjDqavBEOtgRYWusamwGfIzWcoOvhA2jRFKZ6FyUfmi8/NIWYpGLJFAea/bkL1Xad9tLYFR6wn1GvsLTGlXeh2uwyO7pJXMow0SmKgs1mw2azUVNT+KLo7Op6QwXkQCCQ1/ptMNnOFkOVW1gsFvl5LyYsCcBFoqnpK5v71wAfOZYuE1hAI5x+86wB2GrS8675FfzpYBubD7RNqACspTS6fnOIZGcEvdtM2ceWoBjkbTUhJgNV1TjRFcoF3d2nvRxo8ZNIaQP2nVNuZ+VMdy7wLq11YTZMv1KGia7v6nqDXbSXSqUIBoNDziSPpLPF2dq/SWcLUSwSgIulwEIY0WiU06dPAzCfRjj9Fqy5/ayH2ri0ij8dbOOlA618bsPQF86NJ9+zx4kd8aIYdZTdvhS9U37QCTFRdQZj7DqVWVwiE3j90eSA/UrtpnTQzXVmcOOxyff2VKHX63Mr3A1mJJ0tOjs76ezsHPRY0tlCFIv8ryoSLXP5c9+rcY8fP46maZS5LJT4/ekZ4GG4ekklOgX2N/tp8kaY4bGOyZhHIvhWC8G/NANQessiTLWOIo9ICJEViafY1+xj1ykvuzKrqTV5IwP2Mxt0LJ/RO7O7qs7DzBKrvK09zY2ks8VQITkWi42os8VgIdnpdI7FlymmOAnARVLoIrijR48CMH/BYtgJdB2BUBfYy4Y8VpnDzJr6ErY39vCnA23cfunssRr2sMSO+/A+ne5k4bqmHuvyidudQoipLqVqHG0P5haX2H3aS0NbgFS/5dQUBeZXOPIuVFtU7Zzw/cXFxDTczhZna/823M4Wdrs9V+OcDcX9b1ar/PEmekkALpJsDXB2IQxN03IBeMGS5XByEXQ2wJntsOjasx7vmqVVbG/s4aUDrUUNwMnuKF2/PgCqhvWCcpxX1RVtLEJMN9m63f3NfvY3+9hz2seeM15CBVZTq3Sa82Z2l89047LIwjRi/GQ7W1RWVhZ8fiSdLYLBIACHDx8e9PX0en0uDGd7Ihe6mc3SnWQ6kABcLNmlkA3pXzidnZ34/X4MBgP19fVQd1E6AJ9+c5gBuJpvPHeIN49344skcFvH/xeZGkvS+fB+1FAS4wwHJX+1UP7aFmKMxJIpjrQF2d/sywRePwdb/AWXDraZ9KzIlDJkQ2+NW67QFxPbSDpbdHd3s3XrVhYtWkQ4HM61est2tohEIqRSqbN2t4D07PVwgrLUJk9ucvaKpH8btOw3ZFlZGUajEeouhnf+J30h3DDMKbezoNLBkfYgWxva+cCF49trV1M1uh87TLItjM5ppOyvl6Kbhg3thRgLwViSA5lZ3WzYPdoeKNiRwWzQsbjGxbJaFxfMcHPhLA8LKp2TejU1IQaT7WxhsVjweDysWbMm/Tu0n0QiQTAYzAXj/rdsUI7H48Tjcbq6uujq6hryta1Wa14gLhSW7Xa7rLw3QUkALpLsUsjZGuBQKASki/2BdAAGaNoJqQTozz6je83SKo60B3lpf9u4B2D/5pNED3SBQaHs40sxuOUtJCHORWcwlith2N/sZ3+Tr2CvXUgvLrGs1s2yWhfLZrhYVutmbrkdg9TtCpHHaDRSUlJCSUnJkPvFYrFBQ3LfoJxKpYhEIkQiEdrb2wc9XrY+uX8w7h+WrVarrL43ziQAF0uuDVr6L8NsAHY4Mt0SyuaDtQQiPdC6B2asOeshNy6r5gdbj7G1oZ1YMjVufTjDu9oJvJxu31Zy00LMsyZOL2IhJipN0zjTE8mb1d3f7KPNHyu4f7XLkg66tS6WZkKvdGQQYnSZzWbMZvOQF+9la5OHCsrZm6ZpBINBgsHgoBfxQX6P5qGCstlslu/5USIBuEi0XA3wIDPAOh3MvAiOvJgugxhGAL5ghptKp5n2QIzXjnRy9ZLCS2aOpvjpAN2/TV904LhiJvZVhS9mEGI6S6ZUjnWE+oRdHwea/QX77CoKzCmzs7TW1Tu7W+uiTJYNFmJC6FubPNjS1NBbn3y2kBwKhVBVFZ/Ph8/nG/K1jUbjoDXJ2bDscDhkgZFhkABcJP3boA0IwJC
+EO7Ii+kL4S75zFmPqdMpXLe8moe3neSfn97Hslo31W7L6A8+I+WL0fnLA5DUsCwuxb1p9pi9lhCTRSSe4lCrPzere6DZx6HWALGkOmBfo15hYZUzE3LTYXdJjQu7WX40CzHZ9Z3VHUoymRyyPjl7i0ajJBKJs67AB+kOG4MF5WxYdjgc07o+WX7KFkluBjjzny/bwiU/AGfqgId5IRzA3dcs4rWjnRzrCPE3v9jO43+3HscY/DLVEik6/+cAaiCOocpG6UcWochFNmKa8YbjmYvTemt2j3UEUQdem4bdpM/N6i7NzOouqHRikuXBhZjWDAYDHo8Hj8cz5H7xePysIdnv95NMJnMLjHR0dAx5zEL1yYUu5JuK9ckSgItlkBngXA0wwIzVoOjB3wS+M+CeedbDOq16/r+bq7nz13s40Bbl73+9g5/fftGoXhSjaRrdvz1C4kwQnc1A+V8vRWeR/0pi6tI0jRZfhP1N+WG30OppAOUOU65ONzu7W19qQyd/JAohzpHJZKKsrIyyssEXx9I0jVgslrtYb6hbtkQjFArR2to66DF1Oh0Oh2PQgHy2VQEnKkktRaINchFc3gywyQ7VK6BlV7oMYogA3B3t5ndHfscTh5+gKdgEteAE3tYU1v3KSqXdg8PkwGF04DK5cvedJidl1jIWly5mUckiHKazL1kc2HqayO4O0CmU3rYEQ1nxl14WYrREEymOtgc53BbgQLOP/z2g42u7t9ITThTcv67UyrKa/E4MlU65UEUIMf4URTnrAiOQrk+ORCJnDcrBYBBVVXPLWheyZMkSbrnllrH6ksaMBOAiybZB0xuMaJpWOABDugyiZVe6DGL5Tf2OofF2+9s83vA4m09uJqGmf0GbdCZUVJJqEkXRSBKmORSG0NnHVe+qZ0npEhaXLmZJ2RKWlC6hxNLbNiayvwv/iycB8HxgHpZ5nnP7BxCiyJIplcauEA2tQRraAhxuDXC4LUBjV6hfCYMOSKDXKcyvcGS6MPSWMhRj0RkhhDgfOp0Ou92O3W4fdJERgFQqRSgUGjIoD9UxYyKb8AH4/vvv56mnnuLQoUNYrVYuvfRSvvnNb7Jo0aLcPtFolC9+8Ys8+uijxGIxNm3axA9+8IMhr84str4LYUQiEdTM4wEBuH49vPXfsPe3cOU9YPUQiAd45vgzPN7wOEe9R3O7Li9bzocXfZhr51yLRW8hlorx09cP8B9b9qDoItx59UyW15kIxoME4gECiQDBeJDmUDOHug/RGmrlpP8kJ/0neaHxhdxxq+3VLCldwjr9hVyxeT46FOzra3BcPPg3jRAThapqNHkjHG4L5IJuQ1uQY+1B4qmBF6YBeGxGFlU5mV9hJ9nZyIevuZRlM0uwGKfvBSNCiOlHr9fjcrlwuaZee9MJH4BfeeUV7rzzTtatW0cymeTLX/4yGzdu5MCBA7mw+IUvfIFnn32WJ554ArfbzV133cWNN97I66+/XuTRD6FPDXB29tdsNg9cWnHRe6F8IXQe5sDme3i8vIrnTjxHJJmuPbQarFw/53puXnQzy8qW5X2qxWDhritW0+238PPXT/DfL+j41d+uY9PCwrU63dFuDnUd4mD3wfSt6yCnAqdoDbXiDfTwkRPvQpdU2G07zE9S/8HqbatZW7WWtdVrqbRJ+zNRXJqm0RmM09DaN+gGONIWIFRgeWBILxG8oMrJoioHC6ucLKp2sqjKSUWmhCGRSPDccye4YKYbo4RfIYSYMiZ8AH7hhRfyHv/iF7+gsrKSnTt3cvnll+Pz+fjZz37GI488wlVXXQXAQw89xJIlS3jjjTe45JJLijHss+rtA6zHX+gCuOx+eiPPrbmZX+/5KXt7XoOe9PZ57nncvOhmbph3Ay7T0H+Z/f/eu4Qmb5gX97fxqV/u4Km/v5R5FQNfq9RSyqUzLuXSGZfmtgXjQQ51HUT5fRdVcTs+Y5Bvzvg5PQE/xwLHeOLwE0C6dGJt1VrWVK1hXfU6qu3V5/TvIsRw+CIJjrTlB93DbUG6Q/GC+xv1CvMqHCyqdqaDbibszvBY5cI0IYSYhiZ8AO4v2yQ6e8Xhzp07SSQSbNiwIbfP4sWLmTVrFtu2bSsYgGOxGLFY72pL2cLuRCJBIlH4QpfRlEgk0NT0jJSq9b6+zWYb8PpPHX2Kfz38P2AxY9A0NmDnpg3/yerK1bmLbIYz5m/fuJxWX5TdZ3zc/vO3+O2nLxpWY32zYmbBsWoCjVHQwey/voinan7POx3vsLNtJzvbd9LQ05ArnXjyyJMAzHTMZHXlatZUrmFN1Rpq7bUj+jeabLLnYDz+/0wn0USKYx0hDrel63SPtAc53BakdZDV0hQF6kttLKh0sLDKwcJKBwuqHMwus2Es0AkllUqSKjw5DMh5nYrknE5Ncl6nnkLndDTPr6JpWoGOlROTqqq8//3vx+v18tprrwHwyCOPcMcdd+QFWoCLLrqI97znPXzzm98ccJyvfe1r3HfffQO2P/LII9hstrEZfB+apnHsNz8FYPaNH6MnEOTMmTN4PB7mzJmT2y+qRfmu/7uEtBCX6S/g6yc2U5mKsX32nTSXXDzi1w0k4Lt79XTFFOodGnctTWE6y7u6toCeRftd6DSF0/Vh2mujA/aJqBFOpk7SmGzkRPIEzalmNPL/W3kUD7MNs5ljmMMcwxxKdCVylbzISanQEYWWsJK+RdL3O6OgUfj/icekUWPTqLGR/mjVqLJy1v/TQgghJqdwOMxHP/pRfD7fedclT6oZ4DvvvJN9+/blwu+5uueee7j77rtzj/1+P3V1dWzcuHFcCr2jkUguAG/ctIk3d+zkzJkzzJ07l+uuuy633/d3fZ+QL0S9s57vvPcnmF/7Dvzvt1nb9TuSN/+/dJu0EVp3aYhbfvIWJ4MJXgrU8v2PrEQ/yFvAajhB1w/2ompxzEtKWHPrxcMKrcFEkN0du9nZvpMdbTs42H0Qr+ZlV2IXuxK7AKiyVbG6cjVrK9eypnINdc66SR2IE4kEmzdv5pprrsFolK4Ag1FVjSZfhMNtQY60BWloC3KkPcjxzhCJVOG/xUtsxrzZ3IWVDhZUOnCNQ/cFOa9Tj5zTqUnO69RT6JwO1ortXEyaAHzXXXfxzDPP8OqrrzJzZm8/3OrqauLxOF6vN28Vlba2NqqrC9ehms1mzOaBb/8bjcZx+cZJ9JmtNlssRCLpC9pcLlfu9ZuCTfz60K8B+Id1/4DNbIPLvwh7HkPxncL4xvfh6q+M+LUX1Xr48V+v5WM/fZPNB9v51ktHufeGpQP201SNrqcaUH1x9GUWym5ZjM40vP8uJcYSrqy/kivrrwQgnAizq30X29u2s6N1B/u69tEWbuP5xud5vvF5ACqsFaytXsvaqrWsq17HbNfsSRmIx+v/0EQXT6qc6g7T2BmisSuU6cAQ5EhbgPAgF6TZMxekLc7W6WY+ljtMRf+/IOd16pFzOjXJeZ16+p7T0Ty3Ez4Aa5rGZz/7WX73u9+xdevWvBIBgDVr1mA0GtmyZQs33ZTuk9vQ0MCpU6dYv359MYZ8Vmoqmb
vftwtE3xZoD+x8gLga5+Lqi7li5hXpjUYrbPr/4PGPw1++B6tug9K5I379i+aU8u8fXsn//c07/Pz1E9SVWrnjsvx/18DW00QbesCgo+y2Jee10pvNaMu7uC6SjLC7YzfbW9OBeG/nXjoiHTx/4nmeP5EOxOXWctZVrWNt9eQOxFNZStVo6olwvDOYCbphjneGaOwMcaYnXHA5YACTXse8Ske680Km68LCKrkgTQghxPiZ8AH4zjvv5JFHHuH3v/89Tqczt1yf2+3GarXidrv55Cc/yd13301paSkul4vPfvazrF+/fsJ2gEglewOwXq8fEIB3te/ihcYXUFD4x3X/mB/8ltwAc6+E41vhhS/DRx89pzG8f2UtZ3rCfOuFBu774wG2Hevi7o0LWVztInq0B//m9GIXJR+Yh6n27KvDjYTVYOWSmku4pCZ9fqLJKHs69rCjbQfbW7ezp2MPnZHOvBliCcTFoaoarf4ojZ2hXLht7ErfP90dHrRsAdItxmaX2ZlTbmdepSM3szu7zDaqS3MLIYQQIzXhA/APf/hDAK688sq87Q899BCf+MQnAPjud7+LTqfjpptuylsIY6JSM5edKzodik5HMBgE0gFY1VS+vf3bAHxowYdYVLoo/5MVBa77FvzwUjj8PBzZDAuuOadxfOaKefgjSX786jFeOtDG5oNt3Lqkms+ciKNoYFtbhX3d2LczsxgsXFRzERfVXARALBVLB+LWHWxv287u9t0SiMeQpml0BGM0doY50RnkRGdv6UJjV4hoovBiEQAmg47ZZbZ00K2wM6fMzuxyO3PL7bleukIIIcREM+ED8HCaVFgsFh588EEefPDBcRjR+cuWQOj06X/+UJ8+wC+ceIE9nXuwGqzcdeFdhQ9QsQgu/jvY9l/w/D/BnMvBcPaWZv0pisKXrlvMX62ZwXc3H+GFvS2864AfBQMdVh0Vl9dSeMmMsWXWm1lXvY511ev4DJ+RQDxKekJxTnSlZ3FPZG6NXSEaO8MEY8lBP8+gU5hVamN2ub1f0LVR65ayBSGEEJPPhA/AU5GaTM8A6/R6EokE8Xi6eb/erOeBtx8A4G9X/C0VtorBD3LFP8HeJ6D7GLzxA3jXF855PPMrnTx422qOPXYQ8zudBNG4K+Kj/T9f5aMXzeLO98yn0mU55+OfLwnEwxeIJtIzuQWCrjc8eP9ERYGZJdZcycKc8vRM7pwyOzNLrFKyIIQQYkqRAFwEqcwMsN7QewGcXq/n8WOP0xJqodpezV8v/euhD2JxwYb74Om/g1e+DRfcAq5zX2wisq8T8zudACQ31VN/tIWmY108vO0kj+04ze3rZ/N/rphHqd10zq8xWs4nEK+uWs2qylXM98xHr5ucDWMj8RQnu0Oc6Aj1C7phOoOFF4jIqnZZesNtuY055Q7mlNuoK7VhNkzOfw8hhBBipCQAF4Ga7C2ByNb/2uw2frbvZwB8bvXnsBiGMeN6wS2w4+dw5i3YfC/c9NNzGk+iM0L3E4cBcLx7BjPfU88j76nnL0c7+fZLDbxzyst/v3qcX795ir951xxuX18/rFXkxsu5BGK70c4F5RewqnIVF1ZeyAUVF2A3jryv8liIJlK0+KK0eCM0+6I0eyO0+CKc7ApzojNEi2/gYiR9lTtMuZnc2eW9M7r1ZTZsw2xlJ4QQQkxl8tuwCLIXwekMvR0gMEE4GabOWcf1c64f3oF0Orj+2/DjK9PlEGvugNmXjWws8RTdvzqIFkthmu3Cfe3s3HOXzi/nqXllvNzQzr+/eJgDLX6+t+UI39tyhCqXmUXVLhZn2lgtqnYyv9KBxVj8WcShAvHb7W+zp2MPoUSIbS3b2NayDQCdomNhyUIurLiQVZWrWFW5imp79aiXTSRTKu2BGC2+CM3ebLhNf2z2RWjxRukKxc96HJfFwJwKB3PK0rO4s8ttucDrskgPTCGEEGIoEoCLoPciuN4ArBrTV9rP88xDp4yg3rL2QlhzO+z8BTz//+DTr4B++KfV+4djJFpD6BxGyj66GKVfraeiKFy1uIorF1bywv5WvrflCIdaA7T5Y7T5O3j1cEduX50Cs8vtmVDsYlF1OhjPKrUNutrceOgbiAFSaooj3iO80/4Ou9p3sat9F82hZg51H+JQ9yEebUi3lqu0VebC8IUVF7KodBEG3eD/tpoGXaE4naFwOtBmw212FtcboS0QIzVYg9w+rEY9NR4LMzxWatwWatxW6kptudncEptxWtQ0CyGEEGNBAnARZGeA9X0WwYjp07Wbdc66kR/wqnth/9PQtg92PgQXfWpYnxZ6u43wjjZQoPQji9C7Bi9r0OkUrl9Rw/UravBHExxpC3CoNUBDa+9HXyTB8Y4QxztCPLe3Nfe5FqMuvbJXZqY4e6twFKdNll6nZ3HpYhaXLubWxbcC0BZq452O3kB8qPsQ7eF2Xmx8kRcbXwTAqDNTY15EqWEhdm0eSmw2wYiRnnCc7mCcDr+exBtbz/r6Bp1ClSsTbj3pcDsj8zEbet1WCbhCCCEmOE0DNQn6yffOowTgIsguhKHrcxFcUEnXAs90zBz08wZlL4Or/hme+wf487/CshvT24aQaA/jffooAK6rZ2GZXzLsl3NZjKypL2VNfW+TNE3TaA/EMmHYT0NrkIY2P0fagkQTKnvO+Nhzxpd3nFK7aUAonlfuwKBX0DLHTH8ENNDQ0DQGPKeln8x7rGmgaumP9NkeiqXSgTUUpyccpyeU6PO4hJ7wZfSELiIWDhI3NKK3NqK3nUJvPUmCKKciezjFnswxFdRkJanUbFLMImWYDYlSyh2WAYG27/1yh7moM+JCCCEEyTjE/Olb1A+xQOZxIPPY3+9xoN++vvTHC2+DD/xXsb+aEZMAXAR9SyCyF8F1q93AOc4AQ7r+d+cv0rPAf/463PCfg+6qJVJ0P3IQLa5inufGedWsc3vNPhQlPatZ5bJwxcLe9m0pVaOxK8ThPjPFDW0BGrtCdIfibDvexbbjXef9+qNPD/F5KNH5uOMmPHE9NnsXiqWRmP44Pu0wgVQbekv6RsmbADhNLpaXLWN5+XKWlS1jWfkyqmxVMpsrhBBidKgqxAMFQquvQIjNhtYCz6WG7ho0bLHA6BxnnEkALoLePsC9M8DtyXYwwUznOcwAQ7ru9/pvw0PXwc6H00smz99QcFfvH4+TaA2jcxgp/chilDGcjdTrFOZVOJhX4eC6FTW57ZF4iqPtQQ61+nOhuKE1QHtg5N+QigIK6RCu5B6nNyqATlFy+1hNBkrtRkpspvTNbsp7XGpPbyuxGSmxm3CaDYOG185IJ7vbd/NO+zu83fY2B7oOEIj78y6uAyizlLGsfFk6EGdCcbm1fMRfpxBCiElM0yAR6RNEz3HmNT7KgdPkALMTzK50i9XsfbMTLO5+j7PPu/s8do3ueMaJBOAi6K0B1uPPBOCAEkBBYYZjxrkfuP5SWPFh2Ps4/OqvYP2dcNVXwNjbUi28q53QW63put9bFqF3Fqevr9WkZ8VMNytmuvO2RxMpNC0dYqE3zBYMuUWeVS23lnN1/
dVcXX81iUSCPzz7B+avn0+Dt4H9XfvZ37mfo96jdEW7ePXMq7x65tXc51bZqnJheHnZcpaWLcVj8RTvixFCCDG4VHKI0DrEzGtu/8w2dfBVN0dMb+oXTF2DhNg+z+WF2MzHSdoT/3xJAC6C3hrg3i4QUX2UKnsVJv15BtIbHkgH3rd/mV4q+djLcOOPoXo5iY4wPU+l636d76nDsmD4db/jZSK0UTtXBsXA0tKlrKxamdsWTUY51H2I/V37OdB1gP2d+znuO05buI22cBt/Pv3n3L4zHDN6SyfKlrGkbAlOk7MYX4oQQkwNqgqJ0CChtFBo9RWeeU1GRnFQysCgOqLQmtnfMHH68U9GEoCLIFsDrOiMhMNhAGK6GIuci87/4CY7vP/7sPA6+MNnoX0//OQ9aFd8he53LkOLpzDNceG6uv78X0uclcVg4cLKC7mw8sLctlAixMGug+lZ4kwwPuk/SVOwiaZgU67rBMBs1+xc+cTi0sXM88yj1FJa4JWEEGKKGXCRVoEygcx9fcTHJWeOoX/4wXSJQN/9OHvryWEz2gYPqgVDbP9yAWe65ECuCyk6CcBFkC2B0Ax6tFj6GzOuj59bB4jBLL4eZq5Lh+DDz+N9sZlEKozOqqPsI4tR9PLNVyx2o5211WtZW702t80X83Gw+yD7O3tDcVOwiUZ/I43+Rp49/mxu31JLKfM885jnnsd8z3zmedIfpYRCCDEhaBrEgwXqVwvMvg62PRaA5NCrXvalA6oA/IPtYOgXTN2FywHOVi4wCdt9icIkABdBdgZY1ekAFc2ooSnauV8ANxhHBdz6G8K/f5LQG1UAlCrfQH/iY7DyI/IX6ATiNru5pOYSLqm5JLetO9qdK5vY37WfIz1HaAo20R3tpru1m+2t2/OOUWYpSwdjT34wdpvd/V9OCCEKSyVGHlb7348HQFNHb0wmR4GAmh9WU0Y7uxsauWDdZRjsJQNnXg0W+Z0n8kgALoLsDHBK0QMqCUMCOI8WaENIdkfpeacWSOH0vIYl+ho8/Rocfh7e9wDY5O30iarUUsq7ZryLd814V25bOBHmhP8Ex7zHOOo9yjHvMY55j9EUbKIr2kVXaxdvtb6Vd5xya/mAUDzPMw+XaXJeuSuEKEDTIB7qVybg61cyUKh8oF+IHc1a1+ysa99SgMHqXgt2Gxj+RVpqIsHpzudYsfh6MMosrTg7CcBFkL0ILpVZ8jispOuAR7UEAtCSKl2PHEKLpTDVu3B98h/gDQNs/Tc48Hs49SZ84EFYULhdmph4bEZb7iK5vsKJMMd9x3OhOPuxJdRCZ6STzkgnb7a8mfc5ldbKATPG8zzz5MI7IcbbgA4Dg9e6Fg60maA7mrOuRvvwwuqAffpepCWzrmLikgBcBGo2AGceB5R0T7/RngH2PXeCRFMQnc1A6a2LUUxGuPwfYd7V8NSnoesI/PomWPMJuOROqFg4qq8vxo/NaGN5+XKWly/P2x5KhHKzxLkZY98xWkOttEfaaY+05/UsBqi0VQ6YLZ7nnofD5BjPL0mIiU9NpWtdY8HMTGqgzwIFIwixifDojUnRF7gwa+jygQEh1uRM95YXYgqT/+FFkCuBIP2XcUwfw2F0jGqtZmRfJ8G/NANQ8uFFGDx92qXMWA3/51X409fgrf9OryC38xdQuxpW3grLbzrrUspicrAb7VxQcQEXVFyQtz0YD3LMlx+Mj3qP0h5uz93+0vyXvM+ptlenQ7E7PxzbjLbx/JKEOD+ahl6NQbANUtHejgGFbvHg0M/Fg6M7NqNtiLDqHt52o1VmXYUYBgnARZC9CC6R6cwS1Uepc9aN2sIOye4o3b89AoDj8hlYFxeo8zXZ4PpvweL3whs/gCObofnt9O3Fe2DBpvSFcgs3Sa/BKchhcrCyYiUrK1bmbffH/Rz3Diyl6Ih00BpqpTXUyutNr+d9Tq29lrmeuXmzxnPdcyUYi9GVjPfOsJ41sPrzZ2Vzz/kxxAK8T1Nh9yiOTWfMzKA6GbSOdTizstJhQIhxIwG4CLI1wEktnYBj+hgLnAtG5dhaUqXrN4fQoklMs5y4N80e+hPmXpG+BTtg329h92+gZTc0PJu+WTzpGeGVt8LMtTKzMMW5TK4BfYsh3aat/4V3x3zH6Ix00hxqpjnUzGtNr+V9zgzHjFxd8WzXbGodtcxwzKDaXo1RJ7/op4VcicC5BNa++wYhNfJl0gvJ/gTTUFDMLjA78sNr344Dec+5+iwZm30us59MEggx6UgALoJsCURCzQRgXWzUWqD5XmgkcTqAYs3U/ep1w/tERwVc8pn0rf1gOgjveRwCLbDjZ+lb6bx0EF50HZQvBENxllEW489tdrO6ajWrq1bnbfdGvQVLKbqj3bmFPfouAQ2gU3RU2aqY4ZhBraOWmY6ZzHDOoNZey0znTCqsFein6dKcE4KmpWtSs8EzW6c60sAaC6RX4BptuYUInH0CaaEg6+wXVtP7JXQWXtz6Fza970MYTfIzTIjpSgJwEeQCcOZjVB8dlQ4QkX2dBF9rAqD0rxZiKLGc24Eql8A1X4ervwonXoHdj8HBP0D3MXj5X9M3nQHKFkDVUqjM3KqWgnsW6IYZusWk57F4WGNZw5qqNXnbe6I9eYH4TPAMTYEmmoPNxNU4LaEWWkIt0DbwmAadgRp7DTMcM3K37OzxTOdMyixlo1YuNGWoqXQLrNwt2PuxUGAdUEbQ77nR7CYA/UoECsyu5s265gfWvFlXk+P8L85KJEjppTuBENOdBOAiUFNJNCCeSv+SienPfwY40Rmh+4nDQKbud9koXMSm08O8q9K32H/AwT/Cnseg6e10252Og+kbT/Z+jskBFYszwXhZb0C2l5//eMSkUWIpYV31OtZVr8vbrmoqXZGu3OxwUzAdirMBuTXUSlJNcjpwmtOB0wWPbdabqXXU9s4e95lJrnXU4jF7Jm5A1jRIxfuF1Oz98CDbC93v93g0e7fmKOcXVvvepERACDHBSAAuglQyBTodap8a4PNpgaYlUnT/6mC63+9s19nrfs+F2QEX3pq+aRr4m6DtALTvz3w8CJ0N6V/KTTvSt77slb2huHJJ+n7FkvTFeGLa0Ck6KmwVVNgqBtQZA6TUFO3h9ryA3Dcot4XbiKVinPCd4ITvRMHXsBlszHDOYIZ9Rq60YoZzRi4gD7vPcaZ+1ZzwQc8JUGNnD6LDeU5Nnse/4FkouvRb/yZ7+nvrXGtazc50qcFE/UNCCCHOkwTgIlBTSbTM1b5JJQn6dIupc9Xz+2MkWkPoHEbKPjqCut9zpSjgnpm+LdzYuz2VgK5j6VDcfrA3IPc0QqgdjrfD8a19DwSlc/JLKCoWg60crB65Inoa0uv01DhqqHHUsJa1A55PpBK0Bpto8p6gyX+SpsCZdEAOt9Ic6aAj7iOcDHOk5whHeo4UfA0XBmYoRmZgYIamUJvSmJlIUpuIUxuLYktEIBGBVAwjcC3AvrH4Ys2ZoOrIfLQP4/FZ7hvMElqFEGIYJAAXgZpMohnS//QxfYwae805XxUf2tFKeEdbOkt+ZBF6VxHf
<remainder of base64-encoded PNG image data (notebook output: simulation-results plot) omitted>
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Update the parameters and create a new JAXProblem instance\n", + "jax_problem = jax_problem.update_parameters(jax_problem.parameters + noise)\n", + "\n", + "# Run simulations with the updated parameters\n", + "llh, results = run_simulations(jax_problem)\n", + "\n", + "# Plot the simulation results\n", + "plot_simulation(results)" + ] + }, + { + "cell_type": "markdown", + "id": "e73bdd447a4d48c8", + "metadata": {}, + "source": [ + "## Computing Gradients\n", + "\n", + "Similar to updating attributes, computing gradients in the JAX ecosystem can feel a bit unconventional if you’re not familiar with the JAX ecosysmt. JAX offers [powerful automatic differentiation](https://jax.readthedocs.io/en/latest/automatic-differentiation.html) through the `jax.grad` function. However, to use `jax.grad` with `JAXProblem`, we need to specify which parts of the `JAXProblem` should be treated as static." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "a8918f59607e6525", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:00.662578Z", + "start_time": "2024-11-19T09:51:00.649386Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error: Argument 'ParameterMappingForCondition(map_sim_var={'Epo_degradation_BaF3': 'Epo_degradation_BaF3', 'k_exp_hetero': 'k_exp_hetero', 'k_exp_homo': 'k_exp_homo', 'k_imp_hetero': 'k_imp_hetero', 'k_imp_homo': 'k_imp_homo', 'k_phos': 'k_phos', 'ratio': 0.693, 'specC17': 0.107, 'noiseParameter1_pSTAT5A_rel': 'sd_pSTAT5A_rel', 'noiseParameter1_pSTAT5B_rel': 'sd_pSTAT5B_rel', 'noiseParameter1_rSTAT5A_rel': 'sd_rSTAT5A_rel'},scale_map_sim_var={'Epo_degradation_BaF3': 'log10', 'k_exp_hetero': 'log10', 'k_exp_homo': 'log10', 'k_imp_hetero': 'log10', 'k_imp_homo': 'log10', 'k_phos': 'log10', 'ratio': 'lin', 'specC17': 'lin', 'noiseParameter1_pSTAT5A_rel': 'log10', 'noiseParameter1_pSTAT5B_rel': 'log10', 'noiseParameter1_rSTAT5A_rel': 'log10'},map_preeq_fix={},scale_map_preeq_fix={},map_sim_fix={},scale_map_sim_fix={})' of type is not a valid JAX type.\n" + ] + } + ], + "source": [ + "try:\n", + " # Attempt to compute the gradient of the run_simulations function\n", + " jax.grad(run_simulations, has_aux=True)(jax_problem)\n", + "except TypeError as e:\n", + " print(\"Error:\", e)" + ] + }, + { + "cell_type": "markdown", + "id": "922a9ffd94c99607", + "metadata": {}, + "source": "Fortunately, `equinox` simplifies this process by offering [filter_grad](https://docs.kidger.site/equinox/api/transformations/#equinox.filter_grad), which enables autodiff functionality that is compatible with `JAXProblem` and, in theory, also with `JAXModel`." + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "e2c635b6-79db-4e78-8738-789af29110b5", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:07.293314Z", + "start_time": "2024-11-19T09:51:00.709141Z" + } + }, + "outputs": [], + "source": [ + "import equinox as eqx\n", + "\n", + "# Compute the gradient using equinox's filter_grad, preserving auxiliary outputs\n", + "grad, _ = eqx.filter_grad(run_simulations, has_aux=True)(jax_problem)" + ] + }, + { + "cell_type": "markdown", + "id": "8fd639ad39948e72", + "metadata": {}, + "source": "Functions transformed by `filter_grad` return gradients that share the same structure as the first argument (unless specified otherwise). 
This allows us to access the gradient with respect to the parameters attribute directly `via grad.parameters`." + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "ab9225bf704e9ed5", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:07.310244Z", + "start_time": "2024-11-19T09:51:07.306293Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Array([ 2.39759630e+01, -1.36704159e-01, 1.33625245e+01, 3.25229304e+01,\n", + " 4.88660333e-05, 5.39482681e+01, -5.13624151e+00, -2.90885864e-02,\n", + " 6.08639536e+01], dtype=float64)" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grad.parameters" + ] + }, + { + "cell_type": "markdown", + "id": "5793acc4ad8908be", + "metadata": {}, + "source": "Attributes for which derivatives cannot be computed (typically anything that is not a [jax.numpy.array](https://jax.readthedocs.io/en/latest/_autosummary/jax.numpy.array.html)) are automatically set to `None`." + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "77e6bc4fa3e6970a", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:07.398319Z", + "start_time": "2024-11-19T09:51:07.392032Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "JAXProblem(\n", + " parameters=f64[9],\n", + " model=JAXModel_Boehm_JProteomeRes2014(api_version='0.0.1'),\n", + " _parameter_mappings={'model1_data1': None},\n", + " _measurements={('model1_data1',): (f64[3], f64[45], f64[0], f64[48], None)},\n", + " _petab_problem=None\n", + ")" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grad" + ] + }, + { + "cell_type": "markdown", + "id": "75fc08817f1b4734", + "metadata": {}, + "source": "Observant readers may notice that the gradient above appears to include numeric values for derivatives with respect to some measurements. However, `simulation_conditions` internally disables gradient computations using `jax.lax.stop_gradient`, resulting in these values being zeroed out." + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "a8b7634e-7bd8-41ae-a6dc-1d0f29993ac0", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:07.455764Z", + "start_time": "2024-11-19T09:51:07.450233Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(Array([0., 0., 0.], dtype=float64),\n", + " Array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", + " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", + " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], dtype=float64),\n", + " Array([], shape=(0,), dtype=float64),\n", + " Array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", + " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", + " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.], dtype=float64),\n", + " None)" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "grad._measurements[simulation_condition]" + ] + }, + { + "cell_type": "markdown", + "id": "3c6c4f2d3a2673a2", + "metadata": {}, + "source": "However, we can compute derivatives with respect to data elements using `JAXModel.simulate_condition`. In the example below, we differentiate the observables `y` (specified by passing `y` to the `ret` argument) with respect to the timepoints at which the model outputs are computed after the solving the differential equation. 
While this might not be particularly practical, it serves as an nice illustration of the power of automatic differentiation." + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "2a843410-4af4-4ff7-8b67-9293a5820caf", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:13.735937Z", + "start_time": "2024-11-19T09:51:07.494491Z" + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Array([[ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n", + " ...,\n", + " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " -1.30871686e-01, 0.00000000e+00, -3.80465095e-11],\n", + " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " 0.00000000e+00, -2.69250222e-01, -7.93596886e-11],\n", + " [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n", + " 0.00000000e+00, 0.00000000e+00, -2.29968854e-02]], dtype=float64)" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import jax.numpy as jnp\n", + "import diffrax\n", + "\n", + "# Define the simulation condition\n", + "simulation_condition = (\"model1_data1\",)\n", + "\n", + "# Load condition-specific data\n", + "ts_preeq, ts_dyn, ts_posteq, my, iys = jax_problem._measurements[\n", + " simulation_condition\n", + "]\n", + "\n", + "# Load parameters for the specified condition\n", + "p = jax_problem.load_parameters(simulation_condition[0])\n", + "# Disable preequilibration\n", + "p_preeq = jnp.array([])\n", + "\n", + "\n", + "# Define a function to compute the gradient with respect to dynamic timepoints\n", + "@eqx.filter_jacfwd\n", + "def grad_ts_dyn(tt):\n", + " return jax_problem.model.simulate_condition(\n", + " p=p,\n", + " p_preeq=p_preeq,\n", + " ts_preeq=ts_preeq,\n", + " ts_dyn=tt,\n", + " ts_posteq=ts_posteq,\n", + " my=jnp.array(my),\n", + " iys=jnp.array(iys),\n", + " solver=diffrax.Kvaerno5(),\n", + " controller=diffrax.PIDController(atol=1e-8, rtol=1e-8),\n", + " max_steps=2**10,\n", + " adjoint=diffrax.DirectAdjoint(),\n", + " ret=\"y\", # Return observables\n", + " )[0]\n", + "\n", + "\n", + "# Compute the gradient with respect to `ts_dyn`\n", + "g = grad_ts_dyn(ts_dyn)\n", + "g" + ] + }, + { + "cell_type": "markdown", + "id": "a9cec2a77b30669d", + "metadata": {}, + "source": [ + "## Compilation & Profiling\n", + "\n", + "To maximize performance with JAX, code should be just-in-time (JIT) compiled. This can be achieved using the `jax.jit` or `equinox.filter_jit` decorators. While JIT compilation introduces some overhead during the first function call, it significantly improves performance for subsequent calls. To demonstrate this, we will first clear the JIT cache and then profile the execution." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "d1f79c45ab2eccdc", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:14.292251Z", + "start_time": "2024-11-19T09:51:13.834276Z" + } + }, + "outputs": [], + "source": [ + "from time import time\n", + "\n", + "# Clear JAX caches to ensure a fresh start\n", + "jax.clear_caches()\n", + "\n", + "# Define a JIT-compiled gradient function with auxiliary outputs\n", + "gradfun = eqx.filter_jit(eqx.filter_grad(run_simulations, has_aux=True))" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "b44881332070e2b0", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:23.060962Z", + "start_time": "2024-11-19T09:51:14.309832Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Function compilation time: 2.53 seconds\n", + "Gradient compilation time: 6.21 seconds\n" + ] + } + ], + "source": [ + "# Measure the time taken for the first function call (including compilation)\n", + "start = time()\n", + "run_simulations(jax_problem)\n", + "print(f\"Function compilation time: {time() - start:.2f} seconds\")\n", + "\n", + "# Measure the time taken for the gradient computation (including compilation)\n", + "start = time()\n", + "gradfun(jax_problem)\n", + "print(f\"Gradient compilation time: {time() - start:.2f} seconds\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "a3e1463209074861", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:25.374277Z", + "start_time": "2024-11-19T09:51:23.078334Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "16.6 ms ± 609 μs per loop (mean ± std. dev. of 7 runs, 1 loop each)\n" + ] + } + ], + "source": [ + "%%timeit\n", + "run_simulations(\n", + " jax_problem,\n", + " controller=diffrax.PIDController(\n", + " rtol=1e-8, # same as amici default\n", + " atol=1e-16, # same as amici default\n", + " pcoeff=0.4, # recommended value for stiff systems\n", + " icoeff=0.3, # recommended value for stiff systems\n", + " dcoeff=0.0, # recommended value for stiff systems\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "2f074fbbebf834c6", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:31.394645Z", + "start_time": "2024-11-19T09:51:25.459759Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "39.8 ms ± 854 μs per loop (mean ± std. dev. 
of 7 runs, 1 loop each)\n" + ] + } + ], + "source": [ + "%%timeit \n", + "gradfun(\n", + " jax_problem,\n", + " controller=diffrax.PIDController(\n", + " rtol=1e-8, # same as amici default\n", + " atol=1e-16, # same as amici default\n", + " pcoeff=0.4, # recommended value for stiff systems\n", + " icoeff=0.3, # recommended value for stiff systems\n", + " dcoeff=0.0, # recommended value for stiff systems\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "5f68c5fcc16b637", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:55.244925Z", + "start_time": "2024-11-19T09:51:31.477484Z" + } + }, + "outputs": [], + "source": [ + "from amici.petab import simulate_petab\n", + "import amici\n", + "\n", + "# Import the PEtab problem as a standard AMICI model\n", + "amici_model = import_petab_problem(\n", + " petab_problem, compile_=True, verbose=False, jax=False\n", + ")\n", + "\n", + "# Configure the solver with appropriate tolerances\n", + "solver = amici_model.getSolver()\n", + "solver.setAbsoluteTolerance(1e-8)\n", + "solver.setRelativeTolerance(1e-8)\n", + "\n", + "# Prepare the parameters for the simulation\n", + "problem_parameters = dict(\n", + " zip(jax_problem.parameter_ids, jax_problem.parameters)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "413ed7c60b2cf4be", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:55.259985Z", + "start_time": "2024-11-19T09:51:55.257937Z" + } + }, + "outputs": [], + "source": [ + "# Profile simulation only\n", + "solver.setSensitivityOrder(amici.SensitivityOrder.none)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "768fa60e439ca8b4", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:57.417608Z", + "start_time": "2024-11-19T09:51:55.273367Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "26.1 ms ± 2.71 ms per loop (mean ± std. dev. of 7 runs, 10 loops each)\n" + ] + } + ], + "source": [ + "%%timeit \n", + "simulate_petab(\n", + " petab_problem,\n", + " amici_model,\n", + " solver=solver,\n", + " problem_parameters=problem_parameters,\n", + " scaled_parameters=True,\n", + " scaled_gradients=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "b8382b0b2b68f49e", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:57.497361Z", + "start_time": "2024-11-19T09:51:57.494502Z" + } + }, + "outputs": [], + "source": [ + "# Profile gradient computation using forward sensitivity analysis\n", + "solver.setSensitivityOrder(amici.SensitivityOrder.first)\n", + "solver.setSensitivityMethod(amici.SensitivityMethod.forward)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "3bae1fab8c416122", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:59.897459Z", + "start_time": "2024-11-19T09:51:57.511889Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "29.1 ms ± 1.82 ms per loop (mean ± std. dev. 
of 7 runs, 10 loops each)\n" + ] + } + ], + "source": [ + "%%timeit \n", + "simulate_petab(\n", + " petab_problem,\n", + " amici_model,\n", + " solver=solver,\n", + " problem_parameters=problem_parameters,\n", + " scaled_parameters=True,\n", + " scaled_gradients=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "71e0358227e1dc74", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:51:59.972149Z", + "start_time": "2024-11-19T09:51:59.969006Z" + } + }, + "outputs": [], + "source": [ + "# Profile gradient computation using adjoint sensitivity analysis\n", + "solver.setSensitivityOrder(amici.SensitivityOrder.first)\n", + "solver.setSensitivityMethod(amici.SensitivityMethod.adjoint)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "e3cc7971002b6d06", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:52:03.266074Z", + "start_time": "2024-11-19T09:51:59.992465Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "39.3 ms ± 1.6 ms per loop (mean ± std. dev. of 7 runs, 10 loops each)\n" + ] + } + ], + "source": [ + "%%timeit \n", + "simulate_petab(\n", + " petab_problem,\n", + " amici_model,\n", + " solver=solver,\n", + " problem_parameters=problem_parameters,\n", + " scaled_parameters=True,\n", + " scaled_gradients=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6a0beb20f53561", + "metadata": { + "ExecuteTime": { + "end_time": "2024-11-19T09:52:03.338529Z", + "start_time": "2024-11-19T09:52:03.336789Z" + } + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/python/sdist/amici/jax.template.py b/python/sdist/amici/jax.template.py index 05d82288d5..367ba9e500 100644 --- a/python/sdist/amici/jax.template.py +++ b/python/sdist/amici/jax.template.py @@ -87,7 +87,7 @@ def _sigmay(self, y, pk): return TPL_SIGMAY_RET - def _llh(self, t, x, pk, tcl, my, iy): + def _nllh(self, t, x, pk, tcl, my, iy): y = self._y(t, x, pk, tcl) TPL_Y_SYMS = y TPL_SIGMAY_SYMS = self._sigmay(y, pk) diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index ceeea8d817..126cdb8039 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -162,7 +162,7 @@ def _sigmay( ... @abstractmethod - def _llh( + def _nllh( self, t: jt.Float[jt.Scalar, ""], x: jt.Float[jt.Array, "nxs"], @@ -172,7 +172,7 @@ def _llh( iy: jt.Int[jt.Array, ""], ) -> jt.Float[jt.Scalar, ""]: """ - Compute the log-likelihood of the observable for the specified observable index. + Compute the negative log-likelihood of the observable for the specified observable index. :param t: time point @@ -326,7 +326,7 @@ def _x_rdatas( """ return jax.vmap(self._x_rdata, in_axes=(0, None))(x, tcl) - def _llhs( + def _nllhs( self, ts: jt.Float[jt.Array, "nt nx"], xs: jt.Float[jt.Array, "nt nxs"], @@ -336,7 +336,7 @@ def _llhs( iys: jt.Int[jt.Array, "nt"], ) -> jt.Float[jt.Array, "nt"]: """ - Compute the log-likelihood of the observables. + Compute the negative log-likelihood for each observable. 
:param ts: time points @@ -351,9 +351,9 @@ def _llhs( :param iys: observable indices :return: - log-likelihood of the observables + negative log-likelihoods of the observables """ - return jax.vmap(self._llh, in_axes=(0, 0, None, None, 0, 0))( + return jax.vmap(self._nllh, in_axes=(0, 0, None, None, 0, 0))( ts, xs, p, tcl, mys, iys ) @@ -431,8 +431,8 @@ def simulate_condition( controller: diffrax.AbstractStepSizeController, adjoint: diffrax.AbstractAdjoint, max_steps: int | jnp.int_, - ret: str = "nllh", - ): + ret: str = "llh", + ) -> tuple[jt.Float[jt.Array, "nt *nx"] | jnp.float_, dict]: r""" Simulate a condition. @@ -466,8 +466,8 @@ def simulate_condition( maximum number of solver steps :param ret: which output to return. Valid values are - - `nllh`: negative log-likelihood (default) - - `llhs`: log-likelihoods at each time point + - `llh`: log-likelihood (default) + - `nllhs`: negative log-likelihood at each time point - `x0`: full initial state vector (after pre-equilibration) - `x0_solver`: reduced initial state vector (after pre-equilibration) - `x`: full state vector @@ -533,11 +533,11 @@ def simulate_condition( ts = jnp.concatenate((ts_preeq, ts_dyn, ts_posteq), axis=0) x = jnp.concatenate((x_preq, x_dyn, x_posteq), axis=0) - llhs = self._llhs(ts, x, p, tcl, my, iys) - nllh = -jnp.sum(llhs) + nllhs = self._nllhs(ts, x, p, tcl, my, iys) + llh = -jnp.sum(nllhs) return { - "nllh": nllh, - "llhs": llhs, + "llh": llh, + "nllhs": nllhs, "x": self._x_rdatas(x, tcl), "x_solver": x, "y": self._ys(ts, x, p, tcl, iys), @@ -547,6 +547,7 @@ def simulate_condition( "tcl": tcl, "res": self._ys(ts, x, p, tcl, iys) - my, }[ret], dict( + ts=ts, x=x, stats_preeq=stats_preeq, stats_dyn=stats_dyn, diff --git a/python/sdist/amici/jax/petab.py b/python/sdist/amici/jax/petab.py index b1ee96e167..6ddfb7c074 100644 --- a/python/sdist/amici/jax/petab.py +++ b/python/sdist/amici/jax/petab.py @@ -63,7 +63,7 @@ class JAXProblem(eqx.Module): model: JAXModel _parameter_mappings: dict[str, ParameterMappingForCondition] _measurements: dict[ - tuple[str], + tuple[str, ...], tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray], ] _petab_problem: petab.Problem @@ -156,6 +156,12 @@ def _get_measurements( ) return measurements + def get_all_simulation_conditions(self) -> tuple[tuple[str, ...], ...]: + simulation_conditions = ( + self._petab_problem.get_simulation_conditions_from_measurement_df() + ) + return tuple(tuple(row) for _, row in simulation_conditions.iterrows()) + def _get_nominal_parameter_values(self) -> jt.Float[jt.Array, "np"]: """ Get the nominal parameter values for the model based on the nominal values in the PEtab problem. @@ -245,9 +251,18 @@ def load_parameters( ) return self._unscale(p, pscale) + def update_parameters(self, p: jt.Float[jt.Array, "np"]) -> "JAXProblem": + """ + Update parameters for the model. + + :param p: + New problem instance with updated parameters. 
+ """ + return eqx.tree_at(lambda p: p.parameters, self, p) + def run_simulation( self, - simulation_condition: tuple[str], + simulation_condition: tuple[str, ...], solver: diffrax.AbstractSolver, controller: diffrax.AbstractStepSizeController, max_steps: jnp.int_, @@ -293,7 +308,7 @@ def run_simulation( def run_simulations( problem: JAXProblem, - simulation_conditions: Iterable[tuple], + simulation_conditions: Iterable[tuple] | None = None, solver: diffrax.AbstractSolver = diffrax.Kvaerno5(), controller: diffrax.AbstractStepSizeController = diffrax.PIDController( rtol=1e-8, @@ -320,6 +335,9 @@ def run_simulations( :return: Overall negative log-likelihood and condition specific results and statistics. """ + if simulation_conditions is None: + simulation_conditions = problem.get_all_simulation_conditions() + results = { sc: problem.run_simulation(sc, solver, controller, max_steps) for sc in simulation_conditions From 5dc873506873fb476e5830c1d7ee0121a0975342 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 09:56:50 +0000 Subject: [PATCH 74/80] fix doc --- python/sdist/amici/jax/model.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/sdist/amici/jax/model.py b/python/sdist/amici/jax/model.py index 126cdb8039..cecebeab0e 100644 --- a/python/sdist/amici/jax/model.py +++ b/python/sdist/amici/jax/model.py @@ -196,6 +196,7 @@ def _nllh( def state_ids(self) -> list[str]: """ Get the state ids of the model. + :return: State ids """ @@ -206,6 +207,7 @@ def state_ids(self) -> list[str]: def observable_ids(self) -> list[str]: """ Get the observable ids of the model. + :return: Observable ids """ @@ -216,6 +218,7 @@ def observable_ids(self) -> list[str]: def parameter_ids(self) -> list[str]: """ Get the parameter ids of the model. + :return: Parameter ids """ From 784ab2c095192d100b3faf5debd9685688ad26b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 11:30:36 +0000 Subject: [PATCH 75/80] fix notebook symlink --- documentation/ExampleJaxPEtab.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/documentation/ExampleJaxPEtab.ipynb b/documentation/ExampleJaxPEtab.ipynb index b3f3b4e18e..821b14f21f 120000 --- a/documentation/ExampleJaxPEtab.ipynb +++ b/documentation/ExampleJaxPEtab.ipynb @@ -1 +1 @@ -./python/examples/example_jax_petab/ExampleJaxPEtab.ipynb \ No newline at end of file +../python/examples/example_jax_petab/ExampleJaxPEtab.ipynb \ No newline at end of file From d528168e0dc42a80c270ccfdc74b2dcfed5ed62e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 12:02:08 +0000 Subject: [PATCH 76/80] update notebook --- .../example_jax_petab/ExampleJaxPEtab.ipynb | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb index 3515567706..b157a114ad 100644 --- a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb +++ b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb @@ -64,7 +64,7 @@ "metadata": {}, "source": [ "## Simulation\n", - "In principle, we can already use this model for simulation using the [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) method. However, this approach can be cumbersome as timepoints, data etc need to be specified manually. 
Instead, we process the PEtab problem into a [JAXProblem](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem), which enables efficient simulation using [amici.jax.run_simulations]((https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.run_simulations)." + "In principle, we can already use this model for simulation using the [simulate_condition](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel.simulate_condition) method. However, this approach can be cumbersome as timepoints, data etc. need to be specified manually. Instead, we process the PEtab problem into a [JAXProblem](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem), which enables efficient simulation using [amici.jax.run_simulations]((https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.run_simulations)." ] }, { @@ -539,7 +539,7 @@ "source": [ "The root cause of this error lies in the fact that, to enable autodiff, direct modifications of attributes are not allowed in [equinox](https://docs.kidger.site/equinox/), which AMICI utilizes under the hood. Consequently, attributes of instances like `JAXModel` or `JAXProblem` cannot be updated directly — this is the price we have to pay for autodiff.\n", "\n", - "However, `JAXProblem` provides a convenient method called `update_parameters`. The caveat is that this method creates a new JAXProblem instance instead of modifying the existing one." + "However, `JAXProblem` provides a convenient method called [update_parameters](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem.update_parameters. The caveat is that this method creates a new JAXProblem instance instead of modifying the existing one." ] }, { @@ -1132,19 +1132,6 @@ " scaled_gradients=True,\n", ")" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f6a0beb20f53561", - "metadata": { - "ExecuteTime": { - "end_time": "2024-11-19T09:52:03.338529Z", - "start_time": "2024-11-19T09:52:03.336789Z" - } - }, - "outputs": [], - "source": [] } ], "metadata": { From 24d8c090628ea7e18ce96afef50700dc4f7a50fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 12:04:38 +0000 Subject: [PATCH 77/80] Update ExampleJaxPEtab.ipynb --- python/examples/example_jax_petab/ExampleJaxPEtab.ipynb | 1 + 1 file changed, 1 insertion(+) diff --git a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb index b157a114ad..f4ccfc1787 100644 --- a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb +++ b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb @@ -64,6 +64,7 @@ "metadata": {}, "source": [ "## Simulation\n", + "\n", "In principle, we can already use this model for simulation using the [simulate_condition](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel.simulate_condition) method. However, this approach can be cumbersome as timepoints, data etc. need to be specified manually. Instead, we process the PEtab problem into a [JAXProblem](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem), which enables efficient simulation using [amici.jax.run_simulations]((https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.run_simulations)." 
] }, From 5393e6c768fc0e7583574477b819329305bfd75a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 12:06:27 +0000 Subject: [PATCH 78/80] Update ExampleJaxPEtab.ipynb --- python/examples/example_jax_petab/ExampleJaxPEtab.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb index f4ccfc1787..9151cfcc13 100644 --- a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb +++ b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb @@ -21,7 +21,7 @@ "\n", "To begin, we will import a model using [PEtab](https://petab.readthedocs.io). For this demonstration, we will utilize the [Benchmark Collection](https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab), which provides a diverse set of models. For more information on importing PEtab models, refer to the corresponding [PEtab notebook](https://amici.readthedocs.io/en/latest/petab.html).\n", "\n", - "In this tutorial, we will import the Böhm model from the Benchmark Collection. Using [amici.petab_import](https://amici.readthedocs.io/en/latest/generated/amici.petab_import.html#amici.petab_import.import_petab_problem), we will load the PEtab problem. To create a [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) instead of a standard AMICI model, we set the `jax` parameter to `True`. As we won't use the corresponding AMICI model, we set the `compile_` to False.\n" + "In this tutorial, we will import the Böhm model from the Benchmark Collection. Using [amici.petab_import](https://amici.readthedocs.io/en/latest/generated/amici.petab_import.html#amici.petab_import.import_petab_problem), we will load the PEtab problem. To create a [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) instead of a standard AMICI model, we set the `jax` parameter to `True`. As we won't use the corresponding AMICI model, we set the `compile_` to `False`.\n" ] }, { @@ -540,7 +540,7 @@ "source": [ "The root cause of this error lies in the fact that, to enable autodiff, direct modifications of attributes are not allowed in [equinox](https://docs.kidger.site/equinox/), which AMICI utilizes under the hood. Consequently, attributes of instances like `JAXModel` or `JAXProblem` cannot be updated directly — this is the price we have to pay for autodiff.\n", "\n", - "However, `JAXProblem` provides a convenient method called [update_parameters](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem.update_parameters. The caveat is that this method creates a new JAXProblem instance instead of modifying the existing one." + "However, `JAXProblem` provides a convenient method called [update_parameters](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXProblem.update_parameters). The caveat is that this method creates a new JAXProblem instance instead of modifying the existing one." 
] }, { From a22f099c1b0f69e8468ef7202caf35855c7462cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 12:47:28 +0000 Subject: [PATCH 79/80] fix compilation issue --- python/examples/example_jax_petab/ExampleJaxPEtab.ipynb | 9 ++++++--- python/sdist/pyproject.toml | 4 +--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb index 9151cfcc13..10369f74b0 100644 --- a/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb +++ b/python/examples/example_jax_petab/ExampleJaxPEtab.ipynb @@ -21,7 +21,7 @@ "\n", "To begin, we will import a model using [PEtab](https://petab.readthedocs.io). For this demonstration, we will utilize the [Benchmark Collection](https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab), which provides a diverse set of models. For more information on importing PEtab models, refer to the corresponding [PEtab notebook](https://amici.readthedocs.io/en/latest/petab.html).\n", "\n", - "In this tutorial, we will import the Böhm model from the Benchmark Collection. Using [amici.petab_import](https://amici.readthedocs.io/en/latest/generated/amici.petab_import.html#amici.petab_import.import_petab_problem), we will load the PEtab problem. To create a [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) instead of a standard AMICI model, we set the `jax` parameter to `True`. As we won't use the corresponding AMICI model, we set the `compile_` to `False`.\n" + "In this tutorial, we will import the Böhm model from the Benchmark Collection. Using [amici.petab_import](https://amici.readthedocs.io/en/latest/generated/amici.petab_import.html#amici.petab_import.import_petab_problem), we will load the PEtab problem. 
To create a [JAXModel](https://amici.readthedocs.io/en/latest/generated/amici.jax.html#amici.jax.JAXModel) instead of a standard AMICI model, we set the `jax` parameter to `True`.\n" ] }, { @@ -52,7 +52,7 @@ "# Import the PEtab problem as a JAX-compatible AMICI model\n", "jax_model = import_petab_problem(\n", " petab_problem,\n", - " compile_=False, # do not compile regular amici model\n", + " compile_=True, # do not compile regular amici model\n", " verbose=False, # no text output\n", " jax=True, # return jax model\n", ")" @@ -977,7 +977,10 @@ "\n", "# Import the PEtab problem as a standard AMICI model\n", "amici_model = import_petab_problem(\n", - " petab_problem, compile_=True, verbose=False, jax=False\n", + " petab_problem,\n", + " compile_=False, # do not recompile\n", + " verbose=False,\n", + " jax=False, # load the amici model this time\n", ")\n", "\n", "# Configure the solver with appropriate tolerances\n", diff --git a/python/sdist/pyproject.toml b/python/sdist/pyproject.toml index c2a20fd0f2..6441ac3300 100644 --- a/python/sdist/pyproject.toml +++ b/python/sdist/pyproject.toml @@ -129,9 +129,7 @@ line-length = 79 [tool.ruff] line-length = 79 extend-include = ["*.ipynb"] -exclude = ['jax.template.py'] -extend-select = ["UP"] [tool.ruff.lint] -extend-select = ["B028"] +extend-select = ["B028", "UP"] ignore = ["E402", "F403", "F405", "E741"] From c242b15d56e83c5068677f103aa110ab7fe915d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Tue, 19 Nov 2024 15:20:29 +0000 Subject: [PATCH 80/80] fix --- python/tests/test_jax.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/tests/test_jax.py b/python/tests/test_jax.py index 1ccd388257..d124a6e1be 100644 --- a/python/tests/test_jax.py +++ b/python/tests/test_jax.py @@ -162,7 +162,7 @@ def check_fields_jax( ) fun = beartype(jax_model.simulate_condition) - for output in ["nllh", "x0", "x", "y", "res"]: + for output in ["llh", "x0", "x", "y", "res"]: oargs = (*args[:-2], diffrax.DirectAdjoint(), 2**8, output) if sensi_order == amici.SensitivityOrder.none: r_jax[output] = fun(p, *oargs)[0]
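
For quick reference, the workflow demonstrated in the notebook above condenses to the sketch below. This is a summary under assumptions, not part of any patch: it presumes `petab_problem` already holds the imported Böhm PEtab problem, that `import_petab_problem` lives at the path linked from the notebook (`amici.petab_import`), and that `JAXProblem`/`run_simulations` are importable from `amici.jax` as documented; adjust names and paths to the installed version as needed.

import equinox as eqx
import diffrax

# Import paths as referenced in the notebook/docs above; treat as assumptions.
from amici.petab_import import import_petab_problem
from amici.jax import JAXProblem, run_simulations

# Import the PEtab problem as a JAX-compatible model and wrap it into a problem.
jax_model = import_petab_problem(petab_problem, verbose=False, jax=True)
jax_problem = JAXProblem(jax_model, petab_problem)

# Objective evaluation: overall negative log-likelihood plus per-condition results.
llh, results = run_simulations(jax_problem)

# JIT-compiled gradient with respect to the problem parameters; equinox's filter
# transformations leave non-differentiable leaves (e.g. parameter mappings) as None.
gradfun = eqx.filter_jit(eqx.filter_grad(run_simulations, has_aux=True))
grad, _ = gradfun(jax_problem)
print(grad.parameters)

# Parameters are immutable; update_parameters returns a new JAXProblem instance.
jax_problem = jax_problem.update_parameters(jax_problem.parameters + 0.1)

# Solver behaviour is configurable through diffrax, e.g. a stiff-friendly controller.
llh, results = run_simulations(
    jax_problem,
    controller=diffrax.PIDController(
        rtol=1e-8, atol=1e-16, pcoeff=0.4, icoeff=0.3, dcoeff=0.0
    ),
)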