Merge branch 'main' into update-logging
brynpickering committed Oct 23, 2023
2 parents 8341233 + 95ce22d commit 68051c1
Showing 12 changed files with 612 additions and 426 deletions.
624 changes: 319 additions & 305 deletions doc/_static/math.rst

Large diffs are not rendered by default.

50 changes: 27 additions & 23 deletions doc/_static/math_storage_inter_cluster.rst

Large diffs are not rendered by default.

8 changes: 0 additions & 8 deletions doc/api/api.rst
@@ -11,14 +11,6 @@ Model class
 .. autoclass:: calliope.Model
     :members:
 
-.. _api_backend_interface:
-
-Optimisation backend interface
-==============================
-
-.. automodule:: calliope.backend.backends
-   :members: BackendModel
-
 .. _api_time_masks:
 
 Time series
11 changes: 8 additions & 3 deletions src/calliope/backend/backend_model.py
@@ -211,7 +211,7 @@ def _add_component(
         if component_dict is None:
             component_dict = self.inputs.math[component_type][name]
 
-        if component_dict.get("active", False):
+        if break_early and component_dict.get("active", False):
             self.log(
                 component_type, name, "Component deactivated and therefore not built."
             )
@@ -223,7 +223,10 @@
         )
 
         top_level_where = parsed_component.generate_top_level_where_array(
-            self.inputs, align_to_foreach_sets=False, break_early=break_early
+            self.inputs,
+            self._dataset,
+            align_to_foreach_sets=False,
+            break_early=break_early,
         )
         if break_early and not top_level_where.any():
             return parsed_component
@@ -242,7 +245,9 @@
             .astype(np.dtype("O"))
         )
         for element in equations:
-            where = element.evaluate_where(self.inputs, initial_where=top_level_where)
+            where = element.evaluate_where(
+                self.inputs, self._dataset, initial_where=top_level_where
+            )
             if break_early and not where.any():
                 continue
 
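
The net effect of the changes in this file is that component `where` masks are now evaluated against two datasets at once: the model inputs (`self.inputs`) for parameters and the backend's own dataset (`self._dataset`) for already-built optimisation components. A minimal, self-contained sketch of that idea, using hypothetical parameter and variable names rather than Calliope's API:

import numpy as np
import xarray as xr

# hypothetical model inputs (parameters) and backend dataset (decision variables)
inputs = xr.Dataset({"flow_cap_max": xr.DataArray([10.0, np.nan, 5.0], dims="techs")})
backend = xr.Dataset({"storage_cap": xr.DataArray([1.0, np.nan, 2.0], dims="techs")})

# a parameter condition is checked against the model inputs...
param_where = inputs["flow_cap_max"].notnull() & (inputs["flow_cap_max"] != np.inf)
# ...while a backend decision variable can only be checked for existence
variable_where = backend["storage_cap"].notnull()

top_level_where = param_where & variable_where
print(top_level_where.values)  # [ True False  True]
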
5 changes: 3 additions & 2 deletions src/calliope/backend/expression_parser.py
@@ -597,17 +597,18 @@ def eval(
         """
         references.add(self.name)
         evaluated: Optional[Union[dict, xr.DataArray, str]]
+        as_latex = eval_kwargs.get("as_latex", False)
         if as_dict:
             evaluated = {"param_or_var_name": self.name}
         elif backend_interface is not None and backend_dataset is not None:
-            if as_values:
+            if as_values and not as_latex:
                 evaluated = backend_interface.get_parameter(
                     self.name, as_backend_objs=False
                 )
             else:
                 evaluated = backend_dataset[self.name]
 
-            if eval_kwargs.get("as_latex", False):
+            if as_latex:
                 evaluated = self.as_latex(evaluated)
         else:
             evaluated = None
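
The hoisted `as_latex` flag above means that when only a LaTeX string is requested, the `get_parameter(..., as_backend_objs=False)` round-trip is skipped and the raw backend entry is stringified directly. A condensed, self-contained sketch of that control flow (illustrative function and data, not the real parser class):

from typing import Any


def resolve(name: str, backend_dataset: dict, as_values: bool = False, **eval_kwargs: Any) -> Any:
    as_latex = eval_kwargs.get("as_latex", False)
    if as_values and not as_latex:
        # "expensive" path: extract concrete values from the backend entry
        evaluated = backend_dataset[name]["values"]
    else:
        # cheap path: keep the backend entry itself
        evaluated = backend_dataset[name]
    if as_latex:
        evaluated = rf"\textit{{{name}}}"  # stringify rather than resolve to values
    return evaluated


print(resolve("flow_cap", {"flow_cap": {"values": [1, 2, 3]}}, as_values=True, as_latex=True))
# prints \textit{flow_cap}: the value-extraction branch was bypassed
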
17 changes: 13 additions & 4 deletions src/calliope/backend/latex_backend_model.py
@@ -347,15 +347,22 @@ def add_variable(
         name: str,
         variable_dict: Optional[parsing.UnparsedVariableDict] = None,
     ) -> None:
-        domain_dict = {"real": r"\R", "integer": r"\Z"}
+        domain_dict = {"real": r"\mathbb{R}\;", "integer": r"\mathbb{Z}\;"}
 
         self.valid_math_element_names.add(name)
 
+        def _variable_setter(where: xr.DataArray) -> xr.DataArray:
+            return where.where(where)
+
         if variable_dict is None:
             variable_dict = self.inputs.math["variables"][name]
 
         parsed_component = self._add_component(
-            name, variable_dict, lambda where: where, "variables", break_early=False
+            name,
+            variable_dict,
+            _variable_setter,
+            "variables",
+            break_early=False,
         )
         where_array = self.variables[name]
 
@@ -445,7 +452,7 @@ def generate_math_doc(self, format: _ALLOWED_MATH_FILE_FORMATS = "tex") -> str:
 
     def _add_latex_strings(self, where, element, equation_strings):
         expr = element.evaluate_expression(self.inputs, self, as_latex=True)
-        where_latex = element.evaluate_where(self.inputs, as_latex=True)
+        where_latex = element.evaluate_where(self.inputs, self._dataset, as_latex=True)
 
         if self.include == "all" or (self.include == "valid" and where.any()):
             equation_strings.append({"expression": expr, "where": where_latex})
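
On the `_variable_setter` introduced in `add_variable` above: unlike the old `lambda where: where`, `where.where(where)` keeps entries where the mask is true and turns the rest into NaN, so invalid elements of a variable's where array become missing values rather than `False`. A two-line illustration:

import xarray as xr

where = xr.DataArray([True, False, True], dims="techs")
print(where.where(where).values)  # [True nan True]: the False entry is now missing
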
@@ -460,7 +467,9 @@ def _generate_math_string(
         sets: Optional[list[str]] = None,
     ) -> None:
         if parsed_component is not None:
-            where = parsed_component.evaluate_where(self.inputs, as_latex=True)
+            where = parsed_component.evaluate_where(
+                self.inputs, self._dataset, as_latex=True
+            )
             sets = parsed_component.sets
 
         if self.include == "all" or (
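
On the `domain_dict` change in `add_variable` above: `\R` and `\Z` are not standard LaTeX commands and need extra macros or packages, whereas `\mathbb{R}`/`\mathbb{Z}` render with the widely supported amssymb blackboard-bold font; the trailing `\;` only adds spacing. A small sketch of how such a domain string might be assembled (hypothetical helper, not the real rendering code):

domain_dict = {"real": r"\mathbb{R}\;", "integer": r"\mathbb{Z}\;"}


def domain_string(variable_name: str, domain: str = "real") -> str:
    # hypothetical: format a variable's domain line for a generated math document
    return rf"\textbf{{{variable_name}}} \in {domain_dict[domain]}"


print(domain_string("storage_cap"))  # \textbf{storage_cap} \in \mathbb{R}\;
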
9 changes: 8 additions & 1 deletion src/calliope/backend/parsing.py
@@ -249,6 +249,7 @@ def add_expression_group_combination(
     def evaluate_where(  # noqa: F811
         self,
         model_data: xr.Dataset,
+        backend_dataset: Optional[xr.Dataset],
         as_latex: Literal[False] = False,
         initial_where: xr.DataArray = TRUE_ARRAY,
     ) -> xr.DataArray:
@@ -258,13 +259,15 @@ def evaluate_where(  # noqa: F811
     def evaluate_where(  # noqa: F811
         self,
         model_data: xr.Dataset,
+        backend_dataset: Optional[xr.Dataset],
         as_latex: Literal[True],
     ) -> str:
         "Expecting string if requesting latex string"
 
     def evaluate_where(  # noqa: F811
         self,
         model_data: xr.Dataset,
+        backend_dataset: Optional[xr.Dataset] = None,
         as_latex: bool = False,
         initial_where: xr.DataArray = TRUE_ARRAY,
     ) -> Union[xr.DataArray, str]:
@@ -286,6 +289,7 @@ def evaluate_where(  # noqa: F811
                 helper_functions=helper_functions._registry["where"],
                 as_latex=as_latex,
                 model_data=model_data,
+                backend_dataset=backend_dataset,
             )
             for where in self.where
         ]
@@ -704,6 +708,7 @@ def combine_exists_and_foreach(self, model_data: xr.Dataset) -> xr.DataArray:
     def generate_top_level_where_array(
         self,
         model_data: xr.Dataset,
+        backend_dataset: Optional[xr.Dataset] = None,
         align_to_foreach_sets: bool = True,
         break_early: bool = True,
     ) -> xr.DataArray:
@@ -733,7 +738,9 @@ def generate_top_level_where_array(
             return foreach_where
 
         self.parse_top_level_where()
-        where = self.evaluate_where(model_data, initial_where=foreach_where)
+        where = self.evaluate_where(
+            model_data, backend_dataset, initial_where=foreach_where
+        )
         if break_early and not where.any():
             return where
 
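
The three `evaluate_where` signatures above follow the `typing.overload` pattern: the `Literal` value of `as_latex` selects the return type (boolean array vs. LaTeX string), and the new optional `backend_dataset` argument is threaded through both overloads and the implementation. A condensed, standalone sketch of the same pattern (a module-level function rather than the real method):

from typing import Literal, Optional, Union, overload

import xarray as xr


@overload
def evaluate_where(
    model_data: xr.Dataset,
    backend_dataset: Optional[xr.Dataset],
    as_latex: Literal[False] = False,
) -> xr.DataArray:
    ...


@overload
def evaluate_where(
    model_data: xr.Dataset,
    backend_dataset: Optional[xr.Dataset],
    as_latex: Literal[True],
) -> str:
    ...


def evaluate_where(
    model_data: xr.Dataset,
    backend_dataset: Optional[xr.Dataset] = None,
    as_latex: bool = False,
) -> Union[xr.DataArray, str]:
    # placeholder body: return a LaTeX string or a boolean mask
    if as_latex:
        return r"\textit{where}"
    return xr.DataArray(True)
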
60 changes: 49 additions & 11 deletions src/calliope/backend/where_parser.py
@@ -4,7 +4,7 @@
 from __future__ import annotations
 
 import operator
-from typing import Any, Union
+from typing import Any, Optional, Union
 
 import numpy as np
 import pyparsing as pp
@@ -162,12 +162,17 @@ def __repr__(self):
         "Return string representation of the parsed grammar"
         return f"DATA_VAR:{self.data_var}"
 
-    def as_latex(self, model_data: xr.Dataset, apply_where: bool = True) -> str:
+    def as_latex(
+        self, data: xr.Dataset, data_var_type: str, apply_where: bool = True
+    ) -> str:
         """stringify conditional for use in a LaTex math formula"""
         # TODO: add dims from a YAML schema of params that includes default dims
-        data_var_string = rf"\textit{{{self.data_var}}}"
+        if data_var_type == "parameters":
+            data_var_string = rf"\textit{{{self.data_var}}}"
+        else:
+            data_var_string = rf"\textbf{{{self.data_var}}}"
 
-        var = model_data.get(self.data_var, None)
+        var = data.get(self.data_var, None)
         if var is not None and var.shape:
             data_var_string += (
                 rf"_\text{{{','.join(str(i).removesuffix('s') for i in var.dims)}}}"
@@ -176,18 +181,27 @@ def as_latex(self, model_data: xr.Dataset, apply_where: bool = True) -> str:
             data_var_string = rf"\exists ({data_var_string})"
         return data_var_string
 
-    def _data_var_exists(self, model_data: xr.Dataset) -> xr.DataArray:
+    def _data_var_exists(
+        self, model_data: xr.Dataset, data_var_type: str
+    ) -> xr.DataArray:
         "mask by setting all (NaN | INF/-INF) to False, otherwise True"
         var = model_data.get(self.data_var, xr.DataArray(np.nan))
-        return var.notnull() & (var != np.inf) & (var != -np.inf)
+        if data_var_type == "parameters":
+            return var.notnull() & (var != np.inf) & (var != -np.inf)
+        else:
+            return var.notnull()
 
     def _data_var_with_default(self, model_data: xr.Dataset) -> xr.DataArray:
         "Access data var and fill with default values. Return default value as an array if var does not exist"
         default = model_data.attrs["defaults"].get(self.data_var)
         return model_data.get(self.data_var, xr.DataArray(default)).fillna(default)
 
     def eval(
-        self, model_data: xr.Dataset, apply_where: bool = True, **kwargs
+        self,
+        model_data: xr.Dataset,
+        backend_dataset: Optional[xr.Dataset] = None,
+        apply_where: bool = True,
+        **kwargs,
     ) -> Union[str, np.bool_, xr.DataArray]:
         """
         Get parsed model data variable from the Calliope model dataset.
@@ -204,16 +218,40 @@ def eval(
             Union[np.bool_, xr.DataArray]:
                 False if data variable not in model data, array otherwise.
         """
+        if backend_dataset is None:
+            backend_dataset = xr.Dataset()
+        if self.data_var in backend_dataset.data_vars.keys():
+            data_var_type = backend_dataset[self.data_var].attrs["obj_type"]
+        else:
+            data_var_type = "parameters"
+
+        if data_var_type not in ["parameters", "global_expressions", "variables"]:
+            raise TypeError(
+                f"Cannot check values in {data_var_type.removesuffix('s')} arrays in math `where` strings. "
+                f"Received {data_var_type.removesuffix('s')}: `{self.data_var}`."
+            )
+        if data_var_type != "parameters" and not apply_where:
+            raise TypeError(
+                f"Can only check for existence of values in {data_var_type.removesuffix('s')} arrays in math `where` strings. "
+                "These arrays cannot be used for comparison with expected values. "
+                f"Received `{self.data_var}`."
+            )
+
+        if data_var_type == "parameters":
+            source_array = model_data
+        else:
+            source_array = backend_dataset
+
         if kwargs.get("as_latex", False):
-            return self.as_latex(model_data, apply_where)
+            return self.as_latex(source_array, data_var_type, apply_where)
 
-        if self.data_var not in model_data:
+        if data_var_type == "parameters" and self.data_var not in model_data:
             return np.False_
 
         if apply_where:
-            return self._data_var_exists(model_data)
+            return self._data_var_exists(source_array, data_var_type)
         else:
-            return self._data_var_with_default(model_data)
+            return self._data_var_with_default(source_array)
 
 
 class ComparisonParser(expression_parser.EvalComparisonOp):
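
Pulling the new `eval` logic together: the referenced name is first looked up in the backend dataset, its `obj_type` attribute decides whether it is treated as a parameter or as a decision variable / global expression, non-parameter arrays only support existence checks, and the mask is built from whichever source dataset matches. A self-contained sketch of that dispatch (a simplified free function over hypothetical data, not the real `DataVarParser`):

import numpy as np
import xarray as xr

# hypothetical model inputs (parameters) and backend dataset (a decision variable)
model_data = xr.Dataset({"flow_cap_max": xr.DataArray([10.0, np.nan], dims="techs")})
backend_dataset = xr.Dataset({"storage_cap": xr.DataArray([1.0, np.nan], dims="techs")})
backend_dataset["storage_cap"].attrs["obj_type"] = "variables"


def where_mask(data_var: str, apply_where: bool = True) -> xr.DataArray:
    # decide which kind of array the name refers to
    if data_var in backend_dataset.data_vars:
        data_var_type = backend_dataset[data_var].attrs["obj_type"]
    else:
        data_var_type = "parameters"

    # backend arrays can only be checked for existence, not compared against values
    if data_var_type != "parameters" and not apply_where:
        raise TypeError(f"Can only check for existence of values in `{data_var}`")

    source_array = model_data if data_var_type == "parameters" else backend_dataset
    var = source_array.get(data_var, xr.DataArray(np.nan))
    if data_var_type == "parameters":
        return var.notnull() & (var != np.inf) & (var != -np.inf)
    return var.notnull()


print(where_mask("flow_cap_max").values)  # [ True False] -- parameter from model data
print(where_mask("storage_cap").values)   # [ True False] -- backend variable existence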