Commit

feat: ✨ Switch to default value for max iterations
Anselmoo committed Jan 21, 2024
1 parent 2ea6bb5 commit f000094
Showing 5 changed files with 128 additions and 30 deletions.
4 changes: 4 additions & 0 deletions spectrafit/api/report_model.py
@@ -106,6 +106,10 @@ class SolverAPI(BaseModel):
default={},
description="Error bar comment if values reach initial value or boundary",
)
computional: Dict[str, Any] = Field(
...,
description="Computional information like number of function evaluations",
)


class OutputAPI(BaseModel):
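
The new `computional` entry is declared as a required dictionary field on the Pydantic `SolverAPI` model. Below is a minimal sketch of the same pattern, assuming Pydantic v2 (the notebook.py hunks further down switch from `.dict()` to `.model_dump()`); `SolverInfo` and the example values are invented for illustration, and the real `SolverAPI` carries additional fields not shown in this hunk.

from typing import Any, Dict

from pydantic import BaseModel, Field


class SolverInfo(BaseModel):
    # `...` marks the field as required, unlike the defaulted fields above.
    computional: Dict[str, Any] = Field(
        ...,
        description="Computional information like number of function evaluations",
    )


info = SolverInfo(computional={"nfev": 42, "max_nfev": 8000, "success": True})
print(info.model_dump(exclude_none=True))
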
11 changes: 5 additions & 6 deletions spectrafit/models.py
@@ -753,7 +753,7 @@ def model_check(self, model: str) -> None:
model (str): Model name.
Raises:
KeyError: If the model is not supported.
NotImplementedError: If the model is not implemented.
"""
if model.split("_")[0] not in self.__models__:
raise NotImplementedError(f"{model} is not supported!")
@@ -1532,12 +1532,11 @@ def __call__(self) -> Tuple[Minimizer, Any]:
**self.args_solver["minimizer"],
)

return (
minimizer,
minimizer.minimize(
**self.args_solver["optimizer"],
),
)
result = minimizer.minimize(
**self.args_solver["optimizer"],
)
self.args_solver["optimizer"]["max_nfev"] = minimizer.max_nfev
return minimizer, result

@staticmethod
def solve_local_fitting(
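
The refactored `__call__` above runs the fit first and then writes the `max_nfev` that the `Minimizer` actually used back into the optimizer arguments, so the default iteration limit chosen by lmfit ends up in the solver settings and, later, in the report. A rough standalone sketch of that pattern, assuming lmfit and NumPy are installed; the residual model and data are invented for illustration, and recent lmfit versions fill `Minimizer.max_nfev` with a method-dependent default when none is given.

import numpy as np
from lmfit import Minimizer, Parameters


def residual(params, x, data):
    """Linear model residual, used only for illustration."""
    return params["slope"] * x + params["intercept"] - data


x = np.linspace(0, 10, 50)
data = 2.0 * x + 1.0 + np.random.default_rng(0).normal(0, 0.1, x.size)

params = Parameters()
params.add("slope", value=1.0)
params.add("intercept", value=0.0)

minimizer = Minimizer(residual, params, fcn_args=(x, data))
result = minimizer.minimize(method="leastsq")

# After the fit, the resolved iteration limit is available on the Minimizer:
print(result.nfev, minimizer.max_nfev)
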
49 changes: 45 additions & 4 deletions spectrafit/plugins/notebook.py
@@ -595,6 +595,15 @@ def get_linear_correlation(self) -> Dict[str, Any]:
"""
return self.args_out["linear_correlation"]

@property
def get_computional(self) -> Dict[str, Any]:
"""Get the computational time.
Returns:
Dict[str, Any]: Computational time as dictionary.
"""
return self.args_out["fit_insights"]["computional"]

@property
def settings_conf_interval(self) -> Union[bool, Dict[str, Any]]:
"""Confidence interval settings.
@@ -705,7 +714,7 @@ def make_input_contribution(self) -> InputAPI:
global_fitting=self.settings_global_fitting,
confidence_interval=self.settings_conf_interval,
configurations=self.settings_configurations,
settings_solver_models=self.settings_solver_models.dict(
settings_solver_models=self.settings_solver_models.model_dump(
exclude_none=True
),
),
@@ -728,6 +737,7 @@ def make_solver_contribution(self) -> SolverAPI:
covariance_matrix=self.get_covariance_matrix,
variables=self.get_variables,
errorbars=self.get_errorbars,
computional=self.get_computional,
)

@property
@@ -742,15 +752,46 @@ def make_output_contribution(self) -> OutputAPI:
def __call__(self) -> Dict[str, Any]:
"""Get the complete report as dictionary.
!!! info "About the report and `recursive_exclude_none`"
The report is generated by using the `ReportAPI` class, which is a
`Pydantic`-definition of the report. The `Pydantic`-definition is
converted to a dictionary by using the `.model_dump()` option of `Pydantic`.
The `recursive_exclude_none` function is used to remove all `None` values
from the dictionary, which are hidden in the nested dictionaries.
Returns:
Dict[str, Any]: Report as dictionary by using the `.dict()` option of
pydantic. `None` is excluded.
"""
return ReportAPI(

def recursive_exclude_none(value: Dict[str, Any]) -> Dict[str, Any]:
"""Exclude `None` values from the dictionary.
Args:
value (Dict[str, Any]): Dictionary to exclude `None` values.
Returns:
Dict[str, Any]: Dictionary without `None` values.
"""
if isinstance(value, list):
return [recursive_exclude_none(v) for v in value if v is not None]
elif isinstance(value, dict):
return {
k: recursive_exclude_none(v)
for k, v in value.items()
if v is not None
}
else:
return value

report = ReportAPI(
input=self.make_input_contribution,
solver=self.make_solver_contribution,
output=self.make_output_contribution,
).dict(exclude_none=True)
).model_dump(exclude_none=True)
report = recursive_exclude_none(report)
return report


class SpectraFitNotebook(DataFramePlot, DataFrameDisplay, ExportResults):
@@ -1129,7 +1170,7 @@ def solver_model(
"column": list(self.df.columns),
"autopeak": self.autopeak,
**list2dict(peak_list=self.initial_model),
**self.settings_solver_models.dict(),
**self.settings_solver_models.model_dump(),
},
)(),
)()
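
`model_dump(exclude_none=True)` only drops `None` at the level of the model fields themselves, so the new `recursive_exclude_none` helper walks the dumped dictionary and also removes `None` values hidden inside nested plain dicts and lists. A standalone copy of the helper with a small made-up report, for illustration:

from typing import Any


def recursive_exclude_none(value: Any) -> Any:
    """Drop `None` entries from nested dictionaries and lists."""
    if isinstance(value, list):
        return [recursive_exclude_none(v) for v in value if v is not None]
    if isinstance(value, dict):
        return {
            k: recursive_exclude_none(v) for k, v in value.items() if v is not None
        }
    return value


report = {
    "solver": {"errorbars": {"at_boundary": None}, "nfev": 42},
    "notes": [None, "ok"],
}
print(recursive_exclude_none(report))
# -> {'solver': {'errorbars': {}, 'nfev': 42}, 'notes': ['ok']}
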
92 changes: 73 additions & 19 deletions spectrafit/report.py
@@ -156,15 +156,17 @@ def __call__(self) -> Dict[Hashable, Any]:
metric_dict[fnc.__name__].append(fnc(y_true, y_pred))
except ValueError as err:
warn(
f"\n\n## WARNING ##\n{err} for function: "
f"{fnc.__name__}\n#############\n"
warn_meassage(
msg=f"Regression metric '{fnc.__name__}' could not "
f"be calculated due to: {err}"
)
)
metric_dict[fnc.__name__].append(np.nan)
return pd.DataFrame(metric_dict).T.to_dict(orient="split")


def fit_report_as_dict(
inpars: minimize, modelpars: Optional[Dict[str, Any]] = None
inpars: minimize, settings: Minimizer, modelpars: Optional[Dict[str, Any]] = None
) -> Dict[str, Dict[Any, Any]]:
"""Generate the best fit report as dictionary.
@@ -185,6 +187,9 @@
Args:
inpars (minimize): Input Parameters from a fit or the Minimizer results
returned from a fit.
settings (Minimizer): The lmfit `Minimizer`-class as a general minimizer
for curve fitting and optimization. It is required to extract the
initial settings of the fit.
modelpars (Dict[str, Any], optional): Known Model Parameters.
Defaults to None.
@@ -203,12 +208,15 @@
"errorbars": {},
"correlations": {},
"covariance_matrix": {},
"fit_insights": {},
"computional": {},
}

result, buffer, params = _extracted_gof_from_results(
result=result, buffer=buffer, params=params
)
buffer = _extracted_computional_from_results(
result=result, settings=settings, buffer=buffer
)
for name in parnames:
par = params[name]
buffer["variables"][name] = {"init_value": get_init_value(param=par)}
@@ -271,6 +279,34 @@ def get_init_value(
return f"As fixed value: {param.value}"


def _extracted_computional_from_results(
result: minimize, settings: Minimizer, buffer: Dict[str, Any]
) -> Dict[str, Any]:
"""Extract the computional from the results.
Args:
result (minimize): Input Parameters from a fit or the Minimizer results
returned from a fit.
settings (Minimizer): The lmfit `Minimizer`-class as a general minimizer
for curve fitting and optimization. It is required to extract the
initial settings of the fit.
buffer (Dict[str, Any]): The buffer to store the results.
Returns:
Dict[str, Any]: The buffer with updated results.
"""
buffer["computional"]["success"] = result.success
buffer["computional"]["message"] = result.message
buffer["computional"]["errorbars"] = result.errorbars
buffer["computional"]["nfev"] = result.nfev

buffer["computional"]["max_nfev"] = settings.max_nfev
buffer["computional"]["scale_covar"] = settings.scale_covar
buffer["computional"]["calc_covar"] = settings.calc_covar

return buffer


def _extracted_gof_from_results(
result: minimize, buffer: Dict[str, Any], params: Parameters
) -> Tuple[minimize, Dict[str, Any], Parameters]:
@@ -299,37 +335,55 @@
buffer["statistics"]["akaike_information"] = result.aic
buffer["statistics"]["bayesian_information"] = result.bic

buffer["fit_insights"]["success"] = result.success
buffer["fit_insights"]["message"] = result.message
buffer["fit_insights"]["errorbars"] = result.errorbars
buffer["fit_insights"]["max_nfev"] = result.max_nfev
buffer["fit_insights"]["nfev"] = result.nfev
buffer["fit_insights"]["scale_covar"] = result.scale_covar
buffer["fit_insights"]["calc_covar"] = result.calc_covar

if not result.errorbars:
warn(
"\n\n## WARNING ##\nUncertainties could "
"not be estimated\n#############\n"
)
warn(warn_meassage("Uncertainties could not be estimated"))

if result.method not in ("leastsq", "least_squares"):
warn(
f"\n\n## WARNING ##\nThe fitting method '{result.method}' does not "
"natively calculate and uncertainties cannot be estimated due to "
"be out of region!\n#############\n"
warn_meassage(
msg=f"The fitting method '{result.method}' does not "
"natively calculate and uncertainties cannot be "
"estimated due to be out of region!"
)
)

parnames_varying = [par for par in result.params if result.params[par].vary]
for name in parnames_varying:
par = params[name]
if par.init_value and np.allclose(par.value, par.init_value):
buffer["errorbars"]["at_initial_value"] = name
warn(
warn_meassage(
msg=f"The parameter '{name}' is at its initial "
"value and uncertainties cannot be estimated!"
)
)
if np.allclose(par.value, par.min) or np.allclose(par.value, par.max):
buffer["errorbars"]["at_boundary"] = name
warn(
warn_meassage(
msg=f"The parameter '{name}' is at its boundary "
"and uncertainties cannot be estimated!"
)
)

return result, buffer, params


def warn_meassage(msg: str) -> str:
"""Generate a warning message.
Args:
msg (str): The message to be printed.
Returns:
str: The warning message.
"""
top = "\n\n## WARNING " + "#" * (len(msg) - len("## WARNING ")) + "\n"
header = "\n" + "#" * len(msg) + "\n"
return top + msg + header


class PrintingResults:
"""Print the results of the fitting process."""

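
The new `warn_meassage` helper (spelling as committed) frames a message with '#' rulers sized to the message length. The snippet below simply reproduces it to show the resulting banner:

def warn_meassage(msg: str) -> str:
    """Frame a warning message with '#' rulers matched to its length."""
    top = "\n\n## WARNING " + "#" * (len(msg) - len("## WARNING ")) + "\n"
    header = "\n" + "#" * len(msg) + "\n"
    return top + msg + header


print(warn_meassage("Uncertainties could not be estimated"))
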
2 changes: 1 addition & 1 deletion spectrafit/tools.py
@@ -284,7 +284,7 @@ def make_insight_report(self) -> None:
"""
self.args["fit_insights"] = fit_report_as_dict(
self.result, modelpars=self.result.params
inpars=self.result, settings=self.minimizer, modelpars=self.result.params
)
if self.args["conf_interval"]:
try:
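
With `settings=self.minimizer` now passed alongside the fit result, the report can merge per-run values (`success`, `message`, `errorbars`, `nfev`) with the solver settings (`max_nfev`, `scale_covar`, `calc_covar`). The sketch below is a simplified stand-in for `_extracted_computional_from_results`, using dummy objects instead of real lmfit instances:

from types import SimpleNamespace
from typing import Any, Dict


def extract_computional(result: Any, settings: Any) -> Dict[str, Any]:
    """Simplified stand-in: merge run information with the minimizer settings."""
    return {
        "success": result.success,
        "message": result.message,
        "errorbars": result.errorbars,
        "nfev": result.nfev,
        "max_nfev": settings.max_nfev,
        "scale_covar": settings.scale_covar,
        "calc_covar": settings.calc_covar,
    }


result = SimpleNamespace(success=True, message="Fit succeeded.", errorbars=True, nfev=35)
settings = SimpleNamespace(max_nfev=8000, scale_covar=True, calc_covar=True)
print(extract_computional(result, settings))
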
