Infinite Gradient Handling #582

Open · wants to merge 10 commits into base: main
37 changes: 36 additions & 1 deletion src/optimagic/optimization/internal_optimization_problem.py
@@ -11,7 +11,10 @@
from optimagic.batch_evaluators import process_batch_evaluator
from optimagic.differentiation.derivatives import first_derivative
from optimagic.differentiation.numdiff_options import NumdiffOptions
-from optimagic.exceptions import UserFunctionRuntimeError, get_traceback
+from optimagic.exceptions import (
+    UserFunctionRuntimeError,
+    get_traceback,
+)
Comment on lines +14 to +17 (Member):
Suggested change
-from optimagic.exceptions import (
-    UserFunctionRuntimeError,
-    get_traceback,
-)
+from optimagic.exceptions import UserFunctionRuntimeError, get_traceback

There is no change here anymore.

from optimagic.logging.logger import LogStore
from optimagic.logging.types import IterationState
from optimagic.optimization.fun_value import (
@@ -471,6 +474,7 @@ def _pure_evaluate_jac(
        out_jac = _process_jac_value(
            value=jac_value, direction=self._direction, converter=self._converter, x=x
        )
        self._assert_finite_jac(out_jac, jac_value, params)

        stop_time = time.perf_counter()

@@ -508,6 +512,7 @@ def func(x: NDArray[np.float64]) -> SpecificFunctionValue:
            p = self._converter.params_from_internal(x)
            return self._fun(p)

        params = self._converter.params_from_internal(x)
        try:
            numdiff_res = first_derivative(
                func,
@@ -543,6 +548,8 @@ def func(x: NDArray[np.float64]) -> SpecificFunctionValue:
                warnings.warn(msg)
                fun_value, jac_value = self._error_penalty_func(x)

        self._assert_finite_jac(jac_value, jac_value, params)

        algo_fun_value, hist_fun_value = _process_fun_value(
            value=fun_value,  # type: ignore
            solver_type=self._solver_type,
@@ -682,6 +689,8 @@ def _pure_evaluate_fun_and_jac(
        if self._direction == Direction.MAXIMIZE:
            out_jac = -out_jac

        self._assert_finite_jac(out_jac, jac_value, params)

        stop_time = time.perf_counter()

        hist_entry = HistoryEntry(
@@ -704,6 +713,32 @@

        return (algo_fun_value, out_jac), hist_entry, log_entry

    def _assert_finite_jac(
        self, out_jac: NDArray[np.float64], jac_value: PyTree, params: PyTree
    ) -> None:
        """Check for infinite and NaN values in the Jacobian and raise an error
        if found.

        Args:
            out_jac: Internal, processed Jacobian to check for non-finite values.
            jac_value: Original Jacobian value as returned by the user function;
                included in the error message for debugging.
            params: User-facing parameter representation at the evaluation point.

        Raises:
            UserFunctionRuntimeError: If any infinite or NaN values are found in
                the Jacobian.

        """
        if not np.all(np.isfinite(out_jac)):
            msg = (
                "The optimization received a Jacobian containing infinite "
                "or NaN values.\nCheck your objective function or its "
                "Jacobian, or try a different optimizer.\n"
                f"Parameters at the evaluation point: {params}\n"
                f"Jacobian values: {jac_value}"
            )
            raise UserFunctionRuntimeError(msg)


def _process_fun_value(
    value: SpecificFunctionValue,
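For context, here is a minimal sketch of how the new check surfaces to users, assuming optimagic's public minimize API; the functions sphere and bad_gradient are illustrative only and not part of this PR:

import numpy as np
import optimagic as om

def sphere(x):
    return x @ x

def bad_gradient(x):
    # Deliberately return a non-finite gradient to trigger the new check.
    return np.full_like(x, np.inf)

# With this branch, the run should fail fast with UserFunctionRuntimeError,
# reporting the parameters and the offending Jacobian values, instead of
# passing infinite values on to the optimizer.
om.minimize(
    fun=sphere,
    params=np.arange(3.0),
    algorithm="scipy_lbfgsb",
    jac=bad_gradient,
)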
1 change: 0 additions & 1 deletion src/optimagic/parameters/space_conversion.py
@@ -145,7 +145,6 @@ def get_space_converter(
        soft_lower_bounds=_soft_lower,
        soft_upper_bounds=_soft_upper,
    )

    return converter, params

