Fix _masterFunc2 fail flag caching and add fail flag identification to IPOPT (#407)

* Cache fail flag

* Return NaN from IPOPT callback functions if evaluation failure

* Fixed typo in copy-pasted code

* Fail flag tests with _masterFunc

* Undo accidental change of ipopt linear solver

* Simplify boolean evaluation

* Formatting

* A bit more test coverage

* A couple more unit tests to thoroughly test failure flag caching

* More thorough test parameterization

* Bounce the interpretation

* Upper (exclusive) bound of NumPy v2

* Last last unit test

* Formatting

* Upper numpy bound in conda environment
eytanadler authored Jun 23, 2024
1 parent da0077a commit 7376d71
Showing 6 changed files with 303 additions and 12 deletions.
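
Taken together, the changes below do two things: `_masterFunc2` now caches the fail flag alongside the cached function values, and the IPOPT callbacks now report a failed evaluation by returning NaN. A minimal sketch of the flag convention the diffs rely on (the standalone function and its names are illustrative, not from the codebase):

```python
import numpy as np

# Fail-flag convention used by _masterFunc/_masterFunc2 in these diffs:
#   0 - evaluation succeeded
#   1 - evaluation failed (now surfaced to IPOPT as NaN)
#   2 - the user requested termination
def eval_f_sketch(masterFunc, x):
    fobj, fail = masterFunc(x, ["fobj"])
    if fail == 1:
        # NaN tells IPOPT the point could not be evaluated
        fobj = np.array(np.NaN)
    elif fail == 2:
        print("user requested termination")  # the real code sets a flag instead
    return fobj
```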
2 changes: 1 addition & 1 deletion .github/environment.yml
@@ -1,7 +1,7 @@
 dependencies:
   # build
   - python >=3.9
-  - numpy >=1.21
+  - numpy >=1.21,<2
   - ipopt
   - swig
   - meson >=1.3.2
2 changes: 1 addition & 1 deletion pyoptsparse/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "2.11.2"
+__version__ = "2.11.3"
 
 from .pyOpt_history import History
 from .pyOpt_variable import Variable
18 changes: 13 additions & 5 deletions pyoptsparse/pyIPOPT/pyIPOPT.py
@@ -161,7 +161,7 @@ def __call__(
 
         if len(optProb.constraints) == 0:
             # If the user *actually* has an unconstrained problem,
-            # snopt sort of chokes with that....it has to have at
+            # IPOPT sort of chokes with that....it has to have at
             # least one constraint. So we will add one
             # automatically here:
             self.unconstrained = True
@@ -217,19 +217,25 @@ def __call__(
         # Define the 4 call back functions that ipopt needs:
         def eval_f(x, user_data=None):
             fobj, fail = self._masterFunc(x, ["fobj"])
-            if fail == 2:
+            if fail == 1:
+                fobj = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return fobj
 
         def eval_g(x, user_data=None):
             fcon, fail = self._masterFunc(x, ["fcon"])
-            if fail == 2:
+            if fail == 1:
+                fcon = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return fcon.copy()
 
         def eval_grad_f(x, user_data=None):
             gobj, fail = self._masterFunc(x, ["gobj"])
-            if fail == 2:
+            if fail == 1:
+                gobj = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return gobj.copy()
 
@@ -238,7 +244,9 @@ def eval_jac_g(x, flag, user_data=None):
                 return copy.deepcopy(matStruct)
             else:
                 gcon, fail = self._masterFunc(x, ["gcon"])
-                if fail == 2:
+                if fail == 1:
+                    gcon = np.array(np.NaN)
+                elif fail == 2:
                     self.userRequestedTermination = True
                 return gcon.copy()
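
For context on where the fail flag originates: a pyOptSparse objective callback returns a `(funcs, fail)` pair, and a truthy `fail` is what the new branches above ultimately translate into NaN for IPOPT. A hedged, self-contained sketch of such a callback (the problem and the failure criterion are made up):

```python
import numpy as np

def objfunc(xdict):
    """User-supplied callback in the (funcs, fail) form pyOptSparse expects."""
    x = xdict["xvars"]
    funcs = {"obj": np.sum(x**2), "con": np.sum(x)}
    # Report failure for unevaluable points instead of raising;
    # the optimizer wrapper maps this to fail == 1 and IPOPT then sees NaN.
    fail = not np.all(np.isfinite(x)) or np.any(x < 0)  # made-up failure criterion
    return funcs, fail
```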
19 changes: 15 additions & 4 deletions pyoptsparse/pyOpt_optimizer.py
@@ -81,7 +81,7 @@ def __init__(
         self.storeSens: bool = True
 
         # Cache storage
-        self.cache: Dict[str, Any] = {"x": None, "fobj": None, "fcon": None, "gobj": None, "gcon": None}
+        self.cache: Dict[str, Any] = {"x": None, "fobj": None, "fcon": None, "gobj": None, "gcon": None, "fail": None}
 
         # A second-level cache for optimizers that require callbacks
         # for each constraint. (eg. PSQP etc)
@@ -388,6 +388,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
 
                 # Update fail flag
                 masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
 
             # fobj is now in cache
             returns.append(self.cache["fobj"])
@@ -437,6 +438,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
 
                 # Update fail flag
                 masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
 
             # fcon is now in cache
             returns.append(self.cache["fcon"])
@@ -447,10 +449,13 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
                 # The previous evaluated point is different than the point requested for the derivative
                 # OR this is the first call to _masterFunc2 in a hot started optimization
                 # Recursively call the routine with ['fobj', 'fcon']
-                self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
+                _, _, fail = self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
                 # We *don't* count that extra call, since that will
                 # screw up the numbering...so we subtract the last call.
                 self.callCounter -= 1
+                # Update fail flag
+                masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
                 # Now, the point has been evaluated correctly so we
                 # determine if we have to run the sens calc:
 
@@ -491,6 +496,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
 
                 # Update fail flag
                 masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
 
             # gobj is now in the cache
             returns.append(self.cache["gobj"])
@@ -502,10 +508,13 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
                 # The previous evaluated point is different than the point requested for the derivative
                 # OR this is the first call to _masterFunc2 in a hot started optimization
                 # Recursively call the routine with ['fobj', 'fcon']
-                self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
+                _, _, fail = self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
                 # We *don't* count that extra call, since that will
                 # screw up the numbering...so we subtract the last call.
                 self.callCounter -= 1
+                # Update fail flag
+                masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
                 # Now, the point has been evaluated correctly so we
                 # determine if we have to run the sens calc:
             if self.cache["gcon"] is None:
@@ -544,13 +553,15 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
 
                 # Update fail flag
                 masterFail = max(masterFail, fail)
+                self.cache["fail"] = masterFail
 
             # gcon is now in the cache
             returns.append(self.cache["gcon"])
             if self.storeSens:
                 hist["funcsSens"] = self.cache["funcsSens"]
 
-        # Put the fail flag in the history:
+        # Update the fail flag with any cached failure and put the fail flag in the history
+        masterFail = max(self.cache["fail"], masterFail)
         hist["fail"] = masterFail
 
         # Put the iteration counter in the history
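
The bug these `pyOpt_optimizer.py` hunks fix: when `_masterFunc2` served a value from its cache, the fail flag from the evaluation that produced that value was lost, so a cache hit at a failed point looked like a success. A minimal sketch of the corrected pattern, with hypothetical names:

```python
from typing import Any, Callable, Dict, Tuple

class CachingEvaluator:
    """Illustrative stand-in for the fail-flag caching fixed in _masterFunc2."""

    def __init__(self) -> None:
        # The cache now carries a "fail" entry alongside the cached values
        self.cache: Dict[str, Any] = {"x": None, "fobj": None, "fail": 0}

    def evaluate(self, x: float, compute: Callable[[float], Tuple[float, int]]) -> Tuple[float, int]:
        masterFail = 0
        if x != self.cache["x"]:  # the real code compares design vectors
            fobj, fail = compute(x)
            self.cache.update(x=x, fobj=fobj)
            masterFail = max(masterFail, fail)
            self.cache["fail"] = masterFail  # store the failure with the value
        # Merge any cached failure so a cache hit cannot mask a failed point
        masterFail = max(self.cache["fail"], masterFail)
        return self.cache["fobj"], masterFail
```

Before the fix, the final `max` over the cached flag was missing, so a second request at the same failed point returned the cached value with a fail flag of 0.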
2 changes: 1 addition & 1 deletion setup.py
@@ -103,7 +103,7 @@ def copy_shared_libraries():
     keywords="optimization",
     install_requires=[
         "sqlitedict>=1.6",
-        "numpy>=1.21",
+        "numpy>=1.21,<2",
         "scipy>=1.7",
         "mdolab-baseclasses>=1.3.1",
     ],
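
A likely motivation for the new `<2` pin (in both the conda environment and `setup.py`): NumPy 2.0, released shortly before this commit, removed long-deprecated aliases such as `np.NaN`, which the IPOPT changes above use. A quick illustration, assuming NumPy 2.x for the failing case:

```python
import numpy as np

x = np.array(np.nan)  # works on every NumPy release
y = np.array(np.NaN)  # AttributeError under NumPy >= 2.0; fine under the <2 pin
```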