Fix _masterFunc2 fail flag caching and add fail flag identification to IPOPT #407

Merged: 15 commits, Jun 23, 2024
2 changes: 1 addition & 1 deletion .github/environment.yml
@@ -1,7 +1,7 @@
 dependencies:
   # build
   - python >=3.9
-  - numpy >=1.21
+  - numpy >=1.21,<2
   - ipopt
   - swig
   - meson >=1.3.2
2 changes: 1 addition & 1 deletion pyoptsparse/__init__.py
@@ -1,4 +1,4 @@
__version__ = "2.11.2"
__version__ = "2.11.3"

from .pyOpt_history import History
from .pyOpt_variable import Variable
18 changes: 13 additions & 5 deletions pyoptsparse/pyIPOPT/pyIPOPT.py
@@ -161,7 +161,7 @@

         if len(optProb.constraints) == 0:
             # If the user *actually* has an unconstrained problem,
-            # snopt sort of chokes with that....it has to have at
+            # IPOPT sort of chokes with that....it has to have at
             # least one constraint. So we will add one
             # automatically here:
             self.unconstrained = True
@@ -217,19 +217,25 @@
         # Define the 4 call back functions that ipopt needs:
         def eval_f(x, user_data=None):
             fobj, fail = self._masterFunc(x, ["fobj"])
-            if fail == 2:
+            if fail == 1:
+                fobj = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return fobj

         def eval_g(x, user_data=None):
             fcon, fail = self._masterFunc(x, ["fcon"])
-            if fail == 2:
+            if fail == 1:
+                fcon = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return fcon.copy()

         def eval_grad_f(x, user_data=None):
             gobj, fail = self._masterFunc(x, ["gobj"])
-            if fail == 2:
+            if fail == 1:
+                gobj = np.array(np.NaN)
+            elif fail == 2:
                 self.userRequestedTermination = True
             return gobj.copy()

@@ -238,7 +244,9 @@
                 return copy.deepcopy(matStruct)
             else:
                 gcon, fail = self._masterFunc(x, ["gcon"])
-                if fail == 2:
+                if fail == 1:
+                    gcon = np.array(np.NaN)
+                elif fail == 2:
                     self.userRequestedTermination = True
                 return gcon.copy()

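Across all four callbacks the pattern is the same: fail == 1 (a failed function evaluation) now returns NaN, which tells IPOPT the point is invalid so it can cut back the step, while fail == 2 still flags user-requested termination. A minimal standalone sketch of this convention, assuming a master function that returns (value, fail) with fail in {0, 1, 2}; make_eval_f, toy_master, and the state dict are hypothetical stand-ins for the optimizer object, not pyOptSparse's actual API:

import numpy as np

def make_eval_f(master_func, state):
    # Wrap a master function returning (fobj, fail) into an
    # IPOPT-style objective callback.
    def eval_f(x, user_data=None):
        fobj, fail = master_func(x)
        if fail == 1:
            # Failed evaluation: return NaN so the optimizer rejects
            # the point instead of consuming a garbage value.
            fobj = np.array(np.nan)
        elif fail == 2:
            # The user asked to stop; record it for the driver loop.
            state["userRequestedTermination"] = True
        return fobj
    return eval_f

# Usage: a toy master function that fails for negative inputs.
def toy_master(x):
    if (x < 0).any():
        return None, 1
    return (x ** 2).sum(), 0

state = {"userRequestedTermination": False}
eval_f = make_eval_f(toy_master, state)
print(eval_f(np.array([1.0, 2.0])))   # 5.0
print(eval_f(np.array([-1.0, 2.0])))  # nan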
19 changes: 15 additions & 4 deletions pyoptsparse/pyOpt_optimizer.py
@@ -81,7 +81,7 @@ def __init__(
         self.storeSens: bool = True
 
         # Cache storage
-        self.cache: Dict[str, Any] = {"x": None, "fobj": None, "fcon": None, "gobj": None, "gcon": None}
+        self.cache: Dict[str, Any] = {"x": None, "fobj": None, "fcon": None, "gobj": None, "gcon": None, "fail": None}
 
         # A second-level cache for optimizers that require callbacks
         # for each constraint. (eg. PSQP etc)
@@ -388,6 +388,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):

             # Update fail flag
             masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
 
             # fobj is now in cache
             returns.append(self.cache["fobj"])
@@ -437,6 +438,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):

             # Update fail flag
             masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
 
             # fcon is now in cache
             returns.append(self.cache["fcon"])
@@ -447,10 +449,13 @@
             # The previous evaluated point is different than the point requested for the derivative
             # OR this is the first call to _masterFunc2 in a hot started optimization
             # Recursively call the routine with ['fobj', 'fcon']
-            self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
+            _, _, fail = self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
             # We *don't* count that extra call, since that will
             # screw up the numbering...so we subtract the last call.
             self.callCounter -= 1
+            # Update fail flag
+            masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
             # Now, the point has been evaluated correctly so we
             # determine if we have to run the sens calc:

@@ -491,6 +496,7 @@ def _masterFunc2(self, x, evaluate, writeHist=True):

             # Update fail flag
             masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
 
             # gobj is now in the cache
             returns.append(self.cache["gobj"])
@@ -502,10 +508,13 @@
             # The previous evaluated point is different than the point requested for the derivative
             # OR this is the first call to _masterFunc2 in a hot started optimization
             # Recursively call the routine with ['fobj', 'fcon']
-            self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
+            _, _, fail = self._masterFunc2(x, ["fobj", "fcon"], writeHist=False)
             # We *don't* count that extra call, since that will
             # screw up the numbering...so we subtract the last call.
             self.callCounter -= 1
+            # Update fail flag
+            masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
             # Now, the point has been evaluated correctly so we
             # determine if we have to run the sens calc:
             if self.cache["gcon"] is None:
@@ -544,13 +553,15 @@

             # Update fail flag
             masterFail = max(masterFail, fail)
+            self.cache["fail"] = masterFail
 
             # gcon is now in the cache
             returns.append(self.cache["gcon"])
             if self.storeSens:
                 hist["funcsSens"] = self.cache["funcsSens"]
 
-        # Put the fail flag in the history:
+        # Update the fail flag with any cached failure and put the fail flag in the history
+        masterFail = max(self.cache["fail"], masterFail)
         hist["fail"] = masterFail
 
         # Put the iteration counter in the history
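The substance of the fix: _masterFunc2 serves repeated requests for an already-evaluated point from self.cache, but previously the fail flag from the original evaluation was not stored, so a cache hit could silently report success for a point that had actually failed. The diff caches masterFail alongside the values and folds it back in before writing the history. A minimal standalone sketch of the pattern, with hypothetical names (CachedEvaluator and user_func are illustrations, not pyOptSparse's actual internals):

import numpy as np

class CachedEvaluator:
    def __init__(self, user_func):
        # user_func(x) returns (fobj, fail) with fail in {0, 1, 2}.
        self.user_func = user_func
        # The fail flag lives in the cache next to the values, so a
        # cache hit can still report that the point originally failed.
        self.cache = {"x": None, "fobj": None, "fail": 0}

    def eval_fobj(self, x):
        master_fail = 0
        if self.cache["x"] is None or not np.array_equal(x, self.cache["x"]):
            fobj, fail = self.user_func(x)  # fresh evaluation
            master_fail = max(master_fail, fail)
            self.cache.update(x=x.copy(), fobj=fobj, fail=master_fail)
        # Fold the cached flag back in. Without this line, a failed
        # point served from cache would masquerade as a success.
        master_fail = max(self.cache["fail"], master_fail)
        return self.cache["fobj"], master_fail

The same max-fold appears at every cache update site in the diff, so whichever code path runs, self.cache["fail"] always reflects the worst failure seen at the cached point.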
2 changes: 1 addition & 1 deletion setup.py
@@ -103,7 +103,7 @@ def copy_shared_libraries():
     keywords="optimization",
     install_requires=[
         "sqlitedict>=1.6",
-        "numpy>=1.21",
+        "numpy>=1.21,<2",
         "scipy>=1.7",
         "mdolab-baseclasses>=1.3.1",
     ],
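Both the conda environment and setup.py now pin NumPy below 2.0. The diff does not state the motivation, but one plausible reason (an assumption on my part, not confirmed by the PR) is that NumPy 2.0, released in June 2024, removed aliases such as np.NaN that the IPOPT callbacks above rely on; np.nan is the spelling that works on every version:

import numpy as np

# np.NaN was removed from the main namespace in NumPy 2.0 (NEP 52);
# with the <2 pin the callbacks' np.NaN keeps working, and np.nan is
# portable either way.
fobj = np.array(np.nan)
assert np.isnan(fobj)  # NaN objectives mark a failed evaluation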