diff --git a/doc/guide.rst b/doc/guide.rst
index 0ed5d761..b7341eed 100644
--- a/doc/guide.rst
+++ b/doc/guide.rst
@@ -22,13 +22,13 @@ The optimization class is created using the following call:
 
 .. code-block:: python
 
-    optProb = Optimization("name", objFun)
+    optProb = Optimization("name", objconFun)
 
-The general template of the objective function is as follows:
+The general template of the objective and constraint function is as follows:
 
 .. code-block:: python
 
-    def obj_fun(xdict):
+    def objconFun(xdict):
         funcs = {}
         funcs["obj_name"] = function(xdict)
         funcs["con_name"] = function(xdict)
@@ -196,17 +196,30 @@ This argument is a dictionary, and the keys must match the design variable sets
 Essentially, what we have done is specify which blocks of the constraint rows are non-zero, and
 provide the sparsity structure of the ones that are sparse.
 
-For linear constraints the values in ``jac`` are meaningful:
-they must be the actual linear constraint Jacobian values (which do not change).
-For non-linear constraints, only the sparsity structure (i.e. which entries are nonzero) is significant.
-The values themselves will be determined by a call to the ``sens()`` function.
-
-Also note, that the ``wrt`` and ``jac`` keyword arguments are only supported when user-supplied sensitivity is used.
+Note that the ``wrt`` and ``jac`` keyword arguments are only supported when user-supplied sensitivity is used.
 If automatic gradients from pyOptSparse are used, the constraint Jacobian will necessarily be dense.
 
 .. note::
    Currently, only the optimizers SNOPT and IPOPT support sparse Jacobians.
 
+Linear Constraints
+~~~~~~~~~~~~~~~~~~
+Linear constraints in pyOptSparse are defined exclusively by the ``jac``, ``lower``, and ``upper`` entries of the ``addConGroup`` method.
+For a linear constraint :math:`g_L \leq Ax + b \leq g_U`, the constraint definition would look like:
+
+.. code-block:: python
+
+    optProb.addConGroup("con", num_cons, linear=True, wrt=["xvars"], jac={"xvars": A}, lower=gL - b, upper=gU - b)
+
+Users should not provide the linear constraint values (i.e., :math:`g = Ax + b`) in a user-defined objective/constraint function.
+pyOptSparse will raise an error if you do so.
+
+For linear constraints, the values in ``jac`` are meaningful:
+they must be the actual linear constraint Jacobian values (which do not change).
+For non-linear constraints, only the sparsity structure (i.e., which entries are nonzero) is significant.
+The values themselves will be determined by a call to the ``sens()`` function.
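+
+For example, a two-row linear constraint :math:`g_L \leq Ax + b \leq g_U` on a three-variable
+group ``xvars`` could be added as follows, where ``A``, ``b``, ``gL``, and ``gU`` are placeholder
+arrays supplied by the user:
+
+.. code-block:: python
+
+    import numpy as np
+
+    A = np.array([[1.0, -1.0, 0.0], [0.0, 1.0, 2.0]])  # fixed Jacobian values, used directly
+    b = np.array([0.0, 1.0])
+    gL = np.array([0.0, -1.0])
+    gU = np.array([5.0, 1.0])
+    optProb.addConGroup("lincon", 2, linear=True, wrt=["xvars"], jac={"xvars": A}, lower=gL - b, upper=gU - b)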
+
+
 
 Objectives
 ++++++++++
diff --git a/pyoptsparse/pyOpt_optimizer.py b/pyoptsparse/pyOpt_optimizer.py
index 70ba70c7..2ae6dca0 100644
--- a/pyoptsparse/pyOpt_optimizer.py
+++ b/pyoptsparse/pyOpt_optimizer.py
@@ -367,6 +367,10 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
             self.userObjTime += time.time() - timeA
             self.userObjCalls += 1
 
+            # Make sure the user-defined function does *not* return linear constraint values
+            if self.callCounter == 0:
+                self._checkLinearConstraints(funcs)
+
             # Discard zero imaginary components in funcs
             for key, val in funcs.items():
                 funcs[key] = np.real(val)
@@ -417,6 +421,10 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
             self.userObjTime += time.time() - timeA
             self.userObjCalls += 1
 
+            # Make sure the user-defined function does *not* return linear constraint values
+            if self.callCounter == 0:
+                self._checkLinearConstraints(funcs)
+
             # Discard zero imaginary components in funcs
             for key, val in funcs.items():
                 funcs[key] = np.real(val)
@@ -867,6 +875,18 @@ def _on_setOption(self, name, value):
         """
         pass
 
+    def _checkLinearConstraints(self, funcs):
+        """
+        Make sure that the user-defined obj/con function does not return the linear constraint values,
+        because linear constraints are defined exclusively by jac and bounds in addConGroup.
+        """
+        for conName in self.optProb.constraints:
+            if self.optProb.constraints[conName].linear and conName in funcs:
+                raise Error(
+                    "Value for linear constraint returned from user obj function. Linear constraints "
+                    + "are evaluated internally and should not be returned from the user's function."
+                )
+
     def setOption(self, name, value=None):
         """
         Generic routine for all option setting. The routine does
diff --git a/tests/test_lincon_error.py b/tests/test_lincon_error.py
new file mode 100644
index 00000000..7af5723b
--- /dev/null
+++ b/tests/test_lincon_error.py
@@ -0,0 +1,47 @@
+"""
+Tests that pyOptSparse raises an error when a user-defined obj/con function returns a linear constraint value
+(which it should not, because linear constraints are defined exclusively by jac and bounds)
+"""
+
+# Standard Python modules
+import unittest
+
+# First party modules
+from pyoptsparse import SLSQP, Optimization
+from pyoptsparse.pyOpt_error import Error
+
+
+def objfunc(xdict):
+    """Evaluates the objective f(x) = x^2, and incorrectly returns a value for the linear constraint"""
+    x = xdict["x"]
+    funcs = {}
+
+    funcs["obj"] = x**2
+    funcs["con"] = x - 1  # falsely return a linear constraint value
+
+    fail = False
+    return funcs, fail
+
+
+class TestLinearConstraintCheck(unittest.TestCase):
+    def test(self):
+        # define an optimization problem with a linear constraint
+        optProb = Optimization("test", objfunc)
+        optProb.addVarGroup("x", 1, value=1)
+        optProb.addObj("obj")
+        optProb.addConGroup("con", 1, lower=1.0, linear=True, wrt=["x"], jac={"x": [1.0]})
+
+        opt = SLSQP()
+        with self.assertRaises(Error) as context:
+            opt(optProb, sens="FD")
+
+        # check if we get the expected error message
+        err_msg = (
+            "Value for linear constraint returned from user obj function. Linear constraints "
+            + "are evaluated internally and should not be returned from the user's function."
+        )
+        self.assertEqual(err_msg, str(context.exception))
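+
+    def test_correct_usage(self):
+        # A hypothetical companion check (not in the upstream suite): the same
+        # problem should solve cleanly when the user function omits the linear
+        # constraint value, since the constraint is evaluated internally from
+        # the jac and bounds given to addConGroup.
+        def objfunc_ok(xdict):
+            funcs = {"obj": xdict["x"] ** 2}
+            return funcs, False
+
+        optProb = Optimization("test", objfunc_ok)
+        optProb.addVarGroup("x", 1, value=1)
+        optProb.addObj("obj")
+        optProb.addConGroup("con", 1, lower=1.0, linear=True, wrt=["x"], jac={"x": [1.0]})
+
+        opt = SLSQP()
+        sol = opt(optProb, sens="FD")
+        # minimizing x^2 subject to x >= 1 should end at the constraint bound
+        self.assertAlmostEqual(sol.xStar["x"][0], 1.0, places=4)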
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_snopt_bugfix.py b/tests/test_snopt_bugfix.py
index d3c8b598..12ffe28a 100644
--- a/tests/test_snopt_bugfix.py
+++ b/tests/test_snopt_bugfix.py
@@ -21,35 +21,6 @@ def objfunc(xdict):
     funcs = {}
 
     funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
-    conval = -x + y
-    funcs["con"] = conval
-
-    fail = False
-    return funcs, fail
-
-
-def objfunc_no_con(xdict):
-    """Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
-    x = xdict["x"]
-    y = xdict["y"]
-    funcs = {}
-
-    funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
-
-    fail = False
-    return funcs, fail
-
-
-def objfunc_2con(xdict):
-    """Evaluates the equation f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
-    x = xdict["x"]
-    y = xdict["y"]
-    funcs = {}
-
-    funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
-    conval = -x + y
-    funcs["con"] = conval * np.ones(2)
-    funcs["con2"] = (conval + 1) * np.ones(3)
 
     fail = False
     return funcs, fail
@@ -115,7 +86,7 @@ def test_opt(self):
 
     def test_opt_bug1(self):
         # Due to a new feature, there is a TypeError when you optimize a model without a constraint.
-        optProb = Optimization("Paraboloid", objfunc_no_con)
+        optProb = Optimization("Paraboloid", objfunc)
 
         # Design Variables
         optProb.addVarGroup("x", 1, varType="c", lower=-50.0, upper=50.0, value=0.0)
@@ -141,7 +112,7 @@
 
     def test_opt_bug_print_2con(self):
         # Optimization Object
-        optProb = Optimization("Paraboloid", objfunc_2con)
+        optProb = Optimization("Paraboloid", objfunc)
 
         # Design Variables
         optProb.addVarGroup("x", 1, varType="c", lower=-50.0, upper=50.0, value=0.0)
diff --git a/tests/test_user_termination.py b/tests/test_user_termination.py
index 9aa4a980..017255f3 100644
--- a/tests/test_user_termination.py
+++ b/tests/test_user_termination.py
@@ -30,8 +30,6 @@ def objfunc(self, xdict):
         funcs = {}
 
         funcs["obj"] = (x - 3.0) ** 2 + x * y + (y + 4.0) ** 2 - 3.0
-        conval = -x + y
-        funcs["con"] = conval
 
         if self.obj_count > self.max_obj:
             fail = 2