Skip to content

Commit cc8b880

Browse files
kanekosh, marcomangano, eirikurj
authored
IPOPT wrapper improvement to get more iteration information (#459)
* save IPOPT iteration histories * optional parameters * introduce major iteration flag to IPOPT * docs * test and example * improve error handling * check if IPOPT>=3.14 * remove unnecessary get_current_iterate * isort and pre-commit fixes * rename iter variables to follow IPOPT * fix variable names in test * rename variables in hs015 plotting example * drop ipopt 3.13 support * patch version bump * trigger CI --------- Co-authored-by: Marco Mangano <36549388+marcomangano@users.noreply.github.com> Co-authored-by: Eirikur Jonsson <36180221+eirikurj@users.noreply.github.com>
1 parent 8706115 commit cc8b880

8 files changed

Lines changed: 124 additions & 12 deletions

File tree

doc/api/history.rst

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,8 @@ In this case, the history file would have the following layout::
5858
The main optimization history is indexed via call counters, in this example ``0`` and ``1``.
5959
Note that they do not match the major/minor iterations of a given optimizer, since gradient evaluations are stored separately from the function evaluations.
6060

61-
For SNOPT, a number of other values can be requested and stored in each major iteration, such as the feasibility and optimality from the SNOPT print out file.
61+
For SNOPT and IPOPT, a number of other values can be requested and stored in each major iteration, such as the feasibility and optimality.
62+
See SNOPT and IPOPT documentation pages for more details.
6263

6364

6465
API

doc/optimizers/IPOPT_options.yaml

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,3 +10,25 @@ linear_solver:
1010
desc: The linear solver used.
1111
sb:
1212
desc: This is an undocumented option which suppresses the IPOPT header from being printed to screen every time.
13+
save_major_iteration_variables:
14+
desc: |
15+
This option is unique to the Python wrapper, and takes a list of values which can be saved at each major iteration to the History file.
16+
The possible values are
17+
18+
- ``alg_mod``: algorithm mode (0 for regular, 1 for restoration)
19+
- ``d_norm``: infinity norm of the primal step
20+
- ``regularization_size``: regularization term for the Hessian of the Lagrangian
21+
- ``ls_trials``: number of backtracking line search iterations
22+
- ``g_violation``: vector of constraint violations
23+
- ``grad_lag_x``: gradient of Lagrangian
24+
25+
In addition, a set of default parameters are saved to the history file and cannot be changed. These are
26+
27+
- ``inf_pr``: primal infeasibility
28+
- ``inf_du``: dual infeasibility (optimality measure)
29+
- ``mu``: barrier parameter
30+
- ``alpha_pr``: step size for primal variables
31+
- ``alpha_du``: step size for dual variables
32+
33+
pyOptSparse uses the same parameter names as `IPOPT <https://coin-or.github.io/Ipopt/OUTPUT.html>`_ and `cyipopt <https://cyipopt.readthedocs.io/en/stable/reference.html>`_.
34+
Detailed descriptions of these parameters can be found in their documentation.

examples/hs015VarPlot.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,4 +74,32 @@
7474
plt.xlabel("x1")
7575
plt.ylabel("x2")
7676
plt.title("Simple optimizer comparison")
77+
78+
# Plot optimality and feasibility history for SNOPT and IPOPT
79+
list_opt_with_optimality = [opt for opt in db.keys() if opt in ["ipopt", "snopt"]]
80+
if len(list_opt_with_optimality) > 0:
81+
fig, axs = plt.subplots(2, 1)
82+
83+
for opt in list_opt_with_optimality:
84+
# get iteration count, optimality, and feasibility.
85+
# SNOPT and IPOPT use different parameter names for optimality and feasibility.
86+
if opt == "ipopt":
87+
optimality_name = "inf_du"
88+
feasibility_name = "inf_pr"
89+
elif opt == "snopt":
90+
optimality_name = "optimality"
91+
feasibility_name = "feasibility"
92+
93+
hist = db[opt].getValues(names=["iter", optimality_name, feasibility_name])
94+
axs[0].plot(hist["iter"], hist[optimality_name], "o-", label=opt)
95+
axs[1].plot(hist["iter"], hist[feasibility_name], "o-", label=opt)
96+
97+
axs[0].set_yscale("log")
98+
axs[1].set_yscale("log")
99+
axs[0].legend()
100+
axs[0].set_ylabel("Optimality")
101+
axs[0].set_xticklabels([])
102+
axs[1].set_ylabel("Feasibility")
103+
axs[1].set_xlabel("Iteration")
104+
77105
plt.show()

pyoptsparse/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "2.14.3"
1+
__version__ = "2.14.4"
22

33
from .pyOpt_history import History
44
from .pyOpt_variable import Variable

pyoptsparse/pyIPOPT/pyIPOPT.py

Lines changed: 51 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,9 @@ def __init__(self, raiseError=True, options={}):
4747
# IPOPT needs Jacobians in coo format
4848
self.jacType = "coo"
4949

50+
# List of pyIPOPT-specific options. We remove these from the list of options so these don't go into cyipopt.
51+
self.pythonOptions = ["save_major_iteration_variables"]
52+
5053
@staticmethod
5154
def _getInforms():
5255
informs = {
@@ -81,6 +84,7 @@ def _getDefaultOptions():
8184
"print_user_options": [str, "yes"],
8285
"output_file": [str, "IPOPT.out"],
8386
"linear_solver": [str, "mumps"],
87+
"save_major_iteration_variables": [list, []],
8488
}
8589
return defOpts
8690

@@ -203,7 +207,7 @@ def __call__(
203207
jac["coo"][ICOL].copy().astype("int_"),
204208
)
205209

206-
class CyIPOPTProblem:
210+
class CyIPOPTProblem(cyipopt.Problem):
207211
# Define the 4 call back functions that ipopt needs:
208212
def objective(_, x):
209213
fobj, fail = self._masterFunc(x, ["fobj"])
@@ -242,18 +246,58 @@ def jacobianstructure(_):
242246

243247
# Define intermediate callback. If this method returns false,
244248
# Ipopt will terminate with the User_Requested_Stop status.
245-
def intermediate(_, *args, **kwargs):
249+
# Also save iteration info in the history file. This callback is called every "major" iteration but not in line search iterations.
250+
# fmt: off
251+
def intermediate(self_cyipopt, alg_mod, iter_count, obj_value, inf_pr, inf_du, mu, d_norm, regularization_size, alpha_du, alpha_pr, ls_trials):
252+
# fmt: on
253+
if self.storeHistory:
254+
iterDict = {
255+
"isMajor": True,
256+
"inf_pr": inf_pr,
257+
"inf_du": inf_du,
258+
"mu": mu,
259+
"alpha_pr": alpha_pr,
260+
"alpha_du": alpha_du,
261+
}
262+
# optional parameters
263+
for saveVar in self.getOption("save_major_iteration_variables"):
264+
if saveVar == "alg_mod":
265+
iterDict[saveVar] = alg_mod
266+
elif saveVar == "d_norm":
267+
iterDict[saveVar] = d_norm
268+
elif saveVar == "regularization_size":
269+
iterDict[saveVar] = regularization_size
270+
elif saveVar == "ls_trials":
271+
iterDict[saveVar] = ls_trials
272+
elif saveVar in ["g_violation", "grad_lag_x"]:
273+
iterDict[saveVar] = self_cyipopt.get_current_violations()[saveVar]
274+
else:
275+
# IPOPT doesn't handle Python error well, so print an error message and send termination signal to IPOPT
276+
print(f"ERROR: Received unknown IPOPT save variable `{saveVar}`. "
277+
+ "Please see 'save_major_iteration_variables' option in the pyOptSparse "
278+
+ "documentation under 'IPOPT'.")
279+
print("Terminating IPOPT...")
280+
return False
281+
282+
# Find pyoptsparse call counters for objective and constraints calls at current x.
283+
# IPOPT calls objective and constraints separately, so we find two call counters and append iter_dict to both counters.
284+
call_counter_1 = self.hist._searchCallCounter(self.cache["x"])
285+
call_counter_2 = self.hist._searchCallCounter(self.cache["x"], last=call_counter_1 - 1)
286+
287+
for call_counter in [call_counter_2, call_counter_1]:
288+
if call_counter is not None:
289+
self.hist.write(call_counter, iterDict)
290+
246291
if self.userRequestedTermination is True:
247292
return False
248293
else:
249294
return True
250295

251296
timeA = time.time()
252297

253-
nlp = cyipopt.Problem(
298+
nlp = CyIPOPTProblem(
254299
n=len(xs),
255300
m=ncon,
256-
problem_obj=CyIPOPTProblem(),
257301
lb=blx,
258302
ub=bux,
259303
cl=blc,
@@ -296,4 +340,7 @@ def _set_ipopt_options(self, nlp):
296340
# ---------------------------------------------
297341

298342
for name, value in self.options.items():
343+
# skip pyIPOPT-specific options
344+
if name in self.pythonOptions:
345+
continue
299346
nlp.add_option(name, value)

pyoptsparse/pyOpt_history.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@ def read(self, key):
152152
except KeyError:
153153
return None
154154

155-
def _searchCallCounter(self, x):
155+
def _searchCallCounter(self, x, last=None):
156156
"""
157157
Searches through existing callCounters, and finds the one corresponding
158158
to an evaluation at the design vector `x`.
@@ -162,6 +162,8 @@ def _searchCallCounter(self, x):
162162
----------
163163
x : ndarray
164164
The unscaled DV as a single array.
165+
last : int, optional
166+
The last callCounter to search from. If not provided, use the last callCounter in db.
165167
166168
Returns
167169
-------
@@ -173,7 +175,8 @@ def _searchCallCounter(self, x):
173175
-----
174176
The tolerance used for this is the value `numpy.finfo(numpy.float64).eps`.
175177
"""
176-
last = int(self.db["last"])
178+
if last is None:
179+
last = int(self.db["last"])
177180
callCounter = None
178181
for i in range(last, 0, -1):
179182
key = str(i)

pyoptsparse/pyOpt_optimizer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -578,9 +578,9 @@ def _masterFunc2(self, x, evaluate, writeHist=True):
578578
# timing
579579
hist["time"] = time.time() - self.startTime
580580

581-
# Save information about major iteration counting (only matters for SNOPT).
582-
if self.name == "SNOPT":
583-
hist["isMajor"] = False # this will be updated in _snstop if it is major
581+
# Save information about major iteration counting (only matters for SNOPT and IPOPT).
582+
if self.name in ["SNOPT", "IPOPT"]:
583+
hist["isMajor"] = False # this will be updated in _snstop or cyipopt's `intermediate` if it is major
584584
else:
585585
hist["isMajor"] = True # for other optimizers we assume everything's major
586586

tests/test_hs015.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,8 @@ def test_optimization(self, optName):
130130
def test_ipopt(self):
131131
self.optName = "IPOPT"
132132
self.setup_optProb()
133-
optOptions = self.optOptions.pop(self.optName, None)
133+
store_vars = ["alg_mod", "d_norm", "regularization_size", "ls_trials", "g_violation", "grad_lag_x"]
134+
optOptions = {"save_major_iteration_variables": store_vars}
134135
sol = self.optimize(optOptions=optOptions, storeHistory=True)
135136
# Check Solution
136137
self.assert_solution_allclose(sol, self.tol[self.optName])
@@ -144,6 +145,16 @@ def test_ipopt(self):
144145
data_last = hist.read(hist.read("last"))
145146
self.assertGreater(data_last["iter"], 0)
146147

148+
# Check entries in iteration data
149+
data = hist.getValues(callCounters=["last"])
150+
default_store_vars = ["inf_pr", "inf_du", "mu", "alpha_pr", "alpha_du"]
151+
for var in default_store_vars + store_vars:
152+
self.assertIn(var, data.keys())
153+
self.assertEqual(data["inf_pr"].shape, (1, 1))
154+
self.assertEqual(data["inf_du"].shape, (1, 1))
155+
self.assertEqual(data["g_violation"].shape, (1, 2))
156+
self.assertEqual(data["grad_lag_x"].shape, (1, 2))
157+
147158
# Make sure there is no duplication in objective history
148159
data = hist.getValues(names=["obj"])
149160
objhis_len = data["obj"].shape[0]

0 commit comments

Comments
 (0)