-
Notifications
You must be signed in to change notification settings - Fork 0
/
Convex_eq_constrained_Pure_Newtons_backtracking_line_search.py
65 lines (48 loc) · 1.48 KB
/
Convex_eq_constrained_Pure_Newtons_backtracking_line_search.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 1 21:40:10 2019
@author: fietekrutein
A script for solving an equality constrained convex problem
using Newton's method with backtracking line search
"""
import numpy as np
# functions as per example problem
def obj(x):
    """Log-barrier objective of the example problem: f(x) = -log(x1) - log(x2)."""
    return -(np.log(x[0]) + np.log(x[1]))
def grad(x):
    """Gradient of the log-barrier objective: componentwise -1/x_i."""
    return -1.0 / x
def hess(x):
    """Hessian of the log-barrier objective: diagonal matrix with entries 1/x_i^2."""
    return np.diag((1.0 / x) ** 2)
# Newtons method
def NewtonsMethod(x_init, A, grad, hess, alpha, beta, tol, nStop, f=None):
    """Equality-constrained Newton's method with Armijo backtracking line search.

    Minimizes f(x) subject to A @ x = b, starting from a FEASIBLE x_init.
    The eliminated-KKT Newton direction d satisfies A @ d = 0, so every
    iterate remains on the constraint plane.

    Parameters
    ----------
    x_init : ndarray
        Feasible starting point (A @ x_init = b must hold).
    A : ndarray
        Single equality-constraint row (1-D). NOTE(review): the normal
        term (A H^-1 A^T) is inverted by scalar division, so only one
        constraint row is supported — confirm before reusing with 2-D A.
    grad, hess : callable
        Return the gradient vector / Hessian matrix at x.
    alpha : float
        Armijo sufficient-decrease constant (typically in (0, 0.5)).
    beta : float
        Backtracking shrink factor in (0, 1).
    tol : float
        Stop when the 2-norm of the Newton step falls below tol.
    nStop : int
        Hard cap on the number of iterations.
    f : callable, optional
        Objective; defaults to the module-level `obj` for backward
        compatibility with existing callers.

    Returns
    -------
    ndarray
        The last ACCEPTED iterate (bug fix: the original returned the
        predecessor of the final point because it broke before updating).
    """
    if f is None:
        f = obj  # backward-compatible default: module-level objective
    x = x_init
    g = grad(x)
    h_inv = np.linalg.inv(hess(x))
    for nIter in range(1, nStop + 1):
        # Eliminated-KKT Newton direction:
        #   d = -H^-1 g + H^-1 A^T (A H^-1 A^T)^-1 (A H^-1 g)
        hA = np.dot(h_inv, A.T)
        Hg = np.dot(h_inv, g)
        d = -Hg + hA * (np.dot(A, Hg) / np.dot(A, hA))
        # Backtracking line search. Besides the Armijo test, also backtrack
        # while the trial point leaves the log-barrier domain (any x_i <= 0):
        # there obj() returns NaN, and `nan > c` is False, so the original
        # loop silently ACCEPTED infeasible points. The `not (<=)` form also
        # rejects a NaN objective defensively.
        t = 1.0
        fx = f(x)
        x_new = x + t * d
        while np.any(x_new <= 0) or not (f(x_new) <= fx + alpha * t * np.dot(g, d)):
            t = beta * t
            x_new = x + t * d
        print("Iteration {}: {}".format(nIter, x_new))
        g = grad(x_new)
        h_inv = np.linalg.inv(hess(x_new))
        x = x_new  # accept BEFORE the convergence test so we return the new point
        if np.linalg.norm(d, 2) <= tol:
            break
    return x
# ---------------------------------------------------------------------------
# Problem setup: minimize -log(x1) - log(x2) subject to 2*x1 + 5*x2 = b.
# ---------------------------------------------------------------------------
A = np.array([2, 5])      # single equality-constraint row
b = 1                     # right-hand side of the constraint

# Backtracking line-search parameters (Armijo constant / shrink factor).
alpha = 0.1
beta = 0.7

# Feasible starting point with equal components: x_i = b / (a1 + a2),
# so that A @ x_init = b holds exactly.
x_init = np.array([(b / (A[0] + A[1])), (b / (A[0] + A[1]))])

# Run the solver.
NewtonsMethod(x_init, A, grad, hess, alpha, beta, tol=10e-5, nStop=20)