import sys
# Path to a local OpenOpt checkout providing the `line_search` module;
# adjust for your machine (dates from the pre-packaged scikits layout).
sys.path.append('/home/dmitrey/scikits/openopt/scikits/openopt/solvers/optimizers')

from line_search import CubicInterpolationSearch
from numpy import *
from numpy.linalg import norm
def BarzilaiBorwein_nonmonotone(function, x0, maxIter=1000):
    """Nonmonotone Barzilai-Borwein gradient descent.

    Minimizes `function` starting from `x0`, accepting a trial step when it
    improves on the maximum of the last M objective values (GLL-style
    nonmonotone Armijo condition) rather than on the previous value alone.

    Parameters
    ----------
    function : object with ``__call__(x) -> float`` and ``gradient(x) -> ndarray``
    x0 : array-like, starting point
    maxIter : int, maximum number of outer iterations

    Returns
    -------
    ndarray : approximate minimizer (last iterate when ``norm(grad) <= 1e-6``
        or the iteration budget is exhausted)
    """
    # asfarray was removed in NumPy 2.0; asarray(..., dtype=float) is equivalent.
    x0 = asarray(x0, dtype=float)
    lineSearch = CubicInterpolationSearch(min_step_size=0.0001)

    gradtol = 1e-6

    # Seed the BB step length alpha_k with one cubic-interpolation line
    # search along the initial gradient direction.
    g0 = function.gradient(x0)
    state0 = {'direction': g0}
    x1 = lineSearch(origin=x0, state=state0, function=function)

    alpha_k = abs(x1 - x0).sum() / abs(g0).sum()

    # Algorithm parameters: window size M for the nonmonotone acceptance,
    # Armijo constant rho, backtracking contraction bounds, step safeguards.
    M = 8
    rho = 0.5
    sigma1, sigma2 = 0.015, 0.8
    sigma = 0.015
    alpha_min, alpha_max = 0.015, 1e6

    xk = x0.copy()
    last_M_iter_objFun_values = []

    for k in range(maxIter):
        gk = function.gradient(xk)
        if norm(gk) <= gradtol:
            break

        fk = function(xk)
        last_M_iter_objFun_values.append(fk)
        # Keep only the last M objective values: drop the OLDEST entry.
        # (The original popped the newest, which froze the reference window
        # at the first M iterations' values forever.)
        if len(last_M_iter_objFun_values) > M:
            last_M_iter_objFun_values.pop(0)
        maxObjFunValue = max(last_M_iter_objFun_values)

        # Safeguard the BB step length into [alpha_min, alpha_max].
        if not alpha_min <= alpha_k <= alpha_max:
            alpha_k = sigma
        Lambda = 1.0 / alpha_k

        # Backtrack Lambda until the nonmonotone Armijo condition holds.
        while True:
            if function(xk - Lambda * gk) < maxObjFunValue - rho * Lambda * dot(gk.T, gk):
                Lambda_k = Lambda
                yk = -Lambda_k * gk
                xk += yk
                break
            sigma = (sigma1 + sigma2) / 2
            Lambda *= sigma

        # Next BB step length from the accepted step.
        alpha_k = -dot(gk.T, yk) / (Lambda_k * dot(gk.T, gk))

    return xk
71
class Function:
    """Separable test objective f(x) = sum_i (x_i - i)**6.

    The unique minimizer is x* = (0, 1, 2, ...), with f(x*) = 0.
    Exposes the ``__call__``/``gradient`` interface the solver expects.
    """
    def __call__(self, x):
        return ((x - arange(x.size)) ** 6).sum()

    def gradient(self, x):
        return 6 * (x - arange(x.size)) ** 5


if __name__ == '__main__':
    # Smoke test: minimize the sextic from a bounded oscillating start.
    x0 = sin(arange(300))
    fun = Function()
    x_opt = BarzilaiBorwein_nonmonotone(fun, x0)
    print(x_opt)
    print(fun(x_opt))
82