1
2
3
4
5
6
7
8 import sys
9 sys.path.append('/home/dmitrey/scikits/openopt/scikits/openopt/solvers/optimizers')
10
11
12 from line_search import CubicInterpolationSearch
13 from numpy import *
14 from numpy.linalg import norm
15
def BarzilaiBorwein(function, x0, maxIter=1000):
    """Minimize *function* with the Barzilai-Borwein two-point gradient method.

    An initial cubic-interpolation line search supplies the first step
    (x0 -> x1); subsequent iterates use the BB step length
    alpha_k = s^T s / s^T y with a steepest-descent direction.

    Parameters
    ----------
    function : object
        Must provide ``gradient(x) -> ndarray``; the demo driver also
        gives it ``__call__`` for objective values.
    x0 : array_like
        Starting point; converted to a float ndarray.
    maxIter : int, optional
        Maximum number of BB iterations after the line-search step.

    Returns
    -------
    ndarray
        The final iterate; iteration stops when ||grad|| <= 1e-6 or
        maxIter is exhausted.
    """
    # asfarray was removed in NumPy 2.0; asarray(..., dtype=float) is the
    # drop-in equivalent.
    x0 = asarray(x0, dtype=float)
    lineSearch = CubicInterpolationSearch(min_step_size=0.0001)
    g0 = function.gradient(x0)

    # Already at a stationary point: nothing to do.
    if norm(g0) <= 1e-6:
        return x0

    # First step via line search.  NOTE(review): the search is assumed to
    # write the gradient at x1 back into this state dict under 'gradient',
    # and to interpret 'direction' with its own sign convention — confirm
    # against the line_search module.
    state0 = {'direction': g0}
    x1 = lineSearch(origin=x0, state=state0, function=function)
    print(x1)  # debug trace kept from the original
    s0 = x1 - x0

    # BUG FIX: the original read an undefined name 'state'; the line
    # search was handed 'state0'.
    y0 = state0['gradient'] - g0

    if norm(state0['gradient']) <= 1e-6:
        return x1

    xk = x1
    sk_ = s0
    yk_ = y0
    gk_ = state0['gradient']

    for k in range(maxIter):
        # BB step length: alpha_k = s_{k-1}^T s_{k-1} / s_{k-1}^T y_{k-1}
        alpha_k = dot(sk_, sk_) / dot(sk_, yk_)
        sk_ = -alpha_k * gk_
        xk = xk + sk_  # new array: avoids mutating x1 through the alias
        gk_prev = gk_.copy()
        gk_ = function.gradient(xk)
        yk_ = gk_ - gk_prev

        if norm(gk_) <= 1e-6:
            print('k=', k)
            return xk
    # BUG FIX: the mangled source returned xk unconditionally inside the
    # loop body (capping it at one iteration); the fallback return
    # belongs after the loop.
    return xk
52
if __name__ == '__main__':
    # Smoke test: convex quadratic with minimum at x* = (0, 1, ..., n-1),
    # where f(x*) = 0.
    class Function:
        """f(x) = sum_i (x_i - i)^2 with gradient 2*(x_i - i)."""
        # BUG FIX: the mangled source had these methods with stray diff
        # '-' markers and no enclosing 'class Function:' header, even
        # though the driver below instantiates Function().
        def __call__(self, x):
            return ((x - arange(x.size)) ** 2).sum()

        def gradient(self, x):
            return 2 * (x - arange(x.size))

    x0 = sin(arange(1000))
    fun = Function()
    x_opt = BarzilaiBorwein(fun, x0)
    print(x_opt)
    print(fun(x_opt))  # should be ~0 at convergence
63