1import numpy as np
2import numpy.linalg as npla
3import scipy.optimize as spopt
4
5
def fun(x):
    """Rosenbrock function evaluated at a 1-D point ``x``.

    Returns sum_i [ 100*(x[i+1] - x[i]^2)^2 + (1 - x[i])^2 ].
    Global minimum is 0 at x = (1, ..., 1).
    """
    head, tail = x[:-1], x[1:]
    return np.sum(100.0 * (tail - head ** 2.0) ** 2.0 + (1.0 - head) ** 2.0)
11
12
def grad(x):
    """Analytic gradient of the Rosenbrock function at ``x`` (1-D array)."""
    g = np.zeros_like(x)
    inner = x[1:-1]
    prev = x[:-2]
    nxt = x[2:]
    # Interior coordinates couple to both neighbours.
    g[1:-1] = (200 * (inner - prev ** 2)
               - 400 * inner * (nxt - inner ** 2)
               - 2 * (1 - inner))
    # Endpoints each have a single neighbour.
    g[0] = -400 * x[0] * (x[1] - x[0] ** 2) - 2 * (1 - x[0])
    g[-1] = 200 * (x[-1] - x[-2] ** 2)
    return g
23
24
# ---------------------------------------------------------------------------
# Minimize the Rosenbrock function with a nonlinear conjugate-gradient
# iteration (Fletcher-Reeves beta), using SciPy's Wolfe line search to pick
# the step length along each direction.
# ---------------------------------------------------------------------------

n = 2              # problem dimension

maxiter = 100000   # safety cap on the number of CG iterations

tol = 1e-10        # stop once |f(x)| drops below this

x = np.zeros(n)    # starting point: the origin

gf = grad(x)
d = -gf            # first search direction: steepest descent

for k in range(maxiter):

    # Wolfe line search along d.  The first element of the returned tuple is
    # the step length; it is None when the search fails to find an acceptable
    # step (bug fix: the original crashed with a TypeError on None * d).
    alpha = spopt.line_search(fun, grad, x, d)[0]
    if alpha is None:
        print(f"{k+1}: line search failed")
        break

    x = x + alpha * d

    nad = npla.norm(alpha * d)
    # fun() returns a scalar, so abs() is the natural magnitude here
    # (same value as the original npla.norm on a non-negative scalar).
    nfun = abs(fun(x))

    print(f"{k+1}: {alpha:1.2e} {nad:1.2e} {nfun:1.2e}")

    if (nfun < tol) or np.isnan(nfun):
        break

    # Fletcher-Reeves update: beta = ||g_{k+1}||^2 / ||g_k||^2.
    ngf = grad(x)
    beta = np.dot(ngf, ngf) / np.dot(gf, gf)
    d = -ngf + beta * d

    gf = ngf