import numpy as np
from scipy.optimize import minimize_scalar

def f(x, a=150, b=2, f0=110):
    # Shifted Rosenbrock-type objective; global minimum f0 = 110 at x = (1, ..., 1)
    return sum(a*(x[i]**2 - x[i+1])**2 + b*(x[i] - 1)**2
               for i in range(len(x) - 1)) + f0

def Grad(x, a=150, b=2):
    # Analytic gradient of f for a three-dimensional x
    return np.array([4*a*x[0]*(x[0]**2 - x[1]) + 2*b*(x[0] - 1),
                     4*a*x[1]*(x[1]**2 - x[2]) + 2*b*(x[1] - 1) - 2*a*(x[0]**2 - x[1]),
                     -2*a*(x[1]**2 - x[2])])

def GradSearch(x, nm='Bounded', eps=0.0005, max_step=100, steps=0):
    # Exact line search: choose the step size alpha that minimizes f along -Grad(x)
    result = minimize_scalar(lambda alpha: f(x - alpha*Grad(x)),
                             bounds=(0, max_step), method=nm)
    new_x = x - result.x*Grad(x)
    print(x, f(x), f(new_x))
    # Stop once a step improves f by less than eps; otherwise recurse with the same settings
    if f(x) - f(new_x) < eps:
        return (f(x), x, steps)
    return GradSearch(new_x, nm, eps, max_step, steps + 1)
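
# A minimal usage sketch (the starting point x0 below is an assumption,
# not from the original): run the descent and report the result.
x0 = np.array([0.0, 0.0, 0.0])
f_min, x_min, n_steps = GradSearch(x0)
print(f_min, x_min, n_steps)
# The global minimum of f is f0 = 110 at x = (1, 1, 1); note the search stops
# as soon as one step improves f by less than eps, which may be well short
# of the true minimum on this ill-conditioned, Rosenbrock-like surface.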