import numpy as np
from scipy.optimize import minimize_scalar

def FletcherReeves(x, step=0, d_prev=0, x_prev=0, delta=0.01, eps=0.01, max_step=100):
    d = -Grad(x)  # f and Grad are the objective and its gradient defined above
    if step > 0:  # Fletcher-Reeves update: beta = ||grad f(x_k)||^2 / ||grad f(x_{k-1})||^2
        d += d_prev * (np.linalg.norm(Grad(x)) / np.linalg.norm(Grad(x_prev)))**2
    # 1-D line search for the step length; recent SciPy rejects bounds with 'Golden',
    # so the bounded method is used here
    alpha = minimize_scalar(lambda a: f(x + a * d), bounds=(-max_step, max_step), method='bounded')
    new_x = x + alpha.x * d
    # stop once both the argument and the function value barely change
    if np.linalg.norm(x - new_x) < delta and f(x) - f(new_x) < eps:
        return f(new_x), new_x, step
    return FletcherReeves(new_x, step + 1, d, x, delta, eps, max_step)
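As a quick sanity check, something like the following could drive the routine on a simple quadratic; the f and Grad below are hypothetical stand-ins for whatever objective and gradient are defined elsewhere in the text, not part of the original listing.

import numpy as np

def f(x):
    # hypothetical test objective: minimum 0 at (3, -2)
    return (x[0] - 3)**2 + 10 * (x[1] + 2)**2

def Grad(x):
    # gradient of the test objective above
    return np.array([2 * (x[0] - 3), 20 * (x[1] + 2)])

fmin, xmin, steps = FletcherReeves(np.array([0.0, 0.0]))
print(fmin, xmin, steps)  # should approach 0 near (3, -2) within a few steps

Since the function resolves f and Grad at call time, defining them after FletcherReeves is enough for the call to work.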