# Steepest-descent minimization of the Rosenbrock function with a
# bounded scalar line search for the step size; see grad_search below.
def grad_search(x, nm='Bounded', eps=0.0005, max_step=100, steps=0):
    """Minimize the Rosenbrock function by steepest descent with line search.

    Each iteration picks the step size ``alpha`` along the negative gradient
    by a bounded scalar minimization, then moves to ``x - alpha * grad``.
    The search stops once the objective improves by less than ``eps``.

    Parameters
    ----------
    x : sequence of float
        Starting point.
    nm : str
        Method name passed to ``scipy.optimize.minimize_scalar``
        (default ``'Bounded'``; scipy lowercases it internally).
    eps : float
        Convergence tolerance on the per-step decrease of the objective.
    max_step : float
        Upper bound of the line-search interval for ``alpha``.
    steps : int
        Iteration counter, kept for backward compatibility with the original
        recursive signature; the search now runs iteratively.

    Returns
    -------
    tuple of float
        The last point whose step improved the objective by less than ``eps``.
    """
    # Local imports keep the function self-contained; the original relied on
    # names imported elsewhere in the (unseen) file header.
    import numpy as np
    from scipy.optimize import minimize_scalar, rosen, rosen_der

    x = np.asarray(x, dtype=float)

    # Iterative loop instead of recursion. The original recursed once per
    # gradient step, which (a) called itself by the wrong name
    # (``GradSearch`` vs ``grad_search`` -> NameError), (b) dropped the
    # ``eps``/``max_step`` arguments on recursion, and (c) risked hitting
    # Python's recursion limit on slow convergence.
    while True:
        # Hoisted out of the lambda: the gradient does not depend on alpha.
        grad = rosen_der(x)
        result = minimize_scalar(
            lambda alpha: rosen(x - alpha * grad),
            bounds=(0, max_step),
            method=nm,
        )
        # Use the SAME gradient the line search optimized along; the original
        # searched along ``derivative(rosen, x)`` but stepped along ``Grad(x)``.
        new_x = x - result.x * grad
        if rosen(x) - rosen(new_x) < eps:
            # Converged: objective improvement below tolerance. As in the
            # original, return the pre-step point.
            return tuple(x)
        x = new_x
        steps += 1