1
2
3
4
"""
Line search decorator that overrides the default alpha_step value with a
factor times the last alpha_step; the factor is a function of the current
direction and the last direction.
"""
8
9 import numpy
10
"""
Overrides the default step size, replacing it with a factor times the
previous step size.
"""
def __init__(self, line_search, **kwargs):
    """
    Stores the decorated line search.
    Parameters :
      - line_search is the decorated line search; it will be called by
        __call__ after the initial step size has been updated in the state
    """
    self.line_search = line_search
21
def __call__(self, origin, function, state, **kwargs):
    """
    Returns a good candidate by delegating to the decorated line search.
    Parameters :
      - origin is the origin of the search
      - function is the function to minimize
      - state is the state of the optimizer

    Before delegating, seeds state['initial_alpha_step'] with the last
    accepted step scaled by dot(g_old, d_old) / dot(g, d), so that the
    first trial step along the new direction predicts roughly the same
    decrease as the previous iteration.
    """
    # Guard on 'last_gradient' as well: on the very first call the
    # last direction/gradient have not been recorded yet, and
    # 'alpha_step' may already be present in the state.
    if 'alpha_step' in state and 'last_gradient' in state:
        state['initial_alpha_step'] = (
            numpy.dot(state['last_gradient'], state['last_direction'])
            / numpy.dot(state['gradient'], state['direction'])
            * state['alpha_step'])
    # Remember the current search data for the next scaling.
    state['last_direction'] = state['direction']
    state['last_gradient'] = state['gradient']
    return self.line_search(origin = origin, function = function, state = state, **kwargs)
35