# -*- coding: utf-8 -*-
#  CCP in Tomographic Imaging (CCPi) Core Imaging Library (CIL).

#   Copyright 2017 UKRI-STFC
#   Copyright 2017 University of Manchester

#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at

#   http://www.apache.org/licenses/LICENSE-2.0

#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
from ccpi.optimisation.algorithms import Algorithm

class GradientDescent(Algorithm):
    '''Implementation of the Gradient Descent algorithm

    Minimises objective_function by iterating the fixed-step update
    x_{k+1} = x_k - rate * objective_function.gradient(x_k)
    '''

    def __init__(self, **kwargs):
        '''Initialisation can be done at creation time, if all the
        required variables are passed, or deferred to set_up'''
        super(GradientDescent, self).__init__()

        x_init               = kwargs.get('x_init', None)
        objective_function   = kwargs.get('objective_function', None)
        rate                 = kwargs.get('rate', None)

        if x_init is not None and objective_function is not None and rate is not None:
            print(self.__class__.__name__, "set_up called from creator")
            self.set_up(x_init=x_init, objective_function=objective_function, rate=rate)
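
    # Hypothetical construction examples (x0, f and 0.1 are placeholders,
    # not part of this module): either configure at creation time,
    #   gd = GradientDescent(x_init=x0, objective_function=f, rate=0.1)
    # or defer configuration,
    #   gd = GradientDescent()
    #   gd.set_up(x_init=x0, objective_function=f, rate=0.1)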
    
    def should_stop(self):
        '''Stopping criterion, currently based only on the number of iterations'''
        return self.iteration >= self.max_iteration
    
    def set_up(self, x_init, objective_function, rate):
        '''initialisation of the algorithm'''
        self.x = x_init.copy()
        self.objective_function = objective_function
        self.rate = rate

        self.loss.append(objective_function(x_init))
        self.iteration = 0

        # The objective function may advertise support for in-place
        # ("memory optimised") gradient evaluation via a memopt attribute;
        # fall back to allocating a fresh gradient when it does not.
        try:
            self.memopt = self.objective_function.memopt
        except AttributeError:
            self.memopt = False
        if self.memopt:
            # Pre-allocate a buffer to hold the scaled gradient step.
            self.x_update = x_init.copy()

        self.configured = True

    def update(self):
        '''Single iteration: x <- x - rate * gradient(x)'''
        if self.memopt:
            # Evaluate the gradient into the pre-allocated buffer, then
            # scale and apply the step without any new allocations.
            self.objective_function.gradient(self.x, out=self.x_update)
            self.x_update *= -self.rate
            self.x += self.x_update
        else:
            self.x += -self.rate * self.objective_function.gradient(self.x)

    def update_objective(self):
        self.loss.append(self.objective_function(self.x))
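

if __name__ == '__main__':
    # ------------------------------------------------------------------
    # A minimal usage sketch, not part of the original module. The toy
    # least-squares objective below is hypothetical: any object providing
    # __call__, gradient(x, out=None) and (optionally) a memopt attribute
    # will do. It also assumes the ccpi Algorithm base class initialises
    # the loss list and iteration counter, as it does in CIL; there one
    # would normally call gd.run(iterations) instead of the hand-rolled
    # loop shown here.
    # ------------------------------------------------------------------
    import numpy as np

    class LeastSquaresToy(object):
        '''Toy objective f(x) = ||x - b||^2 with gradient 2 * (x - b)'''
        memopt = True  # advertise in-place gradient support

        def __init__(self, b):
            self.b = b

        def __call__(self, x):
            return float(((x - self.b) ** 2).sum())

        def gradient(self, x, out=None):
            if out is None:
                return 2.0 * (x - self.b)
            out[:] = 2.0 * (x - self.b)
            return out

    b = np.array([1.0, 2.0, 3.0])
    gd = GradientDescent(x_init=np.zeros_like(b),
                         objective_function=LeastSquaresToy(b),
                         rate=0.1)
    gd.max_iteration = 100  # consumed by should_stop()

    while not gd.should_stop():
        gd.update()
        gd.update_objective()
        gd.iteration += 1

    print('solution:', gd.x)                # approaches b
    print('final objective:', gd.loss[-1])  # approaches 0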