Passed: Push to master (502a6e...660aa4) by Simon (04:50, queued 14s)

gradient_free_optimizers.optimizers.local_opt.simulated_annealing (Rating: A)

Complexity

Total Complexity 3

Size/Duplication

Total Lines 33
Duplicated Lines 0 %

Importance

Changes 0
Metric   Value
eloc     19
dl       0
loc      33
rs       10
c        0
b        0
f        0
wmc      3
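These size and complexity numbers can be sanity-checked locally. The sketch below assumes the radon package, a common Python complexity analyzer; the report does not state which tool produced the figures above, and the file path is illustrative only.

from radon.complexity import cc_visit
from radon.raw import analyze

# Illustrative path, derived from the module name above; adjust as needed.
path = "gradient_free_optimizers/optimizers/local_opt/simulated_annealing.py"

with open(path) as f:
    source = f.read()

# Raw line count; compare with "loc 33" in the table above.
print("loc:", analyze(source).loc)

# Per-block cyclomatic complexity; class blocks expose their methods,
# each of which should report a complexity of 1 for this file.
for block in cc_visit(source):
    for method in getattr(block, "methods", [block]):
        print(method.name, method.complexity)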

3 Methods

Rating   Name                                               Duplication   Size   Complexity
A        SimulatedAnnealingOptimizer._p_accept_default()    0             3      1
A        SimulatedAnnealingOptimizer.__init__()             0             6      1
A        SimulatedAnnealingOptimizer.evaluate()             0             3      1
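Each of the three methods contains a single straight-line code path with no branching, so its cyclomatic complexity is 1, and the class total reported above (Total Complexity 3, wmc 3) is simply the sum 1 + 1 + 1 = 3. The Size column counts each method's lines, matching the 6-line __init__ and the 3-line _p_accept_default and evaluate bodies in the source listing below.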
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


import numpy as np

from ..local_opt import StochasticHillClimbingOptimizer


class SimulatedAnnealingOptimizer(StochasticHillClimbingOptimizer):
    name = "Simulated Annealing"
    _name_ = "simulated_annealing"
    __name__ = "SimulatedAnnealingOptimizer"

    optimizer_type = "local"
    computationally_expensive = False

    def __init__(self, *args, annealing_rate=0.97, start_temp=1, **kwargs):
        super().__init__(*args, **kwargs)

        self.annealing_rate = annealing_rate
        self.start_temp = start_temp
        self.temp = start_temp

    def _p_accept_default(self):
        # the 'minus' is omitted because we maximize a score
        return np.exp(self._exponent)

    def evaluate(self, score_new):
        StochasticHillClimbingOptimizer.evaluate(self, score_new)
        # geometric cooling: shrink the temperature after every evaluation
        self.temp *= self.annealing_rate
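For context, the class layers only two things on top of StochasticHillClimbingOptimizer: _p_accept_default maps the inherited _exponent (defined in the parent class and not shown here; presumably a score difference scaled by the current temperature) to a Metropolis-style acceptance probability, and evaluate multiplies the temperature by annealing_rate after every evaluation, a geometric cooling schedule. The following is a minimal sketch of that acceptance rule for a maximization setting, not the library's own code; it shows why the exponent needs no minus sign when a score is maximized.

import numpy as np

rng = np.random.default_rng(0)

def metropolis_accept(score_new, score_current, temp):
    # Better candidates are always accepted. Worse ones are accepted with
    # probability exp(delta / temp), where delta = score_new - score_current
    # is negative; higher temperatures accept worse moves more often.
    if score_new >= score_current:
        return True
    return rng.random() < np.exp((score_new - score_current) / temp)

And a minimal usage sketch, assuming the top-level gradient_free_optimizers package exposes this class with its usual search_space dict / .search() interface and forwards the annealing_rate and start_temp parameters from __init__ above; the objective function and search space here are illustrative.

import numpy as np
from gradient_free_optimizers import SimulatedAnnealingOptimizer

def objective(para):
    # concave test function; the optimizer maximizes the returned score
    return -(para["x"] ** 2)

search_space = {"x": np.arange(-10, 10, 0.1)}

opt = SimulatedAnnealingOptimizer(search_space, annealing_rate=0.97, start_temp=1)
opt.search(objective, n_iter=100)
print(opt.best_para, opt.best_score)

With annealing_rate close to 1 the temperature decays slowly and worse moves keep being accepted for longer; smaller values make the search behave like plain stochastic hill climbing sooner.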