gradient_free_optimizers.optimizers.smb_opt.bayesian_optimization   (Rating: A)

Complexity

Total Complexity    6

Size/Duplication

Total Lines         87
Duplicated Lines    0 %

Importance

Changes             0

Metric   Value
wmc      6
eloc     62
dl       0
loc      87
rs       10
c        0
b        0
f        0

1 Function

Rating   Name          Duplication   Size   Complexity
A        normalize()   0             9      2

4 Methods

Rating   Name                                         Duplication   Size   Complexity
A        BayesianOptimizer.finish_initialization()    0             3      1
A        BayesianOptimizer._training()                0             6      1
A        BayesianOptimizer._expected_improvement()    0             5      1
A        BayesianOptimizer.__init__()                 0             31     1

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import numpy as np
from scipy.stats import norm


from .smbo import SMBO
from .surrogate_models import (
    GPR_linear,
    GPR,
)
from .acquisition_function import ExpectedImprovement


gaussian_process = {"gp_nonlinear": GPR(), "gp_linear": GPR_linear()}


def normalize(array):
    # Scale the array to [0, 1]. If all values are identical, return random
    # values instead, so the surrogate is not fit to a constant target.
    array_min = array.min()
    array_max = array.max()
    range_ = array_max - array_min

    if range_ == 0:
        return np.random.random_sample(array.shape)
    else:
        return (array - array_min) / range_


class BayesianOptimizer(SMBO):
    name = "Bayesian Optimization"
    _name_ = "bayesian_optimization"
    __name__ = "BayesianOptimizer"

    optimizer_type = "sequential"
    computationally_expensive = True

    def __init__(
        self,
        search_space,
        initialize={"grid": 4, "random": 2, "vertices": 4},
        constraints=[],
        random_state=None,
        rand_rest_p=0,
        nth_process=None,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        replacement=True,
        gpr=gaussian_process["gp_nonlinear"],
        xi=0.03,
    ):
        super().__init__(
            search_space=search_space,
            initialize=initialize,
            constraints=constraints,
            random_state=random_state,
            rand_rest_p=rand_rest_p,
            nth_process=nth_process,
            warm_start_smbo=warm_start_smbo,
            max_sample_size=max_sample_size,
            sampling=sampling,
            replacement=replacement,
        )

        self.gpr = gpr
        self.regr = gpr
        self.xi = xi

    def finish_initialization(self):
        # Enumerate all candidate positions once the initialization steps are done.
        self.all_pos_comb = self._all_possible_pos()
        return super().finish_initialization()

    def _expected_improvement(self):
        # Sample candidate positions and score them with the
        # expected-improvement acquisition function.
        self.pos_comb = self._sampling(self.all_pos_comb)

        acqu_func = ExpectedImprovement(self.regr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        # Fit the surrogate (gaussian process regressor) to the
        # normalized scores observed so far.
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
87