Passed
Push to master (e7a955...cd6747) by Simon
created 04:58

BayesianOptimizer._training() (rating: A)

Complexity
  Conditions: 2

Size
  Total Lines: 9
  Code Lines: 7

Duplication
  Lines: 9
  Ratio: 100%

Importance
  Changes: 0
Metric    Value
eloc      7
dl        9
loc       9
rs        10
c         0
b         0
f         0
cc        2
nop       1
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import numpy as np
from scipy.stats import norm


from .smbo import SMBO
from .surrogate_models import (
    GPR_linear,
    GPR,
)
from .acquisition_function import ExpectedImprovement


gaussian_process = {"gp_nonlinear": GPR(), "gp_linear": GPR_linear()}


def normalize(array):
    # min-max normalization onto [0, 1]
    num = array - array.min()
    den = array.max() - array.min()

    if den == 0:
        # constant array: avoid division by zero, return random values instead
        return np.random.random_sample(array.shape)
    else:
        return num / den
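
# Example (illustration only, not part of this file): behaviour of the
# normalize helper above.
#
#     normalize(np.array([2.0, 4.0, 6.0]))   # -> array([0. , 0.5, 1. ])
#     normalize(np.array([3.0, 3.0, 3.0]))   # constant input -> random values in [0, 1)
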
class BayesianOptimizer(SMBO):
    name = "Bayesian Optimization"
    _name_ = "bayesian_optimization"

    def __init__(
        self,
        *args,
        gpr=gaussian_process["gp_nonlinear"],
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        warnings=100000000,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.gpr = gpr
        self.regr = gpr
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def _expected_improvement(self):
        # gather candidate positions (sub-sampled) and score them with the acquisition function
        all_pos_comb = self._all_possible_pos()
        self.pos_comb = self._sampling(all_pos_comb)

        acqu_func = ExpectedImprovement(self.gpr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        # no observations yet: fall back to a random move instead of fitting
        if len(Y_sample) == 0:
            return self.move_random()

        # fit the surrogate regressor to the normalized scores
        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
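
The acquisition step is delegated to the imported ExpectedImprovement class, whose source is not shown on this page. As a rough sketch of what such an acquisition function typically computes with scipy.stats.norm, the standard expected-improvement formula for maximization looks like this; the function name, its signature, and the xi default are illustrative assumptions rather than code from this repository.

# Sketch only (assumed helper, not from this file): standard expected improvement
# for maximization, using the same scipy.stats.norm that is imported above.
import numpy as np
from scipy.stats import norm


def expected_improvement_sketch(mu, sigma, y_best, xi=0.03):
    # mu, sigma: surrogate mean and standard deviation at the candidate positions
    # y_best:    best (normalized) score observed so far
    # xi:        exploration bias, same role as the xi constructor argument above
    mu = np.atleast_1d(np.asarray(mu, dtype=float))
    sigma = np.atleast_1d(np.asarray(sigma, dtype=float))

    imp = mu - y_best - xi
    with np.errstate(divide="ignore", invalid="ignore"):
        z = imp / sigma
        ei = imp * norm.cdf(z) + sigma * norm.pdf(z)
    ei[sigma == 0.0] = 0.0  # no predicted uncertainty -> no expected improvement
    return ei

Larger xi values require a bigger predicted gain over the best observed score before a candidate is rated highly, which biases the search toward exploration; that is the role the xi=0.03 constructor argument plays above.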