Passed: push to master (e37ecf...2614a9) by Simon, created 04:19

BayesianOptimizer.__init__() (grade A)

Complexity
  Conditions: 1

Size
  Total Lines: 21
  Code Lines: 19

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0
Metric                             Value
cc    (cyclomatic complexity)      1
eloc  (effective lines of code)    19
nop   (number of parameters)       9
dl    (duplicated lines)           0
loc   (total lines of code)        21
rs                                 9.45
c                                  0
b                                  0
f                                  0

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent whenever the method needs more, or different, data.

There are several approaches to avoid long parameter lists: group related arguments into a parameter object (for example, a small dataclass), move rarely changed settings into a configuration object with sensible defaults, or split the method so each part needs only a few arguments. A sketch of the first approach follows the source listing below.

The flagged source:

```python
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import numpy as np
from scipy.stats import norm


from .smbo import SMBO
from .surrogate_models import (
    GPR_linear,
    GPR,
)
from .acquisition_function import ExpectedImprovement


# Surrogate models selectable by name; the nonlinear GPR is the default.
gaussian_process = {"gp_nonlinear": GPR(), "gp_linear": GPR_linear()}


def normalize(array):
    # Min-max normalization to [0, 1].
    num = array - array.min()
    den = array.max() - array.min()

    if den == 0:
        # All values are identical: fall back to random values
        # instead of dividing by zero.
        return np.random.random_sample(array.shape)
    else:
        return ((num / den) + 0) / 1  # equivalent to num / den


class BayesianOptimizer(SMBO):
    name = "Bayesian Optimization"
    _name_ = "bayesian_optimization"
    __name__ = "BayesianOptimizer"

    optimizer_type = "sequential"
    computationally_expensive = True

    def __init__(
        self,
        *args,
        gpr=gaussian_process["gp_nonlinear"],
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        warnings=100000000,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.gpr = gpr
        self.regr = gpr
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def finish_initialization(self):
        self.all_pos_comb = self._all_possible_pos()
        return super().finish_initialization()

    def _expected_improvement(self):
        # Score a sampled subset of all positions with the acquisition function.
        self.pos_comb = self._sampling(self.all_pos_comb)

        acqu_func = ExpectedImprovement(self.regr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        # Fit the surrogate model on the normalized observations.
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
```
76