Passed
Push — master ( 7123ab...82152a )
by Simon
02:00 queued 12s
created

EnsembleOptimizer.__init__()   A

Complexity

Conditions 1

Size

Total Lines 26
Code Lines 21

Duplication

Lines 26
Ratio 100 %

Importance

Changes 0
Metric Value
cc 1
eloc 21
nop 9
dl 26
loc 26
rs 9.376
c 0
b 0
f 0

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more, or different data.

There are several approaches to avoid long parameter lists:

1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
import numpy as np
6
from scipy.stats import norm
7
8
from ..smb_opt.smbo import SMBO
9
from ..smb_opt.surrogate_models import EnsembleRegressor
10
from ..smb_opt.acquisition_function import ExpectedImprovement
11
12
13
from sklearn.tree import DecisionTreeRegressor
14
from sklearn.ensemble import GradientBoostingRegressor
15
from sklearn.svm import SVR
16
from sklearn.gaussian_process import GaussianProcessRegressor
17
from sklearn.neural_network import MLPRegressor
18
19
20
def normalize(array):
    """Linearly scale ``array`` into the unit interval [0, 1].

    Parameters
    ----------
    array : np.ndarray
        Values to normalize.

    Returns
    -------
    np.ndarray
        ``(array - min) / (max - min)``.  If the input is constant
        (``max == min``) the scale is undefined, so a uniform random
        array of the same shape is returned instead — this keeps the
        downstream surrogate model trainable on a non-constant target.
    """
    num = array - array.min()
    den = array.max() - array.min()

    if den == 0:
        # Degenerate constant input: avoid division by zero.
        return np.random.random_sample(array.shape)

    # Original code computed ((num / den) + 0) / 1; the "+ 0) / 1" was a
    # numeric no-op and has been removed.
    return num / den
28
29
30 View Code Duplication
class EnsembleOptimizer(SMBO):
    """Sequential model-based optimizer whose surrogate is an ensemble
    of regressors (wrapped in ``EnsembleRegressor``), scored with the
    Expected Improvement acquisition function.
    """

    name = "Ensemble Optimizer"

    def __init__(
        self,
        *args,
        estimators=None,
        xi=0.01,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling=None,
        warnings=100000000,
        **kwargs
    ):
        """Set up the ensemble surrogate and SMBO bookkeeping.

        Parameters
        ----------
        estimators : list of sklearn regressors, optional
            Members of the surrogate ensemble.  Defaults to a small
            gradient-boosting model plus a Gaussian process.  ``None``
            sentinel avoids the mutable-default-argument pitfall: the
            previous list default shared the *same* stateful estimator
            instances across every optimizer constructed with defaults.
        xi : float
            Exploration parameter passed to Expected Improvement.
        warm_start_smbo : optional
            Previous search data used by ``init_warm_start_smbo``.
        max_sample_size : int
            Upper bound on candidate positions considered per step.
        sampling : dict, optional
            Sampling strategy, defaults to ``{"random": 1000000}``
            (``None`` sentinel for the same mutable-default reason).
        warnings : int
            Warning threshold forwarded to the sampling machinery.
        """
        super().__init__(*args, **kwargs)

        if estimators is None:
            estimators = [
                GradientBoostingRegressor(n_estimators=5),
                GaussianProcessRegressor(),
            ]
        if sampling is None:
            sampling = {"random": 1000000}

        self.estimators = estimators
        self.regr = EnsembleRegressor(estimators)
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def _expected_improvement(self):
        """Score the (sub)sampled candidate positions with EI.

        Returns the Expected Improvement of ``self.pos_comb`` given the
        samples observed so far.
        """
        all_pos_comb = self._all_possible_pos()
        # Subsample the candidate set per the configured strategy.
        self.pos_comb = self._sampling(all_pos_comb)

        acqu_func = ExpectedImprovement(self.regr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        """Fit the ensemble surrogate on the samples collected so far.

        Falls back to a random move when no observations exist yet.
        """
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        if len(Y_sample) == 0:
            return self.move_random()

        # Targets are normalized to [0, 1] before fitting the surrogate.
        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
76