Passed — Push to master (626f23...be3b1e) by Simon, created 01:48

EnsembleOptimizer.__init__() — rated A

Complexity
  Conditions: 1

Size
  Total Lines: 26
  Code Lines: 21

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric  Value    Meaning
cc      1        cyclomatic complexity
eloc    21       effective lines of code
nop     9        number of parameters
dl      0        duplicated lines
loc     26       lines of code
rs      9.376
c       0
b       0
f       0
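
The nop value of 9 counts every name in the method's parameter list, including self, *args, and **kwargs. As a quick illustration (the stand-in function below is hypothetical, not part of the report's tooling; its defaults are simplified because only the signature matters), the count can be reproduced with Python's inspect module:

import inspect

# Stand-in mirroring the parameter list of EnsembleOptimizer.__init__.
def init(self, *args, estimators=None, xi=0.01, warm_start_smbo=None,
         max_sample_size=10000000, sampling=None, warnings=100000000,
         **kwargs):
    pass

# self + *args + 6 keyword-only parameters + **kwargs = 9
print(len(inspect.signature(init).parameters))  # -> 9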

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent as callers come to need more, or different, data.

There are several approaches to avoid long parameter lists; the most common refactorings are:

- Introduce Parameter Object: group related arguments into a single object that is passed as one parameter.
- Preserve Whole Object: pass an object you already hold instead of extracting several of its values as separate arguments.
- Replace Parameter with Query: let the method compute a value itself rather than receiving it from the caller.

A minimal sketch of the first two approaches is shown below.
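In this sketch the plot function and Style class are hypothetical names invented purely for illustration:

from dataclasses import dataclass

# Before: six loosely related parameters.
# def plot(x, y, color, marker, line_width, label): ...

@dataclass
class Style:
    # Introduce Parameter Object: the four styling arguments travel together.
    color: str = "black"
    marker: str = "o"
    line_width: float = 1.0
    label: str = ""

# After: three parameters. Callers that already hold a Style pass it whole
# (Preserve Whole Object) instead of unpacking its fields one by one.
def plot(x, y, style=None):
    style = style or Style()
    ...

plot([0, 1], [1, 0], Style(color="red", label="run 1"))

The flagged constructor itself, which takes nine parameters (self, *args, six keyword arguments, and **kwargs), is reproduced below: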

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


from ..smb_opt.exp_imp_based_opt import ExpectedImprovementBasedOptimization
from ..smb_opt.surrogate_models import EnsembleRegressor


from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.svm import SVR
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.neural_network import MLPRegressor


class EnsembleOptimizer(ExpectedImprovementBasedOptimization):
    name = "Ensemble Optimizer"

    def __init__(
        self,
        *args,
        estimators=[
            GradientBoostingRegressor(n_estimators=5),
            # DecisionTreeRegressor(),
            # MLPRegressor(),
            GaussianProcessRegressor(),
        ],
        xi=0.01,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        warnings=100000000,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.estimators = estimators
        self.regr = EnsembleRegressor(estimators)
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()
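
Applied to this constructor, Introduce Parameter Object could look like the following. This is a hypothetical sketch, not code from the repository: SMBOSettings and its layout are invented for illustration, and it assumes the imports from the listing above. It also removes the mutable default argument, since a list built in the signature is created once at definition time, so every optimizer constructed without an explicit estimators argument would share the same estimator instances.

from dataclasses import dataclass, field


@dataclass
class SMBOSettings:
    # Hypothetical parameter object bundling the six keyword arguments;
    # the constructor's parameter count drops from 9 to 5.
    xi: float = 0.01
    warm_start_smbo: object = None
    max_sample_size: int = 10000000
    sampling: dict = field(default_factory=lambda: {"random": 1000000})
    warnings: int = 100000000


class EnsembleOptimizer(ExpectedImprovementBasedOptimization):
    name = "Ensemble Optimizer"

    def __init__(self, *args, estimators=None, settings=None, **kwargs):
        super().__init__(*args, **kwargs)
        if estimators is None:
            # Built per instance: a default list in the signature would
            # share the same estimator objects across all instances.
            estimators = [
                GradientBoostingRegressor(n_estimators=5),
                GaussianProcessRegressor(),
            ]
        settings = settings or SMBOSettings()
        self.estimators = estimators
        self.regr = EnsembleRegressor(estimators)
        self.xi = settings.xi
        self.warm_start_smbo = settings.warm_start_smbo
        self.max_sample_size = settings.max_sample_size
        self.sampling = settings.sampling
        self.warnings = settings.warnings

        self.init_warm_start_smbo()

With this layout, access stays explicit (settings.xi), and new tuning knobs can be added to SMBOSettings without touching the constructor's signature again.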