Passed
Push to master (626f23...be3b1e) by Simon, created 01:48

BayesianOptimizer.__init__() (rating: A)

Complexity
  Conditions: 1

Size
  Total Lines: 21
  Code Lines: 19

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric   Value
cc       1
eloc     19
nop      9
dl       0
loc      21
rs       9.45
c        0
b        0
f        0

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to grow inconsistent as you need more, or different, data.

There are several approaches to avoiding long parameter lists. One of them, grouping related arguments into a single parameter object, is sketched after the code listing below.

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


from .exp_imp_based_opt import ExpectedImprovementBasedOptimization

from .surrogate_models import (
    GPR_linear,
    GPR,
)

gaussian_process = {"gp_nonlinear": GPR(), "gp_linear": GPR_linear()}


class BayesianOptimizer(ExpectedImprovementBasedOptimization):
    name = "Bayesian Optimization"

    def __init__(
        self,
        *args,
        gpr=gaussian_process["gp_nonlinear"],
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        warnings=100000000,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.gpr = gpr
        self.regr = gpr
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()
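One possible remedy, shown here as a minimal sketch rather than the project's actual API, is to bundle the surrogate-model keyword arguments into a single parameter object. The SurrogateSettings dataclass and the settings keyword are hypothetical names introduced for illustration; their defaults mirror the ones in the listing above.

# Hypothetical refactoring sketch (not part of the project): group the related
# keyword arguments into one settings object to shorten the parameter list.
from dataclasses import dataclass, field

from .exp_imp_based_opt import ExpectedImprovementBasedOptimization
from .surrogate_models import GPR


@dataclass
class SurrogateSettings:
    # Defaults mirror the current keyword defaults of BayesianOptimizer.__init__.
    gpr: object = field(default_factory=GPR)
    xi: float = 0.03
    warm_start_smbo: object = None
    max_sample_size: int = 10000000
    sampling: dict = field(default_factory=lambda: {"random": 1000000})
    warnings: int = 100000000


class BayesianOptimizer(ExpectedImprovementBasedOptimization):
    name = "Bayesian Optimization"

    def __init__(self, *args, settings=None, **kwargs):
        super().__init__(*args, **kwargs)
        settings = settings or SurrogateSettings()

        # The attribute names stay the same, so downstream code is unaffected.
        self.gpr = settings.gpr
        self.regr = settings.gpr
        self.xi = settings.xi
        self.warm_start_smbo = settings.warm_start_smbo
        self.max_sample_size = settings.max_sample_size
        self.sampling = settings.sampling
        self.warnings = settings.warnings

        self.init_warm_start_smbo()

With this grouping, the method's parameter count (nop) drops from 9 to 4 (self, *args, settings, **kwargs), and callers that need non-default values construct one settings object instead of passing six separate keywords.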