Passed: Push to master ( e7a955...cd6747 ) by Simon, created 04:58

normalize() (rating A)

Complexity:   Conditions 2
Size:         Total Lines 8, Code Lines 6
Duplication:  Lines 0, Ratio 0 %
Importance:   Changes 0

Metric  Value
eloc    6
dl      0
loc     8
rs      10
c       0
b       0
f       0
cc      2
nop     1
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import numpy as np
from scipy.stats import norm


from .smbo import SMBO
from .surrogate_models import (
    RandomForestRegressor,
    ExtraTreesRegressor,
    GradientBoostingRegressor,
)
from .acquisition_function import ExpectedImprovement


tree_regressor_dict = {
    "random_forest": RandomForestRegressor,
    "extra_tree": ExtraTreesRegressor,
    "gradient_boost": GradientBoostingRegressor,
}
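# The keys of tree_regressor_dict above are the accepted values for the
# `tree_regressor` argument of ForestOptimizer below; __init__ looks up the
# matching surrogate-model class in this dict and instantiates it with the
# `tree_para` keyword arguments.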

def normalize(array):
    num = array - array.min()
    den = array.max() - array.min()

    if den == 0:
        return np.random.random_sample(array.shape)
    else:
        return num / den
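# Illustrative usage (hypothetical input): min-max scaling maps the values of
# `array` into [0, 1]; for a constant array (den == 0) the function returns
# uniform random samples of the same shape instead.
#
#   >>> normalize(np.array([2.0, 4.0, 8.0]))
#   array([0.        , 0.33333333, 1.        ])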


class ForestOptimizer(SMBO):
    """Based on the forest-optimizer in the scikit-optimize package"""

    name = "Forest Optimization"
    _name_ = "forest_optimization"

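    # Parameter notes (inferred from the code in this file; helpers such as
    # init_warm_start_smbo, _all_possible_pos and _sampling are presumably
    # provided by the SMBO base class and are not shown on this page):
    #   tree_regressor  - key into tree_regressor_dict selecting the surrogate model
    #   tree_para       - keyword arguments for that surrogate-model class
    #   xi              - passed on to the ExpectedImprovement acquisition function
    #   warm_start_smbo - stored for init_warm_start_smbo()
    #   max_sample_size, sampling - stored settings for the candidate-sampling step
    #   warnings        - stored, but not referenced in the code shown here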
    def __init__(
        self,
        *args,
        tree_regressor="extra_tree",
        tree_para={"n_estimators": 100},
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling={"random": 1000000},
        warnings=100000000,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.tree_regressor = tree_regressor
        self.tree_para = tree_para
        self.regr = tree_regressor_dict[tree_regressor](**self.tree_para)
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def _expected_improvement(self):
        all_pos_comb = self._all_possible_pos()
        self.pos_comb = self._sampling(all_pos_comb)

        acqu_func = ExpectedImprovement(self.regr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        if len(Y_sample) == 0:
            return self.move_random()

        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
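The expected-improvement computation itself happens in the ExpectedImprovement class imported at the top of the file, which is not shown on this page. For orientation, the following standalone sketch shows the standard expected-improvement formula for maximization, using the same scipy.stats.norm import and an exploration parameter xi as above; the function name, signature, and clipping threshold are illustrative assumptions, not the package's API.

import numpy as np
from scipy.stats import norm


def expected_improvement_sketch(mu, sigma, y_best, xi=0.03):
    # mu, sigma: surrogate-model mean and standard deviation at the candidate points
    # y_best:    best (normalized) score observed so far
    # xi:        exploration parameter; larger values favour uncertain regions
    sigma = np.clip(sigma, 1e-12, None)  # guard against division by zero
    improvement = mu - y_best - xi       # expected gain over the incumbent
    z = improvement / sigma
    return improvement * norm.cdf(z) + sigma * norm.pdf(z)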