# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import time
import pytest
import random
import numpy as np

from gradient_free_optimizers import EnsembleOptimizer, RandomSearchOptimizer
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.neural_network import MLPRegressor
from sklearn.gaussian_process.kernels import Matern, WhiteKernel, RBF

from ._base_para_test import _base_para_test_func
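

# Objective functions that randomly return an invalid score (np.nan,
# -np.inf or np.inf) in roughly half of the evaluations. They are used
# below to generate search data that contains invalid values.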
def objective_function_nan(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.nan


def objective_function_m_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return -np.inf


def objective_function_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.inf
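

# Search spaces and a simple parabola objective: search_space_ is used
# with the invalid-score objectives above; search_space, search_space2
# and search_space3 are used with the parabola objective.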
search_space_ = {"x1": np.arange(0, 20, 1)}


def objective_function(para):
    score = -para["x1"] * para["x1"]
    return score


search_space = {"x1": np.arange(-10, 11, 1)}
search_space2 = {"x1": np.arange(-10, 51, 1)}
search_space3 = {"x1": np.arange(-50, 11, 1)}
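

# Generate search data: run a RandomSearchOptimizer on each search-space/
# objective combination, including the objectives with invalid scores.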
opt1 = RandomSearchOptimizer(search_space)
opt2 = RandomSearchOptimizer(search_space2)
opt3 = RandomSearchOptimizer(search_space3)
opt4 = RandomSearchOptimizer(search_space_)
opt5 = RandomSearchOptimizer(search_space_)
opt6 = RandomSearchOptimizer(search_space_)

opt1.search(objective_function, n_iter=30)
opt2.search(objective_function, n_iter=30)
opt3.search(objective_function, n_iter=30)
opt4.search(objective_function_nan, n_iter=30)
opt5.search(objective_function_m_inf, n_iter=30)
opt6.search(objective_function_inf, n_iter=30)
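
# The collected search data of each run, used below as warm-start data
# for the EnsembleOptimizer.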
search_data1 = opt1.results
search_data2 = opt2.results
search_data3 = opt3.results
search_data4 = opt4.results
search_data5 = opt5.results
search_data6 = opt6.results
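

# Helper surrogate model: a thin wrapper around GaussianProcessRegressor
# with a combined Matern + RBF + WhiteKernel kernel.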
class GPR:
    def __init__(self):
        nu_param = 0.5
        matern = Matern(
            # length_scale=length_scale_param,
            # length_scale_bounds=length_scale_bounds_param,
            nu=nu_param,
        )

        self.gpr = GaussianProcessRegressor(
            kernel=matern + RBF() + WhiteKernel(), n_restarts_optimizer=1
        )

    def fit(self, X, y):
        self.gpr.fit(X, y)

    def predict(self, X, return_std=False):
        return self.gpr.predict(X, return_std=return_std)
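

# Parameter configurations passed to the EnsembleOptimizer: estimator
# ensembles, xi values, warm-start search data (including runs with
# NaN/inf scores), init sample sizes, sampling/warnings settings and
# random-restart probabilities.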
ensemble_optimizer_para = [
    {
        "estimators": [
            GradientBoostingRegressor(n_estimators=5),
            GaussianProcessRegressor(),
        ]
    },
    {
        "estimators": [
            GradientBoostingRegressor(n_estimators=5),
            DecisionTreeRegressor(),
            MLPRegressor(),
            GaussianProcessRegressor(),
        ]
    },
    {"xi": 0.001},
    {"xi": 0.5},
    {"xi": 0.9},
    {"warm_start_smbo": None},
    {"warm_start_smbo": search_data1},
    {"warm_start_smbo": search_data2},
    {"warm_start_smbo": search_data3},
    {"warm_start_smbo": search_data4},
    {"warm_start_smbo": search_data5},
    {"warm_start_smbo": search_data6},
    {"init_sample_size": 10000000},
    {"init_sample_size": 10000},
    {"init_sample_size": 1000000000},
    {"sampling": False},
    {"sampling": {"random": 1}},
    {"sampling": {"random": 100000000}},
    {"warnings": False},
    {"warnings": 1},
    {"warnings": 100000000000},
    {"rand_rest_p": 0},
    {"rand_rest_p": 0.5},
    {"rand_rest_p": 1},
    {"rand_rest_p": 10},
]


pytest_wrapper = ("opt_para", ensemble_optimizer_para)


@pytest.mark.parametrize(*pytest_wrapper)
def test_ensemble_para(opt_para):
    _base_para_test_func(opt_para, EnsembleOptimizer)
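

# Warm-starting with 30 rows of search data should populate the
# optimizer's sample memory with all 30 positions.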
def test_warm_start_0():
    opt = EnsembleOptimizer(search_space, warm_start_smbo=search_data1)

    assert len(opt.X_sample) == 30