# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import time
import pytest
import random
import numpy as np

from gradient_free_optimizers import BayesianOptimizer
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern, WhiteKernel, RBF
from ._base_para_test import _base_para_test_func
from gradient_free_optimizers import RandomSearchOptimizer


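# Objective functions that return a valid score about half of the time and
# NaN, -inf or +inf otherwise; the search data they produce is reused below to
# check that warm-starting the Bayesian optimizer copes with non-finite scores.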
def objective_function_nan(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.nan


def objective_function_m_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return -np.inf


def objective_function_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.inf


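# Search spaces and a simple quadratic objective used to generate the search
# data that warm-starts the Bayesian optimizer in the tests below.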
search_space_ = {"x1": np.arange(0, 20, 1)}


def objective_function(para):
    score = -para["x1"] * para["x1"]
    return score


search_space = {"x1": np.arange(-10, 11, 1)}
search_space2 = {"x1": np.arange(-10, 51, 1)}
search_space3 = {"x1": np.arange(-50, 11, 1)}


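# Generate warm-start material: six RandomSearchOptimizer runs of 30 iterations
# each, whose recorded search data (evaluated parameters and scores) is fed to
# BayesianOptimizer via the warm_start_smbo parameter further down.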
opt1 = RandomSearchOptimizer(search_space)
opt2 = RandomSearchOptimizer(search_space2)
opt3 = RandomSearchOptimizer(search_space3)
opt4 = RandomSearchOptimizer(search_space_)
opt5 = RandomSearchOptimizer(search_space_)
opt6 = RandomSearchOptimizer(search_space_)

opt1.search(objective_function, n_iter=30)
opt2.search(objective_function, n_iter=30)
opt3.search(objective_function, n_iter=30)
opt4.search(objective_function_nan, n_iter=30)
opt5.search(objective_function_m_inf, n_iter=30)
opt6.search(objective_function_inf, n_iter=30)

search_data1 = opt1.search_data
search_data2 = opt2.search_data
search_data3 = opt3.search_data
search_data4 = opt4.search_data
search_data5 = opt5.search_data
search_data6 = opt6.search_data


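# Custom surrogate model passed via the "gpr" entry in the parameter list below:
# a thin wrapper around sklearn's GaussianProcessRegressor (Matern + RBF +
# WhiteKernel kernel) exposing the fit(X, y) / predict(X, return_std=...)
# interface that the optimizer calls on its surrogate.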
class GPR:
    def __init__(self):
        nu_param = 0.5
        matern = Matern(
            # length_scale=length_scale_param,
            # length_scale_bounds=length_scale_bounds_param,
            nu=nu_param,
        )

        self.gpr = GaussianProcessRegressor(
            kernel=matern + RBF() + WhiteKernel(), n_restarts_optimizer=1
        )

    def fit(self, X, y):
        self.gpr.fit(X, y)

    def predict(self, X, return_std=False):
        return self.gpr.predict(X, return_std=return_std)


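# Parameter combinations exercised by the parametrized test below. They cover a
# custom surrogate model, several xi values, warm starts from clean and
# non-finite search data, sample-size limits, sampling settings, rand_rest_p
# values and the replacement flag. Each dict is forwarded to
# _base_para_test_func, which (as a rough sketch, assuming the helper passes the
# dict on as keyword arguments) amounts to something like:
#
#     opt = BayesianOptimizer(search_space, **opt_para)
#     opt.search(objective_function, n_iter=...)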
bayesian_optimizer_para = [
    ({"gpr": GPR()}),
    ({"xi": 0.001}),
    ({"xi": 0.5}),
    ({"xi": 0.9}),
    ({"warm_start_smbo": None}),
    ({"warm_start_smbo": search_data1}),
    ({"warm_start_smbo": search_data2}),
    ({"warm_start_smbo": search_data3}),
    ({"warm_start_smbo": search_data4}),
    ({"warm_start_smbo": search_data5}),
    ({"warm_start_smbo": search_data6}),
    ({"max_sample_size": 10000000}),
    ({"max_sample_size": 10000}),
    ({"max_sample_size": 1000000000}),
    ({"sampling": False}),
    ({"sampling": {"random": 1}}),
    ({"sampling": {"random": 100000000}}),
    ({"rand_rest_p": 0}),
    ({"rand_rest_p": 0.5}),
    ({"rand_rest_p": 1}),
    ({"rand_rest_p": 10}),
    ({"replacement": True}),
    ({"replacement": False}),
]


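# The tuple is unpacked into pytest.mark.parametrize, so test_bayesian_para runs
# once per entry of bayesian_optimizer_para.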
pytest_wrapper = ("opt_para", bayesian_optimizer_para)


@pytest.mark.parametrize(*pytest_wrapper)
def test_bayesian_para(opt_para):
    _base_para_test_func(opt_para, BayesianOptimizer)


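# Warm-starting from the 30-iteration random-search run should make all 30
# previously evaluated samples available to the Bayesian optimizer.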
def test_warm_start_0():
    opt = BayesianOptimizer(search_space, warm_start_smbo=search_data1)

    assert len(opt.X_sample) == 30