tests.test_optimizers.test_parameter.test_forest_optimizer_para_init   A

Complexity
    Total Complexity     9

Size/Duplication
    Total Lines          118
    Duplicated Lines     0 %

Importance
    Changes              0

Metric   Value
eloc     81
dl       0
loc      118
rs       10
c        0
b        0
f        0
wmc      9

6 Functions

Rating   Name                          Duplication   Size   Complexity
A        objective_function()          0             3      1
A        objective_function_m_inf()    0             7      2
A        objective_function_inf()      0             7      2
A        objective_function_nan()      0             7      2
A        test_dto_para()               0             3      1
A        test_warm_start_0()           0             4      1
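The per-function complexities above sum to the module's total complexity of 9. To run only this module locally, a small driver along the following lines should work; note that the file path is an assumption inferred from the dotted module name at the top of this page and may differ in your checkout:

    # Hypothetical driver that runs only this parametrized test module.
    # The path below is assumed from the dotted module name shown above.
    import pytest

    if __name__ == "__main__":
        pytest.main([
            "-q",
            "tests/test_optimizers/test_parameter/test_forest_optimizer_para_init.py",
        ])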
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import time
import pytest
import random
import numpy as np

from gradient_free_optimizers import ForestOptimizer
from ._base_para_test import _base_para_test_func
from gradient_free_optimizers import RandomSearchOptimizer


# Objective functions that randomly return a non-finite score (nan, -inf, inf),
# used below to generate warm-start data containing invalid values.
def objective_function_nan(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.nan


def objective_function_m_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return -np.inf


def objective_function_inf(para):
    rand = random.randint(0, 1)

    if rand == 0:
        return 1
    else:
        return np.inf


search_space_ = {"x1": np.arange(0, 20, 1)}


def objective_function(para):
    score = -para["x1"] * para["x1"]
    return score


search_space = {"x1": np.arange(-10, 11, 1)}
search_space2 = {"x1": np.arange(-10, 51, 1)}
search_space3 = {"x1": np.arange(-50, 11, 1)}


# Run RandomSearchOptimizer over each search space and objective function to
# collect search data that is later passed to ForestOptimizer via warm_start_smbo.
opt1 = RandomSearchOptimizer(search_space)
opt2 = RandomSearchOptimizer(search_space2)
opt3 = RandomSearchOptimizer(search_space3)
opt4 = RandomSearchOptimizer(search_space_)
opt5 = RandomSearchOptimizer(search_space_)
opt6 = RandomSearchOptimizer(search_space_)

opt1.search(objective_function, n_iter=30)
opt2.search(objective_function, n_iter=30)
opt3.search(objective_function, n_iter=30)
opt4.search(objective_function_nan, n_iter=30)
opt5.search(objective_function_m_inf, n_iter=30)
opt6.search(objective_function_inf, n_iter=30)

search_data1 = opt1.search_data
search_data2 = opt2.search_data
search_data3 = opt3.search_data
search_data4 = opt4.search_data
search_data5 = opt5.search_data
search_data6 = opt6.search_data


# Each entry is a single ForestOptimizer init parameter to be tested in isolation.
dto_para = [
    ({"tree_regressor": "random_forest"}),
    ({"tree_regressor": "extra_tree"}),
    ({"tree_regressor": "gradient_boost"}),
    ({"xi": 0.001}),
    ({"xi": 0.5}),
    ({"xi": 0.9}),
    ({"warm_start_smbo": None}),
    ({"warm_start_smbo": search_data1}),
    ({"warm_start_smbo": search_data2}),
    ({"warm_start_smbo": search_data3}),
    ({"warm_start_smbo": search_data4}),
    ({"warm_start_smbo": search_data5}),
    ({"warm_start_smbo": search_data6}),
    ({"max_sample_size": 10000000}),
    ({"max_sample_size": 10000}),
    ({"max_sample_size": 1000000000}),
    ({"sampling": False}),
    ({"sampling": {"random": 1}}),
    ({"sampling": {"random": 100000000}}),
    ({"rand_rest_p": 0}),
    ({"rand_rest_p": 0.5}),
    ({"rand_rest_p": 1}),
    ({"rand_rest_p": 10}),
    ({"replacement": True}),
    ({"replacement": False}),
]


pytest_wrapper = ("opt_para", dto_para)


@pytest.mark.parametrize(*pytest_wrapper)
def test_dto_para(opt_para):
    _base_para_test_func(opt_para, ForestOptimizer)


def test_warm_start_0():
    # Warm-starting with 30 previous samples should populate X_sample with 30 entries.
    opt = ForestOptimizer(search_space, warm_start_smbo=search_data1)

    assert len(opt.X_sample) == 30
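test_dto_para() delegates to the shared helper _base_para_test_func imported from ._base_para_test, whose implementation is not shown on this page. As a rough, hypothetical sketch, assuming the helper only checks that the optimizer can be constructed with the given init parameter and that a short search completes without errors, it could look like this:

    # Hypothetical sketch of the shared helper from ._base_para_test.
    # Assumption: construct the optimizer with the single parameter under test
    # and run a brief search; any exception fails the parametrized case.
    import numpy as np


    def _base_para_test_func(opt_para, optimizer_class):
        search_space = {"x1": np.arange(-10, 11, 1)}

        def objective_function(para):
            return -para["x1"] * para["x1"]

        # Forward the single init parameter under test, e.g. {"xi": 0.5}.
        opt = optimizer_class(search_space, **opt_para)
        opt.search(objective_function, n_iter=15)

With this pattern, each entry of dto_para exercises exactly one ForestOptimizer init parameter, so a failing case points directly at the parameter that broke construction or the search loop.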