tests.test_hyper_gradient_trafo   A

Complexity
    Total Complexity    5

Size/Duplication
    Total Lines         119
    Duplicated Lines    0 %

Importance
    Changes             0

Metric    Value
wmc       5
eloc      68
dl        0
loc       119
rs        10
c         0
b         0
f         0

3 Functions

Rating    Name                      Duplication    Size    Complexity
A         objective_function_1()    0              7       1
A         objective_function_0()    0              4       1
A         test_trafo_0()            0              10      3
"""Test module for hyper gradient transformation functionality."""

import time

import numpy as np
import pandas as pd
import pytest

from hyperactive import Hyperactive


def objective_function_0(opt):
    """Return simple quadratic objective function for testing."""
    score = -opt["x1"] * opt["x1"]
    return score


search_space_0 = {
    "x1": list(np.arange(-5, 6, 1)),
}
search_space_1 = {
    "x1": list(np.arange(0, 6, 1)),
}
search_space_2 = {
    "x1": list(np.arange(-5, 1, 1)),
}


search_space_3 = {
    "x1": list(np.arange(-1, 1, 0.1)),
}
search_space_4 = {
    "x1": list(np.arange(-1, 0, 0.1)),
}
search_space_5 = {
    "x1": list(np.arange(0, 1, 0.1)),
}


search_space_para_0 = [
    (search_space_0),
    (search_space_1),
    (search_space_2),
    (search_space_3),
    (search_space_4),
    (search_space_5),
]


@pytest.mark.parametrize("search_space", search_space_para_0)
def test_trafo_0(search_space):
    """Test search space transformations with various ranges."""
    hyper = Hyperactive()
    hyper.add_search(objective_function_0, search_space, n_iter=25)
    hyper.run()

    # Every value recorded in the search data must be one of the values that
    # was passed in via the search space.
    for value in hyper.search_data(objective_function_0)["x1"].values:
        assert value in search_space["x1"]


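# Note added for clarity (not part of the original module): the float search
# spaces above are built with np.arange, so they contain non-round values such
# as 0.30000000000000004. The exact-membership assert in test_trafo_0 therefore
# only passes if the optimizer hands back the list entries themselves (i.e. it
# maps internal positions back to the provided values) rather than re-computed
# floating-point numbers.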
# ----------------- # Test if memory warm starts work as intended
# (a hedged usage sketch follows the warm-start data and parameter lists below)
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier

data = load_breast_cancer()
X, y = data.data, data.target


def objective_function_1(opt):
    """Decision tree objective function for testing with sklearn."""
    dtc = DecisionTreeClassifier(min_samples_split=opt["min_samples_split"])
    scores = cross_val_score(dtc, X, y, cv=10)
    time.sleep(0.1)

    return scores.mean()


search_space_0 = {
    "min_samples_split": list(np.arange(2, 12)),
}

search_space_1 = {
    "min_samples_split": list(np.arange(12, 22)),
}

search_space_2 = {
    "min_samples_split": list(np.arange(22, 32)),
}

memory_dict = {"min_samples_split": range(2, 12), "score": range(2, 12)}
memory_warm_start_0 = pd.DataFrame(memory_dict)

memory_dict = {"min_samples_split": range(12, 22), "score": range(12, 22)}
memory_warm_start_1 = pd.DataFrame(memory_dict)

memory_dict = {"min_samples_split": range(22, 32), "score": range(22, 32)}
memory_warm_start_2 = pd.DataFrame(memory_dict)

search_space_para_1 = [
    (search_space_0, memory_warm_start_0),
    (search_space_1, memory_warm_start_1),
    (search_space_2, memory_warm_start_2),
]

random_state_para_0 = [
    (0),
    (1),
    (2),
    (3),
    (4),
]

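# A minimal usage sketch (added for illustration, not part of the original
# test module, and underscore-prefixed so pytest does not collect it). It shows
# how one of the warm-start DataFrames defined above is meant to be passed to
# Hyperactive: memory_warm_start_0 has one column per search-space parameter
# ("min_samples_split") plus a "score" column, the same layout that
# hyper.search_data() is expected to return. The timing-based assertions on
# this behavior live in the disabled tests below.
def _memory_warm_start_usage_sketch():
    hyper = Hyperactive()
    hyper.add_search(
        objective_function_1,
        search_space_0,
        n_iter=10,
        memory_warm_start=memory_warm_start_0,
    )
    hyper.run()
    # Positions already listed in memory_warm_start_0 can be looked up instead
    # of being re-evaluated, which is what the disabled tests try to measure.
    return hyper.search_data(objective_function_1)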
# ----------------- # Test that wrong memory warm starts do not produce a speed-up
""" test is possible in future gfo versions
@pytest.mark.parametrize("random_state", random_state_para_0)
@pytest.mark.parametrize("search_space, memory_warm_start", search_space_para_1)
def test_trafo_1(random_state, search_space, memory_warm_start):
    c_time_0 = time.perf_counter()
    hyper = Hyperactive()
    hyper.add_search(
        objective_function_1,
        search_space,
        n_iter=10,
        random_state=random_state,
        initialize={"random": 1},
    )
    hyper.run()
    d_time_0 = time.perf_counter() - c_time_0

    c_time_1 = time.perf_counter()
    hyper = Hyperactive()
    hyper.add_search(
        objective_function_1,
        search_space,
        n_iter=10,
        random_state=random_state,
        initialize={"random": 1},
        memory_warm_start=memory_warm_start,
    )
    hyper.run()
    d_time_1 = time.perf_counter() - c_time_1

    assert d_time_1 < d_time_0 * 0.5


search_space_0 = {
    "min_samples_split": list(np.arange(2, 12)),
}

search_space_1 = {
    "min_samples_split": list(np.arange(12, 22)),
}

search_space_2 = {
    "min_samples_split": list(np.arange(22, 32)),
}

memory_dict = {"min_samples_split": range(12, 22), "score": range(2, 12)}
memory_warm_start_0 = pd.DataFrame(memory_dict)

memory_dict = {"min_samples_split": range(22, 32), "score": range(12, 22)}
memory_warm_start_1 = pd.DataFrame(memory_dict)

memory_dict = {"min_samples_split": range(2, 12), "score": range(22, 32)}
memory_warm_start_2 = pd.DataFrame(memory_dict)

search_space_para_2 = [
    (search_space_0, memory_warm_start_0),
    (search_space_1, memory_warm_start_1),
    (search_space_2, memory_warm_start_2),
]

random_state_para_0 = [
    (0),
    (1),
    (2),
    (3),
    (4),
]


@pytest.mark.parametrize("random_state", random_state_para_0)
@pytest.mark.parametrize("search_space, memory_warm_start", search_space_para_2)
def test_trafo_2(random_state, search_space, memory_warm_start):
    c_time_0 = time.perf_counter()
    hyper = Hyperactive()
    hyper.add_search(
        objective_function_1,
        search_space,
        n_iter=25,
        random_state=random_state,
        initialize={"random": 1},
    )
    hyper.run()
    d_time_0 = time.perf_counter() - c_time_0

    c_time_1 = time.perf_counter()
    hyper = Hyperactive()
    hyper.add_search(
        objective_function_1,
        search_space,
        n_iter=25,
        random_state=random_state,
        initialize={"random": 1},
        memory_warm_start=memory_warm_start,
    )
    hyper.run()
    d_time_1 = time.perf_counter() - c_time_1

    assert not (d_time_1 < d_time_0 * 0.8)
"""
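# Closing note (added, hedged): test_trafo_1 and test_trafo_2 above are kept
# inside a string until a future gfo (gradient-free-optimizers) release
# supports them, so only test_trafo_0 is collected. Assuming the module path
# from the report header, it can be run on its own with:
#   pytest tests/test_hyper_gradient_trafo.py -k test_trafo_0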