Passed
Pull Request — master (#101)
created by Simon, 01:32

test_attributes_results_6()  (rating: A)

Complexity     Conditions: 1
Size           Total Lines: 24, Code Lines: 17
Duplication    Lines: 24, Ratio: 100 %
Importance     Changes: 0

Metric   Value
cc       1
eloc     17
nop      0
dl       24
loc      24
rs       9.55
c        0
b        0
f        0

These figures refer to test_attributes_results_6() in the file below; the review tool flags the same duplicated block in test_attributes_results_7() and test_attributes_results_8(), and a refactoring sketch follows the file listing.
import pytest
import numpy as np
import pandas as pd

from hyperactive.optimizers import HillClimbingOptimizer
from hyperactive.experiment import BaseExperiment
from hyperactive.search_config import SearchConfig


class Experiment(BaseExperiment):
    def objective_function(self, opt):
        score = -opt["x1"] * opt["x1"]
        return score


experiment = Experiment()

search_config = SearchConfig(
    x1=list(np.arange(0, 100, 1)),
)


def test_attributes_results_0():
    hyper = HillClimbingOptimizer()
    hyper.add_search(experiment, search_config, n_iter=100)
    hyper.run()

    assert isinstance(hyper.search_data(experiment), pd.DataFrame)


def test_attributes_results_1():
    hyper = HillClimbingOptimizer()
    hyper.add_search(experiment, search_config, n_iter=100)
    hyper.run()

    assert set(search_config.keys()) < set(hyper.search_data(experiment).columns)


def test_attributes_results_2():
    hyper = HillClimbingOptimizer()
    hyper.add_search(experiment, search_config, n_iter=100)
    hyper.run()

    assert "x1" in list(hyper.search_data(experiment).columns)


def test_attributes_results_3():
    hyper = HillClimbingOptimizer()
    hyper.add_search(experiment, search_config, n_iter=100)
    hyper.run()

    assert "score" in list(hyper.search_data(experiment).columns)


def test_attributes_results_4():
    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=1,
        initialize={"warm_start": [{"x1": 0}]},
    )
    hyper.run()

    assert 0 in list(hyper.search_data(experiment)["x1"].values)


def test_attributes_results_5():
    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=1,
        initialize={"warm_start": [{"x1": 10}]},
    )
    hyper.run()

    print(
        "\n x1_results \n",
        list(hyper.search_data(experiment)["x1"].values),
    )

    assert 10 in list(hyper.search_data(experiment)["x1"].values)


# review tool note: code duplication flagged for this test
def test_attributes_results_6():
    # note: this local objective_function and search_space are not used;
    # add_search below receives the module-level experiment and search_config
    def objective_function(opt):
        score = -opt["x1"] * opt["x1"]
        return score

    search_space = {
        "x1": list(np.arange(0, 10, 1)),
    }

    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=20,
        initialize={"random": 1},
        memory=False,
    )
    hyper.run()

    x1_results = list(hyper.search_data(experiment)["x1"].values)

    print("\n x1_results \n", x1_results)

    assert len(set(x1_results)) < len(x1_results)


# review tool note: code duplication flagged for this test
def test_attributes_results_7():
    def objective_function(opt):
        score = -opt["x1"] * opt["x1"]
        return score

    search_space = {
        "x1": list(np.arange(0, 10, 1)),
    }

    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=20,
    )
    hyper.run()

    search_data = hyper.search_data(experiment)
    with pytest.raises(Exception) as e_info:
        search_data["eval_times"]


# review tool note: code duplication flagged for this test
def test_attributes_results_8():
    def objective_function(opt):
        score = -opt["x1"] * opt["x1"]
        return score

    search_space = {
        "x1": list(np.arange(0, 10, 1)),
    }

    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=20,
    )
    hyper.run()

    search_data = hyper.search_data(experiment)
    with pytest.raises(Exception) as e_info:
        search_data["iter_times"]


def test_attributes_results_9():
    def objective_function(opt):
        score = -opt["x1"] * opt["x1"]
        return score

    search_space = {
        "x1": list(np.arange(0, 10, 1)),
    }

    hyper = HillClimbingOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=20,
    )
    hyper.run()

    search_data = hyper.search_data(experiment, times=True)
    search_data["iter_times"]
    search_data["eval_times"]


"""
def test_attributes_results_7():
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": np.arange(0, 10, 1),
    }

    opt = RandomSearchOptimizer(search_space)
    opt.search(
        experiment, n_iter=20, initialize={"random": 1}, memory=True
    )

    x1_results = list(opt.results["x1"].values)

    print("\n x1_results \n", x1_results)

    assert len(set(x1_results)) == len(x1_results)


def test_attributes_results_8():
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": np.arange(-10, 11, 1),
    }

    results = pd.DataFrame(np.arange(-10, 10, 1), columns=["x1"])
    results["score"] = 0

    opt = RandomSearchOptimizer(search_space)
    opt.search(
        experiment,
        n_iter=100,
        initialize={},
        memory=True,
        memory_warm_start=results,
    )

    print("\n opt.results \n", opt.results)

    x1_results = list(opt.results["x1"].values)

    assert 10 == x1_results[0]
"""
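The duplication flagged above comes from tests 6–8 repeating the same optimizer setup (including a local objective_function and search_space that are never used, since add_search receives the module-level experiment and search_config). Below is a minimal sketch of one way the repetition could be factored out, reusing only calls already present in this file; the helper run_hill_climbing and the rewritten tests are hypothetical and not part of this PR.

# Hypothetical helper: performs the setup repeated in tests 6-8.
# Extra keyword arguments (initialize, memory, ...) are forwarded to add_search.
def run_hill_climbing(n_iter, **kwargs):
    hyper = HillClimbingOptimizer()
    hyper.add_search(experiment, search_config, n_iter=n_iter, **kwargs)
    hyper.run()
    return hyper.search_data(experiment)


def test_attributes_results_6_refactored():
    # same check as the original test 6: with memory=False, at least one
    # x1 value is expected to repeat in the search data
    search_data = run_hill_climbing(n_iter=20, initialize={"random": 1}, memory=False)
    x1_results = list(search_data["x1"].values)
    assert len(set(x1_results)) < len(x1_results)


def test_attributes_results_7_refactored():
    # same check as the original test 7: without times=True, the search data
    # is expected to have no "eval_times" column
    search_data = run_hill_climbing(n_iter=20)
    with pytest.raises(Exception):
        search_data["eval_times"]

The assertions are unchanged from the originals; only the shared setup moves into the helper.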