1
|
|
|
import pytest |
2
|
|
|
from tqdm import tqdm |
3
|
|
|
import numpy as np |
4
|
|
|
import pandas as pd |
5
|
|
|
from functools import reduce |
6
|
|
|
|
7
|
|
|
from gradient_free_optimizers import GridSearchOptimizer |
8
|
|
|
|
9
|
|
|
from surfaces.test_functions import SphereFunction, RastriginFunction |
10
|
|
|
|
11
|
|
|
|
12
|
|
|
# Parametrization payload for `test_global_perf_0`: the argument name plus a
# list of 1-D objective-function instances (score metric) to test against.
obj_func_l = (
    "objective_function",
    [fn_class(n_dim=1, metric="score") for fn_class in (SphereFunction, RastriginFunction)],
)
19
|
|
|
|
20
|
|
|
|
21
|
|
|
@pytest.mark.parametrize(*obj_func_l)
def test_global_perf_0(objective_function):
    """Exhaustive 1-D grid search must locate the global optimum.

    The grid optimizer is given exactly as many iterations as there are
    points in the search space, so it visits every point; the mean best
    score over 10 random seeds must therefore be ~0 (optimum of both
    test functions under the "score" metric).
    """
    search_space = {"x0": np.arange(-10, 10, 0.1)}
    initialize = {"vertices": 2}

    # BUGFIX: np.array(search_space.values()) wrapped the dict_values view
    # into a 0-d object array; wrap in list() so the grid itself is printed.
    values_arr = np.array(list(search_space.values()))
    print(
        "\n np.array(search_space.values()) \n",
        values_arr,
        values_arr.shape,
    )

    # Total number of grid points = product of the per-dimension sizes.
    dim_sizes_list = [len(array) for array in search_space.values()]
    ss_size = reduce((lambda x, y: x * y), dim_sizes_list)

    n_opts = 10
    # One iteration per grid point -> the whole space is enumerated.
    n_iter = ss_size

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = GridSearchOptimizer(
            search_space, initialize=initialize, random_state=rnd_st
        )
        opt.search(
            objective_function,
            n_iter=n_iter,
            memory=False,
            verbosity=False,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("\n score_mean", score_mean)
    print("\n n_iter", n_iter)

    # Small tolerance: the grid may not contain the exact optimum coordinate.
    assert score_mean > -0.001
57
|
|
|
|
58
|
|
|
|
59
|
|
|
# Parametrization payload for `test_global_perf_1`: the argument name plus a
# list of 2-D objective-function instances (score metric) to test against.
obj_func_l = (
    "objective_function",
    [fn_class(n_dim=2, metric="score") for fn_class in (SphereFunction, RastriginFunction)],
)
66
|
|
|
|
67
|
|
|
|
68
|
|
|
@pytest.mark.parametrize(*obj_func_l)
def test_global_perf_1(objective_function):
    """Exhaustive 2-D grid search must locate the global optimum.

    The optimizer runs for exactly as many iterations as there are points
    in the 2-D grid, so every point is visited; averaged over 10 random
    seeds the best score must be ~0.
    """
    search_space = {
        "x0": np.arange(-2, 1, 0.1),
        "x1": np.arange(-1, 2, 0.1),
    }
    initialize = {"vertices": 2}

    # Size of the full grid: product of the per-dimension lengths.
    ss_size = reduce((lambda x, y: x * y), (len(axis) for axis in search_space.values()))

    n_opts = 10
    n_iter = ss_size  # enumerate the entire search space

    def _best_score(seed):
        # One full grid search with the given random seed.
        optimizer = GridSearchOptimizer(
            search_space, initialize=initialize, random_state=seed
        )
        optimizer.search(
            objective_function,
            n_iter=n_iter,
            memory=False,
            verbosity=False,
        )
        return optimizer.best_score

    scores = [_best_score(seed) for seed in tqdm(range(n_opts))]
    score_mean = np.array(scores).mean()

    print("\n score_mean", score_mean)

    # Tolerance because the grid may not contain the exact optimum coordinate.
    assert score_mean > -0.001
100
|
|
|
|