import pytest
from tqdm import tqdm
import numpy as np

from ._parametrize import (
    optimizers_singleOpt,
    optimizers_PopBased,
    optimizers_SBOM,
)


@pytest.mark.parametrize(*optimizers_singleOpt)
def test_convex_convergence_singleOpt(Optimizer):
    # convex objective with its maximum at x1 = 0
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 1}

    n_opts = 33

    # run the optimizer with several random seeds and average the best scores
    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=100,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
            initialize=initialize,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    # the mean best score should end up close to the optimum at 0
    assert score_mean > -25


@pytest.mark.parametrize(*optimizers_PopBased)
def test_convex_convergence_popBased(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 2, "grid": 2}

    n_opts = 33

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=80,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
            initialize=initialize,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    assert score_mean > -25


@pytest.mark.parametrize(*optimizers_SBOM)
def test_convex_convergence_SBOM(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-33, 33, 1)}
    initialize = {"vertices": 2}

    n_opts = 10

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=30,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
            initialize=initialize,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("scores", scores)
    print("score_mean", score_mean)
    assert score_mean > -25