Completed: Push to master (ce1e03...f67568) by Simon, created 14:21

test_EvolutionStrategyOptimizer()    A

Complexity:   Conditions 2
Size:         Total Lines 10, Code Lines 9
Duplication:  Lines 0, Ratio 0 %
Importance:   Changes 0

Metric   Value
cc       2
eloc     9
nop      0
dl       0
loc      10
rs       9.95
c        0
b        0
f        0
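The figures in this panel can be reproduced locally. Below is a minimal sketch, assuming a radon-style analysis and an illustrative filename test_optimizers.py for the source listed underneath; the inspection service may compute its values differently.

# Sketch only: recompute complexity/size figures similar to the panel above.
# Assumes the radon package; "test_optimizers.py" is an illustrative filename.
from radon.complexity import cc_visit
from radon.raw import analyze

with open("test_optimizers.py") as f:
    source = f.read()

# Cyclomatic complexity per function, e.g. 2 for test_EvolutionStrategyOptimizer
# (one decision point from its for-loop, plus one).
for block in cc_visit(source):
    print(block.name, block.complexity)

# Raw size metrics for the whole module (loc, lloc, sloc, comments, blank, ...).
print(analyze(source))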
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import numpy as np

from hyperactive import Hyperactive

# Toy data and shared settings used by every optimizer test below.
X, y = np.array([0]), np.array([0])
memory = False
n_iter = 100


def sphere_function(para, X_train, y_train):
    # Negative sum of squares over all search parameters; the "iteration"
    # bookkeeping key is skipped.
    loss = []
    for key in para.keys():
        if key == "iteration":
            continue
        loss.append(para[key] * para[key])

    return -np.array(loss).sum()


# Search space: the sphere objective with two parameters sampled from [-10, 10).
search_config = {
    sphere_function: {"x1": np.arange(-10, 10, 0.1), "x2": np.arange(-10, 10, 0.1)}
}
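
# Hypothetical example of what the objective receives for one candidate:
# the "iteration" entry mirrors the key that sphere_function skips above;
# whether Hyperactive passes exactly this key is an assumption made here.
example_para = {"x1": 1.5, "x2": -2.0, "iteration": 7}
assert sphere_function(example_para, X, y) == -(1.5 ** 2 + 2.0 ** 2)  # -6.25
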
def test_HillClimbingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="HillClimbing")

    for epsilon in [0.01, 0.1, 1]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"HillClimbing": {"epsilon": epsilon}},
        )


def test_StochasticHillClimbingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="StochasticHillClimbing")

    for p_down in [0.01, 0.1, 1]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"StochasticHillClimbing": {"p_down": p_down}},
        )


def test_TabuOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="TabuSearch")

    for tabu_memory in [1, 3, 5]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"TabuSearch": {"tabu_memory": tabu_memory}},
        )


def test_RandomSearchOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="RandomSearch")


def test_RandomRestartHillClimbingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="RandomRestartHillClimbing")

    for n_restarts in [3, 5, 20]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"RandomRestartHillClimbing": {"n_restarts": n_restarts}},
        )

def test_RandomAnnealingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="RandomAnnealing")

    for start_temp in [0.1, 1, 10]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"RandomAnnealing": {"start_temp": start_temp}},
        )

def test_SimulatedAnnealingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="SimulatedAnnealing")

    for start_temp in [0.1, 1, 10]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"SimulatedAnnealing": {"start_temp": start_temp}},
        )

def test_StochasticTunnelingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="StochasticTunneling")

    for start_temp in [0.1, 1, 10]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"StochasticTunneling": {"start_temp": start_temp}},
        )


def test_ParallelTemperingOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="ParallelTempering")

    for n_swaps in [1, 10, 30]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"ParallelTempering": {"n_swaps": n_swaps}},
        )


def test_ParticleSwarmOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="ParticleSwarm")

    for n_particles in [2, 10, 30]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"ParticleSwarm": {"n_particles": n_particles}},
        )


def test_EvolutionStrategyOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=n_iter, optimizer="EvolutionStrategy")

    for individuals in [2, 10, 30]:
        opt = Hyperactive(X, y, memory=memory)
        opt.search(
            search_config,
            n_iter=n_iter,
            optimizer={"EvolutionStrategy": {"individuals": individuals}},
        )


def test_BayesianOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=int(n_iter / 33), optimizer="Bayesian")

    for warm_start_smbo in [True]:
        opt = Hyperactive(X, y, memory="long")
        opt.search(
            search_config,
            n_iter=int(n_iter / 33),
            optimizer={"Bayesian": {"warm_start_smbo": warm_start_smbo}},
        )


def test_TPE():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=int(n_iter / 5), optimizer="TPE")


def test_DecisionTreeOptimizer():
    opt = Hyperactive(X, y, memory=memory)
    opt.search(search_config, n_iter=int(n_iter / 33), optimizer="DecisionTree")
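
A minimal sketch for running the whole suite, assuming the listing above is saved under the illustrative name test_optimizers.py:

# Sketch: invoke pytest programmatically on the test module.
# "test_optimizers.py" is an assumed filename, not taken from the report above.
import pytest

pytest.main(["-q", "test_optimizers.py"])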