Completed
Push — master ( aee0ed...be0089 )
by Simon
01:27
created

tests.test_optimizers   A

Complexity

Total Complexity 27

Size/Duplication

Total Lines 187
Duplicated Lines 16.04 %

Importance

Changes 0
Metric Value
eloc 125
dl 30
loc 187
rs 10
c 0
b 0
f 0
wmc 27

14 Functions

Rating   Name   Duplication   Size   Complexity  
A sphere_function() 0 8 3
A test_SimulatedAnnealingOptimizer() 10 10 2
A test_StochasticTunnelingOptimizer() 10 10 2
A test_ParticleSwarmOptimizer() 0 10 2
A test_RandomRestartHillClimbingOptimizer() 0 10 2
A test_RandomAnnealingOptimizer() 10 10 2
A test_HillClimbingOptimizer() 0 10 2
A test_ParallelTemperingOptimizer() 0 10 2
A test_StochasticHillClimbingOptimizer() 0 10 2
A test_RandomSearchOptimizer() 0 3 1
A test_TabuOptimizer() 0 10 2
A test_EvolutionStrategyOptimizer() 0 10 2
A test_BayesianOptimizer() 0 10 2
A test_TPE() 0 3 1

How to fix   Duplicated Code   

Duplicated Code

Duplicate code is one of the most pungent code smells. A rule that is often used is to re-structure code once it is duplicated in three or more places.

Common duplication problems, and corresponding solutions are:

1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
import numpy as np
6
7
from sklearn.datasets import load_iris
8
from sklearn.model_selection import cross_val_score
9
from sklearn.tree import DecisionTreeClassifier
10
from hyperactive import Hyperactive
11
12
data = load_iris()
13
X = data.data
14
y = data.target
15
16
memory = False
17
18
n_iter = 100
19
20
21
def sphere_function(para, X_train, y_train):
    """Negative sphere objective: -(sum of squared parameter values).

    Keys named "iteration" are bookkeeping and excluded from the score.
    X_train / y_train are accepted for interface compatibility but unused.
    """
    squares = [value * value for key, value in para.items() if key != "iteration"]
    return -np.array(squares).sum()
29
30
31
# Search space: optimize sphere_function over a 2-D grid,
# x1 and x2 each sampled from [-10, 10) in steps of 0.1.
search_config = {
    sphere_function: {"x1": np.arange(-10, 10, 0.1), "x2": np.arange(-10, 10, 0.1)}
}
34
35
36
def test_HillClimbingOptimizer():
    """Run HillClimbing with defaults, then across several epsilon values."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="HillClimbing"
    )

    for eps in (0.01, 0.1, 1):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"HillClimbing": {"epsilon": eps}},
        )
47
48
49
def test_StochasticHillClimbingOptimizer():
    """Run StochasticHillClimbing with defaults, then across p_down values."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="StochasticHillClimbing"
    )

    for prob in (0.01, 0.1, 1):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"StochasticHillClimbing": {"p_down": prob}},
        )
60
61
62
def test_TabuOptimizer():
    """Run TabuSearch with defaults, then across tabu-memory sizes."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="TabuSearch"
    )

    for size in (1, 3, 5):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"TabuSearch": {"tabu_memory": size}},
        )
73
74
75
def test_RandomSearchOptimizer():
    """Smoke-test RandomSearch with its default configuration."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="RandomSearch"
    )
78
79
80
def test_RandomRestartHillClimbingOptimizer():
    """Run RandomRestartHillClimbing with defaults, then across restart counts."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="RandomRestartHillClimbing"
    )

    for restarts in (3, 5, 20):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"RandomRestartHillClimbing": {"n_restarts": restarts}},
        )
91
92
93 View Code Duplication
def test_RandomAnnealingOptimizer():
    """Run RandomAnnealing with defaults, then across start temperatures."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="RandomAnnealing"
    )

    for temp in (0.1, 1, 10):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"RandomAnnealing": {"start_temp": temp}},
        )
104
105
106 View Code Duplication
def test_SimulatedAnnealingOptimizer():
    """Run SimulatedAnnealing with defaults, then across start temperatures."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="SimulatedAnnealing"
    )

    for temp in (0.1, 1, 10):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"SimulatedAnnealing": {"start_temp": temp}},
        )
117
118
119 View Code Duplication
def test_StochasticTunnelingOptimizer():
    """Run StochasticTunneling with defaults, then across start temperatures."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="StochasticTunneling"
    )

    for temp in (0.1, 1, 10):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"StochasticTunneling": {"start_temp": temp}},
        )
130
131
132
def test_ParallelTemperingOptimizer():
    """Run ParallelTempering with defaults, then across swap counts."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="ParallelTempering"
    )

    for swaps in (1, 10, 30):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"ParallelTempering": {"n_swaps": swaps}},
        )
143
144
145
def test_ParticleSwarmOptimizer():
    """Run ParticleSwarm with defaults, then across swarm sizes."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="ParticleSwarm"
    )

    for particles in (2, 10, 30):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"ParticleSwarm": {"n_particles": particles}},
        )
156
157
158
def test_EvolutionStrategyOptimizer():
    """Run EvolutionStrategy with defaults, then across population sizes."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=n_iter, optimizer="EvolutionStrategy"
    )

    for population in (2, 10, 30):
        Hyperactive(X, y, memory=memory).search(
            search_config,
            n_iter=n_iter,
            optimizer={"EvolutionStrategy": {"individuals": population}},
        )
169
170
171
def test_BayesianOptimizer():
    """Run Bayesian optimization on a reduced budget, then with SMBO warm start."""
    short_iter = int(n_iter / 33)  # Bayesian steps are expensive; keep the run short
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=short_iter, optimizer="Bayesian"
    )

    for warm_start_smbo in (True,):
        # NOTE(review): memory="long" here, unlike the other tests —
        # presumably required by the SMBO warm start; confirm intent.
        Hyperactive(X, y, memory="long").search(
            search_config,
            n_iter=short_iter,
            optimizer={"Bayesian": {"warm_start_smbo": warm_start_smbo}},
        )
182
183
184
def test_TPE():
    """Smoke-test the TPE optimizer on a reduced iteration budget."""
    Hyperactive(X, y, memory=memory).search(
        search_config, n_iter=int(n_iter / 5), optimizer="TPE"
    )
187