Passed
Push — master (7ebf7e...f9b33d) by Simon
created 01:42

objective_function()   A

Complexity
    Conditions: 1

Size
    Total Lines: 3
    Code Lines: 3

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0
Metric    Value
cc        1
eloc      3
nop       1
dl        0
loc       3
rs        10
c         0
b         0
f         0
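
The report does not expand its metric keys or name the tool that produced them. As a rough cross-check, comparable per-function numbers can be computed with a static-analysis package such as radon; this is a hedged sketch (radon is an assumption, not necessarily the tool behind the table above):

# Hedged sketch: reproduce comparable metrics with radon (assumption:
# the report does not name its analysis tool).
from radon.complexity import cc_visit
from radon.raw import analyze

source = '''def objective_function(para):
    score = -para["x1"] * para["x1"]
    return score
'''

# Cyclomatic complexity per block; a branch-free function scores 1,
# matching the "cc 1" / "Conditions: 1" entries above.
for block in cc_visit(source):
    print(block.name, block.complexity)

# Raw line counts (loc, lloc, sloc, comments, blank), comparable to the
# "Total Lines: 3" / "Code Lines: 3" entries.
print(analyze(source))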
# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import pytest
import numpy as np

from gradient_free_optimizers import (
    HillClimbingOptimizer,
    StochasticHillClimbingOptimizer,
    TabuOptimizer,
    RandomRestartHillClimbingOptimizer,
    RandomAnnealingOptimizer,
    SimulatedAnnealingOptimizer,
)


def objective_function(para):
    score = -para["x1"] * para["x1"]
    return score


search_space = {"x1": np.arange(-100, 101, 1)}


HillClimbing_para = [
    ({"epsilon": 0.0001}),
    ({"epsilon": 1}),
    ({"epsilon": 10}),
    ({"epsilon": 10000}),
    ({"distribution": "normal"}),
    ({"distribution": "laplace"}),
    ({"distribution": "logistic"}),
    ({"distribution": "gumbel"}),
    ({"n_neighbours": 1}),
    ({"n_neighbours": 10}),
    ({"n_neighbours": 100}),
    ({"rand_rest_p": 0}),
    ({"rand_rest_p": 0.5}),
    ({"rand_rest_p": 1}),
    ({"rand_rest_p": 10}),
]


pytest_wrapper = ("para", HillClimbing_para)

optimizers_local = (
    "Optimizer",
    [
        (HillClimbingOptimizer),
        (StochasticHillClimbingOptimizer),
        (TabuOptimizer),
        (SimulatedAnnealingOptimizer),
        (RandomRestartHillClimbingOptimizer),
        (RandomAnnealingOptimizer),
    ],
)


@pytest.mark.parametrize(*optimizers_local)
@pytest.mark.parametrize(*pytest_wrapper)
def test_HillClimbing_para(Optimizer, para):
    opt = Optimizer(search_space, **para)
    opt.search(
        objective_function,
        n_iter=10,
        memory=False,
        verbosity=False,
        initialize={"vertices": 1},
    )

    for optimizer in opt.optimizers:
        para_key = list(para.keys())[0]
        para_value = getattr(optimizer, para_key)

        assert para_value == para[para_key]
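
The two stacked parametrize decorators make pytest collect the Cartesian product of the 6 optimizer classes and the 15 parameter dictionaries, i.e. 90 test cases. Each case builds one optimizer, runs a short search on the concave objective -x1**2 (maximized at x1 = 0), and then checks that every core optimizer exposed via opt.optimizers kept the keyword it was constructed with. A minimal standalone sketch of one such case (assuming gradient_free_optimizers is installed; names mirror the test above):

# One parametrized case, run outside pytest (sketch; mirrors the test above).
import numpy as np
from gradient_free_optimizers import HillClimbingOptimizer

search_space = {"x1": np.arange(-100, 101, 1)}

opt = HillClimbingOptimizer(search_space, epsilon=10)
opt.search(
    lambda para: -para["x1"] * para["x1"],  # same objective as the test file
    n_iter=10,
    memory=False,
    verbosity=False,
    initialize={"vertices": 1},
)

# The wrapped core optimizer(s) should carry the constructor keyword unchanged.
for optimizer in opt.optimizers:
    assert optimizer.epsilon == 10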