Passed
Push — master (23cc6a...a392e7)
by Simon
04:42 queued 12s

tests.test_optimization_strategies.test_early_stopping   A

Complexity
    Total Complexity    1

Size/Duplication
    Total Lines         56
    Duplicated Lines    0%

Importance
    Changes             0

Metric    Value
eloc      38
dl        0
loc       56
rs        10
c         0
b         0
f         0
wmc       1

1 Function

Rating    Name                                Duplication    Size    Complexity
A         test_strategy_early_stopping_0()    0              44      1
import pytest
import numpy as np


from hyperactive import Hyperactive
from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import RandomSearchOptimizer

from ._parametrize import optimizers


@pytest.mark.parametrize(*optimizers)
def test_strategy_early_stopping_0(Optimizer):
    # Parabola with its maximum at x1 == 0, so the warm start below begins
    # at the best possible score and no later iteration can improve on it.
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(0, 100, 0.1)),
    }

    # Stop the first optimizer after 5 consecutive iterations without improvement.
    n_iter_no_change = 5
    early_stopping = {
        "n_iter_no_change": n_iter_no_change,
    }

    optimizer1 = Optimizer()
    optimizer2 = RandomSearchOptimizer()

    # Two-stage strategy: only the first optimizer gets the early-stopping rule;
    # each stage is allotted half of the total iteration budget.
    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(optimizer1, duration=0.5, early_stopping=early_stopping)
    opt_strat.add_optimizer(optimizer2, duration=0.5)

    n_iter = 30

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=opt_strat,
        n_iter=n_iter,
        initialize={"warm_start": [{"x1": 0}]},
    )
    hyper.run()

    # Retrieve the optimizer instances that actually ran inside the strategy.
    optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
    optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]

    search_data = optimizer1.search_data
    n_performed_iter = len(search_data)

    print("\n n_performed_iter \n", n_performed_iter)
    print("\n n_iter_no_change \n", n_iter_no_change)

    # The warm start already hits the optimum, so the first optimizer performs
    # the initial iteration plus n_iter_no_change iterations without improvement.
    assert n_performed_iter == (n_iter_no_change + 1)
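For reference, the asserted value n_iter_no_change + 1 follows from the usual early-stopping bookkeeping: the warm start at x1 = 0 records the best possible score of -x1 * x1 on the first iteration, and every later iteration fails to improve on it. The following is a minimal, self-contained sketch of that counter logic in plain Python; it does not use the Hyperactive API, and the counting convention (the counter resets on improvement and the run stops once it reaches n_iter_no_change) is an assumption chosen to match the asserted value, not documented library behavior. The objective and candidate values are illustrative only.

# Sketch of early-stopping bookkeeping (illustrative, not the Hyperactive implementation).
def run_with_early_stopping(objective, candidates, n_iter, n_iter_no_change):
    best_score = float("-inf")
    iters_without_improvement = 0
    performed = 0

    for i in range(n_iter):
        score = objective(candidates[i % len(candidates)])
        performed += 1
        if score > best_score:
            # New best score: reset the no-improvement counter.
            best_score = score
            iters_without_improvement = 0
        else:
            iters_without_improvement += 1
        if iters_without_improvement >= n_iter_no_change:
            break
    return performed


# Starting at the optimum of -x1 * x1 (x1 = 0): the first evaluation sets the
# best score and nothing improves afterwards, so the loop performs
# 1 + n_iter_no_change iterations, matching the assertion in the test above.
performed = run_with_early_stopping(
    objective=lambda x1: -x1 * x1,
    candidates=[0.0, 1.0, 2.0, 3.0],
    n_iter=30,
    n_iter_no_change=5,
)
assert performed == 5 + 1
print(performed)  # 6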