Passed
Pull Request — master (#110)
created by unknown, 01:35

tests.test_api.test_max_score (Rating: A)

Complexity
    Total Complexity    2

Size/Duplication
    Total Lines         78
    Duplicated Lines    0%

Importance
    Changes             0

Metric  Value
eloc    54
dl      0
loc     78
rs      10
c       0
b       0
f       0
wmc     2

2 Functions

Rating  Name                 Duplication  Size  Complexity
A       test_max_score_0()   0            29    1
A       test_max_score_1()   0            30    1
import time
import numpy as np

from hyperactive.optimizers import HillClimbingOptimizer, RandomSearchOptimizer
from hyperactive.experiment import BaseExperiment
from hyperactive.search_config import SearchConfig


# Shared search space: a single parameter x1 over [0, 100) in steps of 0.1.
search_config = SearchConfig(
    x1=list(np.arange(0, 100, 0.1)),
)


def test_max_score_0():
    # Inverted parabola with its maximum (score 0) at x1 = 0.
    class Experiment(BaseExperiment):
        def objective_function(self, para):
            score = -para["x1"] * para["x1"]
            return score

    experiment = Experiment()

    max_score = -9999

    hyper = HillClimbingOptimizer(
        epsilon=0.01,
        rand_rest_p=0,
    )
    hyper.add_search(
        experiment,
        search_config,
        n_iter=100000,
        initialize={"warm_start": [{"x1": 99}]},
        max_score=max_score,
    )
    hyper.run()

    print("\n Results head \n", hyper.search_data(experiment).head())
    print("\n Results tail \n", hyper.search_data(experiment).tail())

    print("\nN iter:", len(hyper.search_data(experiment)))

    # The warm start at x1 = 99 scores -9801, which already exceeds
    # max_score = -9999, so the run should stop long before the hill
    # climber approaches the optimum near 0.
    assert -100 > hyper.best_score(experiment) > max_score


def test_max_score_1():
    class Experiment(BaseExperiment):
        def objective_function(self, para):
            score = -para["x1"] * para["x1"]
            time.sleep(0.01)  # 100000 full iterations would take ~1000 s
            return score

    experiment = Experiment()

    max_score = -9999

    c_time = time.perf_counter()
    hyper = RandomSearchOptimizer()
    hyper.add_search(
        experiment,
        search_config,
        n_iter=100000,
        initialize={"warm_start": [{"x1": 99}]},
        max_score=max_score,
    )
    hyper.run()
    diff_time = time.perf_counter() - c_time

    print("\n Results head \n", hyper.search_data(experiment).head())
    print("\n Results tail \n", hyper.search_data(experiment).tail())

    print("\nN iter:", len(hyper.search_data(experiment)))

    # With max_score triggering on the first evaluations, the whole run
    # must finish in well under a second despite n_iter=100000.
    assert diff_time < 1
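
Both tests rely on the same mechanism: max_score acts as a stopping threshold, and the run terminates as soon as any evaluated score exceeds it. A minimal sketch of that rule as a plain loop (a hypothetical re-implementation for illustration only; run_until_max_score is not part of hyperactive's API):

# Sketch of the max_score stopping rule (hypothetical, for illustration).
def run_until_max_score(objective, candidates, max_score):
    best = float("-inf")
    n_evaluated = 0
    for para in candidates:
        n_evaluated += 1
        score = objective(para)
        best = max(best, score)
        if score > max_score:
            # Stop at the first score above the threshold. The warm start
            # at x1 = 99 scores -9801 > -9999, so this fires immediately.
            break
    return best, n_evaluated

Under this rule the warm start at {"x1": 99} ends the search after a single evaluation, which is why test_max_score_1 can assert a sub-second runtime even though n_iter=100000.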