Code Duplication    Length = 29-34 lines in 2 locations

tests/test_optimizers/test_random_restart.py 1 location

@@ 7-40 (lines=34) @@
from ._parametrize import optimizers_local


@pytest.mark.parametrize(*optimizers_local)
def test_convex_convergence_singleOpt(Optimizer):
    def objective_function(para):
        score = -(para["x1"] * para["x1"])
        return score

    search_space = {
        "x1": np.arange(-100, 101, 1),
    }

    init1 = {
        "x1": -1000,
    }
    initialize = {"warm_start": [init1]}

    n_opts = 33

    scores = []
    for rnd_st in range(n_opts):
        opt = Optimizer(search_space, initialize=initialize, rand_rest_p=1)
        opt.search(
            objective_function,
            n_iter=20,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("score_mean", score_mean)

    assert score_mean > -400

tests/test_performance/test_local_opt.py 1 location

@@ 24-52 (lines=29) @@
)


@pytest.mark.parametrize(*opt_local_l)
def test_local_perf(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 2}

    n_opts = 33
    n_iter = 100

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space, initialize=initialize)
        opt.search(
            objective_function,
            n_iter=n_iter,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("\n score_mean", score_mean)

    assert score_mean > -5
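
One way to reduce this duplication would be to factor the repeated search loop into a shared test helper. The sketch below is illustrative only: the helper name run_repeated_search and its module location are hypothetical (not part of the repository), it is based on the API usage visible in the two snippets above (Optimizer(search_space, initialize=...), opt.search(...), opt.best_score), and it omits the tqdm progress bar used in test_local_perf.

import numpy as np


def run_repeated_search(Optimizer, objective_function, search_space,
                        initialize, n_opts, n_iter, **opt_kwargs):
    # Run n_opts independent searches, one per random_state, and return
    # the mean of the best scores found.
    scores = []
    for rnd_st in range(n_opts):
        opt = Optimizer(search_space, initialize=initialize, **opt_kwargs)
        opt.search(
            objective_function,
            n_iter=n_iter,
            random_state=rnd_st,
            memory=False,
            verbosity=False,
        )
        scores.append(opt.best_score)
    return np.array(scores).mean()

With such a helper, each duplicated test would reduce to its objective function, its parameters, and its assertion, e.g. for the first location:

@pytest.mark.parametrize(*optimizers_local)
def test_convex_convergence_singleOpt(Optimizer):
    def objective_function(para):
        return -(para["x1"] * para["x1"])

    score_mean = run_repeated_search(
        Optimizer,
        objective_function,
        search_space={"x1": np.arange(-100, 101, 1)},
        initialize={"warm_start": [{"x1": -1000}]},
        n_opts=33,
        n_iter=20,
        rand_rest_p=1,
    )
    assert score_mean > -400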