Code Duplication    Length = 28 lines in 2 locations

tests/test_optimizers/test_convex_convergence.py: 2 locations

@@ 38-65 (lines=28) @@
    assert -500 < score_mean


@pytest.mark.parametrize(*optimizers_SBOM)
def test_convex_convergence_SBOM(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-33, 33, 1)}
    initialize = {"vertices": 2}

    n_opts = 10

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=30,
            random_state=rnd_st,
            memory=False,
            verbosity={"print_results": False, "progress_bar": False},
            initialize=initialize,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()
    print("scores", scores)

    assert -500 < score_mean

@@ 8-35 (lines=28) @@
from ._parametrize import optimizers_noSBOM, optimizers_SBOM


@pytest.mark.parametrize(*optimizers_noSBOM)
def test_convex_convergence_noSBOM(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-33, 33, 1)}
    initialize = {"vertices": 2}

    n_opts = 33

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=50,
            random_state=rnd_st,
            memory=False,
            verbosity={"print_results": False, "progress_bar": False},
            initialize=initialize,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()
    print("scores", scores)

    assert -500 < score_mean


@pytest.mark.parametrize(*optimizers_SBOM)
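
One possible way to remove this duplication, sketched below under assumptions: the two tests differ only in the optimizer group, n_opts, and n_iter, so the shared body could move into a helper and each test becomes a thin wrapper. The helper name _run_convergence_check is hypothetical, and the top-of-file imports are assumed to match what the existing tests already use (numpy, pytest, tqdm, and the ._parametrize fixtures).

    import numpy as np
    import pytest
    from tqdm import tqdm

    from ._parametrize import optimizers_noSBOM, optimizers_SBOM


    def _run_convergence_check(Optimizer, n_opts, n_iter):
        # Hypothetical helper: the body shared by both duplicated tests.
        def objective_function(para):
            return -para["x1"] * para["x1"]

        search_space = {"x1": np.arange(-33, 33, 1)}
        initialize = {"vertices": 2}

        scores = []
        for rnd_st in tqdm(range(n_opts)):
            opt = Optimizer(search_space)
            opt.search(
                objective_function,
                n_iter=n_iter,
                random_state=rnd_st,
                memory=False,
                verbosity={"print_results": False, "progress_bar": False},
                initialize=initialize,
            )
            scores.append(opt.best_score)

        # Same convergence criterion as both original tests.
        assert -500 < np.array(scores).mean()


    @pytest.mark.parametrize(*optimizers_noSBOM)
    def test_convex_convergence_noSBOM(Optimizer):
        _run_convergence_check(Optimizer, n_opts=33, n_iter=50)


    @pytest.mark.parametrize(*optimizers_SBOM)
    def test_convex_convergence_SBOM(Optimizer):
        _run_convergence_check(Optimizer, n_opts=10, n_iter=30)

This keeps the per-group n_opts and n_iter values from the report unchanged while collapsing the 28 duplicated lines into a single shared function.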