@@ 24-51 (lines=28) @@

```python
)


@pytest.mark.parametrize(*opt_local_l)
def test_local_perf(Optimizer):
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 2}

    n_opts = 33
    n_iter = 100

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space, initialize=initialize, random_state=rnd_st)
        opt.search(
            objective_function,
            n_iter=n_iter,
            memory=False,
            verbosity=False,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("\n score_mean", score_mean)

    assert score_mean > -5
```
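In this block, `@pytest.mark.parametrize(*opt_local_l)` unpacks a pre-built tuple of (argument name, list of optimizer classes), so the same test body runs once per optimizer. The list itself is defined above the excerpt and is not part of this report; a minimal sketch of the pattern, with a hypothetical list assuming optimizer classes from the gradient-free-optimizers package, looks like this:

```python
import pytest

from gradient_free_optimizers import (
    HillClimbingOptimizer,
    RandomSearchOptimizer,
)

# Hypothetical parametrization tuple: the first element is the argument name
# expected by the test function, the second is the list of optimizer classes
# to run the test against. The real opt_local_l is defined outside this excerpt.
opt_local_l = (
    "Optimizer",
    [HillClimbingOptimizer, RandomSearchOptimizer],
)


@pytest.mark.parametrize(*opt_local_l)
def test_example(Optimizer):
    # pytest invokes this once per entry in the list above.
    assert callable(Optimizer)
```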
@@ 24-53 (lines=30) @@

```python
)


@pytest.mark.parametrize(*opt_global_l)
def test_global_perf(Optimizer):
    ackley_function = RastriginFunction(n_dim=1, metric="score")

    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 2}

    n_opts = 33
    n_iter = 100

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space, initialize=initialize, random_state=rnd_st)
        opt.search(
            objective_function,
            n_iter=n_iter,
            memory=False,
            verbosity=False,
        )

        scores.append(opt.best_score)
    score_mean = np.array(scores).mean()

    print("\n score_mean", score_mean)

    assert score_mean > -5
```
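The two reported blocks differ only in the parametrization list, the test name, and an extra `RastriginFunction` instance that the shown objective does not use; the benchmark loop is otherwise identical. One way to remove the duplication would be to move that loop into a shared helper, as in the sketch below; the helper name `run_perf_check` is hypothetical, and `opt_local_l` / `opt_global_l` are assumed to be the parametrization lists already defined in the test module:

```python
import numpy as np
import pytest
from tqdm import tqdm


def run_perf_check(Optimizer, n_opts=33, n_iter=100, min_mean_score=-5):
    # Shared benchmark body: average the best score found over several seeds
    # on a simple concave parabola and assert it clears a threshold.
    def objective_function(para):
        return -para["x1"] * para["x1"]

    search_space = {"x1": np.arange(-100, 101, 1)}
    initialize = {"vertices": 2}

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space, initialize=initialize, random_state=rnd_st)
        opt.search(objective_function, n_iter=n_iter, memory=False, verbosity=False)
        scores.append(opt.best_score)

    score_mean = np.array(scores).mean()
    print("\n score_mean", score_mean)
    assert score_mean > min_mean_score


# opt_local_l / opt_global_l are the parametrization lists from the original module.
@pytest.mark.parametrize(*opt_local_l)
def test_local_perf(Optimizer):
    run_perf_check(Optimizer)


@pytest.mark.parametrize(*opt_global_l)
def test_global_perf(Optimizer):
    run_perf_check(Optimizer)
```

Keeping `n_opts`, `n_iter`, and the score threshold as keyword arguments lets the two tests diverge later (for example, a stricter threshold for the global optimizers) without re-duplicating the loop.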