| Metric | Value |
| --- | --- |
| Conditions | 2 |
| Total Lines | 28 |
| Code Lines | 22 |
| Lines | 28 |
| Ratio | 100 % |
| Changes | 0 |
```python
import numpy as np
import pytest
from tqdm import tqdm

# optimizers_noSBOM is defined earlier in the test module (not shown in this
# excerpt); it supplies the parametrize argument name and the optimizer classes.


@pytest.mark.parametrize(*optimizers_noSBOM)
def test_convex_convergence_noSBOM(Optimizer):
    # Convex objective with its maximum (score 0) at x1 = 0.
    def objective_function(para):
        score = -para["x1"] * para["x1"]
        return score

    search_space = {"x1": np.arange(-33, 33, 1)}
    initialize = {"vertices": 2}

    n_opts = 33

    scores = []
    for rnd_st in tqdm(range(n_opts)):
        opt = Optimizer(search_space)
        opt.search(
            objective_function,
            n_iter=50,
            random_state=rnd_st,
            memory=False,
            verbosity={"print_results": False, "progress_bar": False},
            initialize=initialize,
        )

        scores.append(opt.best_score)

    score_mean = np.array(scores).mean()
    print("scores", scores)

    # Averaged over all random seeds, the best score should be well above the
    # worst grid value, i.e. the optimizer converges toward x1 = 0.
    assert -500 < score_mean
```
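The parametrize argument `optimizers_noSBOM` is not part of this excerpt. As a hedged sketch only, it could be an `(argnames, argvalues)` tuple so that `*optimizers_noSBOM` unpacks directly into `pytest.mark.parametrize`; the library and optimizer class names below are assumptions for illustration, not taken from the source:

```python
# Hedged sketch: one plausible shape for optimizers_noSBOM. The actual
# definition lives elsewhere in the test module; the imported classes here
# are assumed gradient-free-optimizers names, not confirmed by the excerpt.
from gradient_free_optimizers import (
    HillClimbingOptimizer,
    RandomSearchOptimizer,
)

# (argnames, argvalues) tuple, so *optimizers_noSBOM expands into
# pytest.mark.parametrize("Optimizer", [...]).
optimizers_noSBOM = (
    "Optimizer",
    [
        HillClimbingOptimizer,
        RandomSearchOptimizer,
    ],
)
```

Parametrizing over the optimizer classes this way lets the single convergence test above run once per optimizer, with each class receiving the same search space, seeds, and assertion.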