test_constr_opt_2() — grade: B
Last analyzed

Complexity

Conditions 2

Size

Total Lines 71
Code Lines 49

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 49
dl 0
loc 71
rs 8.669
c 0
b 0
f 0
cc 2
nop 0

How to fix: Long Method

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include Extract Method.

1
"""Test module for constraint optimization strategy."""
2
3
import numpy as np
4
5
from hyperactive import Hyperactive
6
from hyperactive.optimizers import HillClimbingOptimizer, RandomSearchOptimizer
7
from hyperactive.optimizers.strategies import CustomOptimizationStrategy
8
9
10
def test_constr_opt_0():
    """Test constrained optimization with a single constraint.

    Runs a custom two-phase strategy (random search, then hill climbing)
    on a 1-D quadratic objective and verifies that every sampled position
    satisfies the constraint x1 > -5.
    """

    def objective_function(para):
        # Maximize -x1^2 (optimum at x1 == 0).
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(-15, 15, 1)),
    }

    def constraint_1(para):
        # Only positions with x1 > -5 are valid.
        # NOTE: removed a leftover debug print that fired on every
        # constraint evaluation (the other tests' constraints have none).
        return para["x1"] > -5

    constraints_list = [constraint_1]

    optimizer1 = RandomSearchOptimizer()
    optimizer2 = HillClimbingOptimizer()

    # 70% of the iteration budget for random search, 30% for hill climbing.
    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(optimizer1, duration=0.7)
    opt_strat.add_optimizer(optimizer2, duration=0.3)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=50,
        constraints=constraints_list,
        optimizer=opt_strat,
    )
    hyper.run()

    search_data = hyper.search_data(objective_function)
    x0_values = search_data["x1"].values

    print("\n search_data \n", search_data, "\n")

    # Every evaluated position must honor the constraint.
    assert np.all(x0_values > -5)
51
52
53
def test_constr_opt_1():
    """Test constrained optimization with multi-dimensional search space."""

    def objective_function(para):
        # Negative squared distance from the origin; best score at (0, 0).
        return -(para["x1"] * para["x1"] + para["x2"] * para["x2"])

    search_space = {
        "x1": list(np.arange(-10, 10, 1)),
        "x2": list(np.arange(-10, 10, 1)),
    }

    def constraint_1(para):
        # Restrict the first dimension to x1 > -5.
        return para["x1"] > -5

    # Two-phase strategy: random exploration first, local refinement after.
    strategy = CustomOptimizationStrategy()
    strategy.add_optimizer(RandomSearchOptimizer(), duration=0.7)
    strategy.add_optimizer(HillClimbingOptimizer(), duration=0.3)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=50,
        constraints=[constraint_1],
        optimizer=strategy,
    )
    hyper.run()

    results = hyper.search_data(objective_function)
    sampled_x1 = results["x1"].values

    print("\n search_data \n", results, "\n")

    # No recorded position may violate the constraint.
    assert np.all(sampled_x1 > -5)
93
94
95
def test_constr_opt_2():
    """Test constrained optimization with multiple constraints.

    Verifies that all sampled positions satisfy both constraints
    (-5 < x1 < 5) and that each optimizer in the custom strategy
    performed exactly its duration-weighted share of the iteration budget.
    """
    n_iter = 50

    def objective_function(para):
        # Maximize -x1^2 (optimum at x1 == 0).
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(-10, 10, 0.1)),
    }

    def constraint_1(para):
        return para["x1"] > -5

    def constraint_2(para):
        return para["x1"] < 5

    constraints_list = [constraint_1, constraint_2]

    optimizer1 = RandomSearchOptimizer()
    optimizer2 = HillClimbingOptimizer()

    # 70% of iterations for random search, 30% for hill climbing.
    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(optimizer1, duration=0.7)
    opt_strat.add_optimizer(optimizer2, duration=0.3)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        # Fix: was hard-coded 50 even though n_iter exists; the budget
        # assertions below depend on the same value, so keep them in sync.
        n_iter=n_iter,
        constraints=constraints_list,
        optimizer=opt_strat,
    )
    hyper.run()

    search_data = hyper.search_data(objective_function)
    x0_values = search_data["x1"].values

    print("\n search_data \n", search_data, "\n")

    # All evaluated positions must lie strictly inside (-5, 5).
    assert np.all(x0_values > -5)
    assert np.all(x0_values < 5)

    n_current_positions = 0
    n_current_scores = 0

    n_best_positions = 0
    n_best_scores = 0

    # Loop-invariant, hoisted out of the loop. Kept as the literal 1
    # (the registered durations 0.7 + 0.3 are meant to sum to 1);
    # computing the float sum instead could yield 0.999... and perturb
    # the round() below.
    duration_sum = 1

    for optimizer_setup in list(hyper.opt_pros.values())[0].optimizer_setup_l:
        optimizer = optimizer_setup["optimizer"].gfo_optimizer
        duration = optimizer_setup["duration"]

        # Each optimizer's expected share of the total iteration budget.
        n_iter_expected = round(n_iter * duration / duration_sum)

        n_current_positions = n_current_positions + len(optimizer.pos_current_list)
        n_current_scores = n_current_scores + len(optimizer.score_current_list)

        n_best_positions = n_best_positions + len(optimizer.pos_best_list)
        n_best_scores = n_best_scores + len(optimizer.score_best_list)

        print("\n  optimizer", optimizer)
        print("  n_new_positions", optimizer.pos_new_list, len(optimizer.pos_new_list))
        print("  n_new_scores", optimizer.score_new_list, len(optimizer.score_new_list))
        print("  n_iter_expected", n_iter_expected)

        # Each optimizer must have produced exactly its share of iterations.
        assert len(optimizer.pos_new_list) == n_iter_expected
        assert len(optimizer.score_new_list) == n_iter_expected
166