Conditions | 2 |
Total Lines | 71 |
Code Lines | 49 |
Lines | 0 |
Ratio | 0 % |
Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, that is usually a sign that you should extract the commented part into a new method, using the comment as a starting point for naming the new method well.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
1 | """Test module for constraint optimization strategy.""" |
||
def test_constr_opt_2():
    """Test a custom optimization strategy with two search-space constraints.

    Runs a RandomSearch + HillClimbing strategy on a 1-D quadratic objective,
    then checks that every sampled position satisfies both constraints and
    that each optimizer ran its proportional share of the iteration budget.
    """
    n_iter = 50

    def objective_function(para):
        # Maximum of -x1**2 is at x1 == 0, inside the feasible region.
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(-10, 10, 0.1)),
    }

    def constraint_1(para):
        return para["x1"] > -5

    def constraint_2(para):
        return para["x1"] < 5

    constraints_list = [constraint_1, constraint_2]

    optimizer1 = RandomSearchOptimizer()
    optimizer2 = HillClimbingOptimizer()

    # Split the iteration budget 70/30 between the two optimizers.
    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(optimizer1, duration=0.7)
    opt_strat.add_optimizer(optimizer2, duration=0.3)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=n_iter,  # was hard-coded to 50; must stay in sync with n_iter_expected below
        constraints=constraints_list,
        optimizer=opt_strat,
    )
    hyper.run()

    search_data = hyper.search_data(objective_function)
    x0_values = search_data["x1"].values

    print("\n search_data \n", search_data, "\n")

    # Every evaluated position must satisfy both constraints.
    assert np.all(x0_values > -5)
    assert np.all(x0_values < 5)

    # Aggregated only for inspection/debugging; not asserted on below.
    n_current_positions = 0
    n_current_scores = 0

    n_best_positions = 0
    n_best_scores = 0

    # Durations 0.7 + 0.3 sum to 1; loop-invariant, so hoisted out of the loop.
    duration_sum = 1

    for optimizer_setup in list(hyper.opt_pros.values())[0].optimizer_setup_l:
        optimizer = optimizer_setup["optimizer"].gfo_optimizer
        duration = optimizer_setup["duration"]

        n_iter_expected = round(n_iter * duration / duration_sum)

        n_current_positions += len(optimizer.pos_current_list)
        n_current_scores += len(optimizer.score_current_list)

        n_best_positions += len(optimizer.pos_best_list)
        n_best_scores += len(optimizer.score_best_list)

        print("\n optimizer", optimizer)
        print(" n_new_positions", optimizer.pos_new_list, len(optimizer.pos_new_list))
        print(" n_new_scores", optimizer.score_new_list, len(optimizer.score_new_list))
        print(" n_iter_expected", n_iter_expected)

        # Each optimizer should have performed its proportional share of n_iter.
        assert len(optimizer.pos_new_list) == n_iter_expected
        assert len(optimizer.score_new_list) == n_iter_expected