Conditions | 2 |
Total Lines | 70 |
Code Lines | 49 |
Lines | 0 |
Ratio | 0 % |
Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, that is usually a sign that you should extract the commented part into a new method, and use the comment as a starting point for naming that new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
1 | import numpy as np |
||
def test_constr_opt_2():
    """Verify that a two-optimizer custom strategy respects search constraints.

    Runs a RandomSearch + HillClimbing strategy on a 1-D parabola whose
    search space is restricted to -5 < x1 < 5 by two constraint functions,
    then checks that:
      * every sampled position satisfies both constraints, and
      * each optimizer ran its expected share of the total iterations
        (proportional to its registered ``duration``).
    """
    n_iter = 50

    def objective_function(para):
        # Maximum of -x1**2 lies at x1 == 0, inside the constrained region,
        # so the constraints never block the optimum itself.
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(-10, 10, 0.1)),
    }

    def constraint_1(para):
        return para["x1"] > -5

    def constraint_2(para):
        return para["x1"] < 5

    constraints_list = [constraint_1, constraint_2]

    optimizer1 = RandomSearchOptimizer()
    optimizer2 = HillClimbingOptimizer()

    opt_strat = CustomOptimizationStrategy()
    opt_strat.add_optimizer(optimizer1, duration=0.7)
    opt_strat.add_optimizer(optimizer2, duration=0.3)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=n_iter,  # use the variable instead of hard-coding 50 twice
        constraints=constraints_list,
        optimizer=opt_strat,
    )
    hyper.run()

    search_data = hyper.search_data(objective_function)
    x1_values = search_data["x1"].values

    print("\n search_data \n", search_data, "\n")

    # Every evaluated position must satisfy both constraints.
    assert np.all(x1_values > -5)
    assert np.all(x1_values < 5)

    optimizer_setups = list(hyper.opt_pros.values())[0].optimizer_setup_l
    # Normalize by the actual total of the registered durations (0.7 + 0.3)
    # instead of hard-coding 1 inside the loop; hoisted since it is
    # loop-invariant.
    duration_sum = sum(setup["duration"] for setup in optimizer_setups)

    for optimizer_setup in optimizer_setups:
        optimizer = optimizer_setup["optimizer"].gfo_optimizer
        duration = optimizer_setup["duration"]

        # Each optimizer's share of iterations is proportional to its duration.
        n_iter_expected = round(n_iter * duration / duration_sum)

        print("\n optimizer", optimizer)
        print(" n_new_positions", optimizer.pos_new_list, len(optimizer.pos_new_list))
        print(" n_new_scores", optimizer.score_new_list, len(optimizer.score_new_list))
        print(" n_iter_expected", n_iter_expected)

        assert len(optimizer.pos_new_list) == n_iter_expected
        assert len(optimizer.score_new_list) == n_iter_expected
159 |