| Metric      | Value |
|-------------|-------|
| Conditions  | 2     |
| Total Lines | 72    |
| Code Lines  | 48    |
| Lines       | 0     |
| Ratio       | 0 %   |
| Changes     | 0     |
Small methods make your code easier to understand, especially when combined with a good name. Moreover, if a method is small, finding a good name for it is usually much easier.
For example, if you find yourself adding comments to a method's body, that is usually a sign that the commented part should be extracted into a new method, with the comment serving as a starting point for the new method's name.
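As an illustration, here is a minimal before/after sketch of that idea (the function and names are hypothetical, not taken from the code under review): the comment becomes the name of the extracted method.

```python
# Before: a comment explains what the block does.
def process_order(order):
    total = sum(item.price * item.qty for item in order.items)
    # apply the bulk discount for large orders
    if len(order.items) > 10:
        total *= 0.95
    return total


# After: the commented block is extracted, and the comment becomes the method name.
def process_order(order):
    total = sum(item.price * item.qty for item in order.items)
    return apply_bulk_discount(order, total)


def apply_bulk_discount(order, total):
    """Give a 5% discount on orders with more than 10 items."""
    if len(order.items) > 10:
        total *= 0.95
    return total
```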
Commonly applied refactorings include Extract Method: move a coherent fragment of the body into its own, well-named method.
If many parameters or temporary variables get in the way of extracting, consider Replace Temp with Query, Introduce Parameter Object, or Preserve Whole Object.
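For the parameter-heavy case, a minimal Introduce Parameter Object sketch (again with hypothetical names) could look like this: the loose parameters are bundled into one small, well-named object.

```python
from dataclasses import dataclass


# Before: four loosely related parameters always travel together.
def plot_temperature(start, end, step, label):
    ...


# After: the parameters are grouped into a single object with a clear name.
@dataclass
class PlotRange:
    start: float
    end: float
    step: float
    label: str


def plot_temperature(plot_range: PlotRange):
    ...
```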
1 | """Test module for constraint optimization functionality.""" |
||
77 | def test_constr_opt_2(): |
||
78 | """Test constraint optimization with multiple constraints.""" |
||
79 | n_iter = 50 |
||
80 | |||
81 | def objective_function(para): |
||
82 | score = -para["x1"] * para["x1"] |
||
83 | return score |
||
84 | |||
85 | search_space = { |
||
86 | "x1": list(np.arange(-10, 10, 0.1)), |
||
87 | } |
||
88 | |||
89 | def constraint_1(para): |
||
90 | return para["x1"] > -5 |
||
91 | |||
92 | def constraint_2(para): |
||
93 | return para["x1"] < 5 |
||
94 | |||
95 | constraints_list = [constraint_1, constraint_2] |
||
96 | |||
97 | hyper = Hyperactive() |
||
98 | hyper.add_search( |
||
99 | objective_function, |
||
100 | search_space, |
||
101 | n_iter=50, |
||
102 | constraints=constraints_list, |
||
103 | ) |
||
104 | hyper.run() |
||
105 | |||
106 | search_data = hyper.search_data(objective_function) |
||
107 | x0_values = search_data["x1"].values |
||
108 | |||
109 | print("\n search_data \n", search_data, "\n") |
||
110 | |||
111 | assert np.all(x0_values > -5) |
||
112 | assert np.all(x0_values < 5) |
||
113 | |||
114 | n_new_positions = 0 |
||
115 | n_new_scores = 0 |
||
116 | |||
117 | n_current_positions = 0 |
||
118 | n_current_scores = 0 |
||
119 | |||
120 | n_best_positions = 0 |
||
121 | n_best_scores = 0 |
||
122 | |||
123 | for hyper_optimizer in hyper.opt_pros.values(): |
||
124 | optimizer = hyper_optimizer.gfo_optimizer |
||
125 | |||
126 | n_new_positions = n_new_positions + len(optimizer.pos_new_list) |
||
127 | n_new_scores = n_new_scores + len(optimizer.score_new_list) |
||
128 | |||
129 | n_current_positions = n_current_positions + len(optimizer.pos_current_list) |
||
130 | n_current_scores = n_current_scores + len(optimizer.score_current_list) |
||
131 | |||
132 | n_best_positions = n_best_positions + len(optimizer.pos_best_list) |
||
133 | n_best_scores = n_best_scores + len(optimizer.score_best_list) |
||
134 | |||
135 | print("\n optimizer", optimizer) |
||
136 | print(" n_new_positions", optimizer.pos_new_list) |
||
137 | print(" n_new_scores", optimizer.score_new_list) |
||
138 | |||
139 | assert n_new_positions == n_iter |
||
140 | assert n_new_scores == n_iter |
||
141 | |||
142 | assert n_current_positions == n_current_scores |
||
143 | assert n_current_positions <= n_new_positions |
||
144 | |||
145 | assert n_best_positions == n_best_scores |
||
146 | assert n_best_positions <= n_new_positions |
||
147 | |||
148 | assert n_new_positions == n_new_scores |
||
149 |
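Applied to the test above, one possible Extract Method step would be to pull the per-optimizer bookkeeping loop out of the test body into a helper. This is only a sketch: the helper name `_sum_optimizer_list_lengths` is hypothetical, while the attribute names are taken from the code under review.

```python
def _sum_optimizer_list_lengths(hyper):
    """Sum the lengths of the new/current/best position and score lists
    over all optimizers attached to a finished Hyperactive run."""
    counts = {
        "new_positions": 0,
        "new_scores": 0,
        "current_positions": 0,
        "current_scores": 0,
        "best_positions": 0,
        "best_scores": 0,
    }
    for hyper_optimizer in hyper.opt_pros.values():
        optimizer = hyper_optimizer.gfo_optimizer
        counts["new_positions"] += len(optimizer.pos_new_list)
        counts["new_scores"] += len(optimizer.score_new_list)
        counts["current_positions"] += len(optimizer.pos_current_list)
        counts["current_scores"] += len(optimizer.score_current_list)
        counts["best_positions"] += len(optimizer.pos_best_list)
        counts["best_scores"] += len(optimizer.score_best_list)
    return counts
```

The test would then shrink to the setup, the run, and a block of assertions against the returned counts, which is exactly the kind of reduction the Extract Method advice above aims for.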