| Metric      | Value |
|-------------|-------|
| Conditions  | 2     |
| Total Lines | 71    |
| Code Lines  | 48    |
| Lines       | 0     |
| Ratio       | 0 %   |
| Changes     | 0     |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
1 | import numpy as np |
||
def test_constr_opt_2():
    """Verify that two search-space constraints are jointly enforced.

    Runs a Hyperactive search on a 1-D inverted parabola with two
    constraint functions restricting ``x1`` to the open interval
    (-5, 5), then checks that:

    * every evaluated position in the collected search data satisfies
      both constraints, and
    * the optimizer's internal bookkeeping lists (new / current / best
      positions and scores) are mutually consistent with ``n_iter``.
    """
    n_iter = 50

    def objective_function(para):
        # Maximum at x1 == 0; score is -(x1 ** 2).
        score = -para["x1"] * para["x1"]
        return score

    search_space = {
        "x1": list(np.arange(-10, 10, 0.1)),
    }

    # Constraints must hold for every position the optimizer evaluates.
    def constraint_1(para):
        return para["x1"] > -5

    def constraint_2(para):
        return para["x1"] < 5

    constraints_list = [constraint_1, constraint_2]

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        # Was hard-coded as 50, duplicating n_iter; the assertions below
        # compare against n_iter, so the two must stay in sync.
        n_iter=n_iter,
        constraints=constraints_list,
    )
    hyper.run()

    search_data = hyper.search_data(objective_function)
    x1_values = search_data["x1"].values

    print("\n search_data \n", search_data, "\n")

    # Every evaluated position must satisfy both constraints.
    assert np.all(x1_values > -5)
    assert np.all(x1_values < 5)

    n_new_positions = 0
    n_new_scores = 0

    n_current_positions = 0
    n_current_scores = 0

    n_best_positions = 0
    n_best_scores = 0

    # Aggregate bookkeeping counts across all optimizer processes.
    for hyper_optimizer in hyper.opt_pros.values():
        optimizer = hyper_optimizer.gfo_optimizer

        n_new_positions += len(optimizer.pos_new_list)
        n_new_scores += len(optimizer.score_new_list)

        n_current_positions += len(optimizer.pos_current_list)
        n_current_scores += len(optimizer.score_current_list)

        n_best_positions += len(optimizer.pos_best_list)
        n_best_scores += len(optimizer.score_best_list)

        print("\n optimizer", optimizer)
        print(" n_new_positions", optimizer.pos_new_list)
        print(" n_new_scores", optimizer.score_new_list)

    # Exactly one new position and score per iteration.
    assert n_new_positions == n_iter
    assert n_new_scores == n_iter

    # Current lists come in matched pairs and never exceed new positions.
    assert n_current_positions == n_current_scores
    assert n_current_positions <= n_new_positions

    # Best lists come in matched pairs and never exceed new positions.
    assert n_best_positions == n_best_scores
    assert n_best_positions <= n_new_positions

    assert n_new_positions == n_new_scores
142 |