| Metric | Value |
| --- | --- |
| Conditions | 7 |
| Total Lines | 56 |
| Code Lines | 41 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, especially when combined with a good name. The smaller the method, the easier it usually is to find that good name.
For example, if you find yourself adding a comment inside a method's body, that is usually a sign that the commented block should be extracted into a new method, with the comment serving as a starting point for the new method's name.
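As a small, hypothetical illustration (the function and field names below are made up, not taken from the reviewed code), the comment turns into the name of the extracted method:

```python
# Before: a comment explains what the next few lines do.
def total_price(items):
    # sum the prices of items that are not discounted
    total = 0
    for item in items:
        if not item.get("discounted", False):
            total += item["price"]
    return total


# After: the commented block became its own method; the comment became its name.
def sum_undiscounted_prices(items):
    return sum(item["price"] for item in items if not item.get("discounted", False))


def total_price_refactored(items):
    return sum_undiscounted_prices(items)
```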
Commonly applied refactorings include:

- Extract Method: lift a coherent block of the body into its own, well-named method.
- If many parameters/temporary variables are present: Replace Method with Method Object, as in the sketch after this list.
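As a hypothetical sketch (none of these names come from the reviewed project), Replace Method with Method Object moves the long method onto a small class whose fields replace its parameters and temporaries, so the body can be split into short methods without threading everything through each call:

```python
# Hypothetical sketch of Replace Method with Method Object (names invented).
class PriceCalculator:
    def __init__(self, order, tax_rate):
        self.order = order        # former parameter
        self.tax_rate = tax_rate  # former parameter
        self.subtotal = 0.0       # former temporary variable

    def compute(self):
        self._add_line_items()
        return self._apply_tax()

    def _add_line_items(self):
        for item in self.order:
            self.subtotal += item["price"] * item["qty"]

    def _apply_tax(self):
        return self.subtotal * (1 + self.tax_rate)


# The original long method shrinks to a one-line delegation:
def total_price_with_tax(order, tax_rate=0.19):
    return PriceCalculator(order, tax_rate).compute()
```

The `search` method this report refers to is reproduced below in full.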
```python
# Author: Simon Blanke


def search(self, nth_process, p_bar):
    for optimizer_setup in self.optimizer_setup_l:
        hyper_opt = optimizer_setup["optimizer"]
        duration = optimizer_setup["duration"]

        n_iter = round(self.n_iter * duration / self.duration_sum)

        # initialize
        if self.best_para is not None:
            initialize = {}
            if "warm_start" in initialize:
                initialize["warm_start"].append(self.best_para)
            else:
                initialize["warm_start"] = [self.best_para]
        else:
            initialize = dict(self.initialize)

        # memory_warm_start
        if self.search_data is not None:
            memory_warm_start = self.search_data
        else:
            memory_warm_start = self.memory_warm_start

        # warm_start_smbo
        if (
            hyper_opt.optimizer_class.optimizer_type == "sequential"
            and self.search_data is not None
        ):
            hyper_opt.opt_params["warm_start_smbo"] = self.search_data

        hyper_opt.setup_search(
            objective_function=self.objective_function,
            s_space=self.s_space,
            n_iter=n_iter,
            initialize=initialize,
            pass_through=self.pass_through,
            callbacks=self.callbacks,
            catch=self.catch,
            max_score=self.max_score,
            early_stopping=self.early_stopping,
            random_state=self.random_state,
            memory=self.memory,
            memory_warm_start=memory_warm_start,
            verbosity=self.verbosity,
        )

        hyper_opt.search(nth_process, p_bar)

        self._add_result_attributes(
            hyper_opt.best_para,
            hyper_opt.best_score,
            hyper_opt.best_since_iter,
            hyper_opt.eval_times,
            hyper_opt.iter_times,
            hyper_opt.search_data,
            hyper_opt.gfo_optimizer.random_seed,
        )
```
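As a concrete next step, the blocks already marked by the inline comments (`# initialize`, `# memory_warm_start`, `# warm_start_smbo`) are natural Extract Method candidates. The sketch below is only an illustration: the helper names are invented and are not part of Hyperactive, the logic is copied unchanged from the listing above, and the helpers are meant to live on the same class as `search`.

```python
# Sketch only: invented helper names; logic lifted verbatim from the listing above.
def _warm_start_initialize(self):
    # "# initialize" block: seed the next optimizer with the best parameters so far
    if self.best_para is not None:
        initialize = {}
        if "warm_start" in initialize:
            initialize["warm_start"].append(self.best_para)
        else:
            initialize["warm_start"] = [self.best_para]
        return initialize
    return dict(self.initialize)


def _resolve_memory_warm_start(self):
    # "# memory_warm_start" block: prefer search data gathered by earlier runs
    if self.search_data is not None:
        return self.search_data
    return self.memory_warm_start


def _maybe_warm_start_smbo(self, hyper_opt):
    # "# warm_start_smbo" block: only sequential optimizers reuse search data
    if (
        hyper_opt.optimizer_class.optimizer_type == "sequential"
        and self.search_data is not None
    ):
        hyper_opt.opt_params["warm_start_smbo"] = self.search_data
```

With these helpers in place, the loop body in `search` reduces to computing `n_iter`, three helper calls, and the delegating calls to `hyper_opt.setup_search(...)` and `hyper_opt.search(...)`, which moves most of the 7 counted conditions out of `search` itself.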