Passed
Push — master (193da7...4bb259)
by Simon
01:36 queued 11s

SearchProcess.__init__() · Grade: A

Complexity
    Conditions     1

Size
    Total Lines    37
    Code Lines     32

Duplication
    Lines          0
    Ratio          0 %

Importance
    Changes        0

Metric    Value
cc        1
eloc      32
nop       13
dl        0
loc       37
rs        9.112
c         0
b         0
f         0

How to fix: Many Parameters

Methods with many parameters are not only hard to understand, but their parameter lists also tend to become inconsistent as soon as you need more, or different, data.

There are several approaches to avoid long parameter lists:

Introduce Parameter Object: group values that naturally belong together into a single object and pass that object instead of the individual values.
Preserve Whole Object: when several parameters are pulled out of the same object, pass the object itself and let the method read what it needs.
Replace Parameter with Method Call: when the callee can compute or look up a value itself, it does not need to receive it as a parameter.

A minimal sketch of the first approach, applied to the constructor reviewed below, follows.
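The sketch is illustrative only: SearchProcess.__init__() is flagged here with nop = 13, and a small configuration object can absorb most of those arguments. The names SearchConfig and SlimSearchProcess are hypothetical and not part of the Hyperactive or gradient_free_optimizers APIs; which fields belong in the object (and which collaborators, such as verb, stay separate arguments) is a design choice, not something this report prescribes.

from dataclasses import dataclass, field
from typing import Callable, Optional


@dataclass
class SearchConfig:
    # Hypothetical parameter object: bundles the values that
    # SearchProcess.__init__() currently receives one by one.
    objective_function: Callable
    search_space: dict
    n_iter: int
    optimizer: str = "RandomSearch"
    n_jobs: int = 1
    init_para: dict = field(default_factory=dict)
    memory: bool = True
    random_state: Optional[int] = None


class SlimSearchProcess:
    # Illustrative constructor: two parameters instead of thirteen.
    def __init__(self, nth_process: int, config: SearchConfig):
        self.nth_process = nth_process
        self.config = config


# Usage: related settings travel together, and adding a new option
# only changes SearchConfig, not every caller.
config = SearchConfig(
    objective_function=lambda para: -(para["x"] ** 2),
    search_space={"x": list(range(-10, 11))},
    n_iter=100,
)
process = SlimSearchProcess(nth_process=0, config=config)

Call sites then build one SearchConfig and hand it along, so growing the option set touches the config class rather than every caller.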

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import time
import random
import numpy as np
import pandas as pd

from importlib import import_module


optimizer_dict = {
    "HillClimbing": "HillClimbingOptimizer",
    "StochasticHillClimbing": "StochasticHillClimbingOptimizer",
    "TabuSearch": "TabuOptimizer",
    "RandomSearch": "RandomSearchOptimizer",
    "RandomRestartHillClimbing": "RandomRestartHillClimbingOptimizer",
    "RandomAnnealing": "RandomAnnealingOptimizer",
    "SimulatedAnnealing": "SimulatedAnnealingOptimizer",
    "StochasticTunneling": "StochasticTunnelingOptimizer",
    "ParallelTempering": "ParallelTemperingOptimizer",
    "ParticleSwarm": "ParticleSwarmOptimizer",
    "EvolutionStrategy": "EvolutionStrategyOptimizer",
    "Bayesian": "BayesianOptimizer",
    "TPE": "TreeStructuredParzenEstimators",
    "DecisionTree": "DecisionTreeOptimizer",
}


class SearchProcess:
    def __init__(
        self,
        nth_process,
        verb,
        objective_function,
        search_space,
        n_iter,
        function_parameter,
        optimizer,
        n_jobs,
        init_para,
        memory,
        hyperactive,
        random_state,
    ):
        self.nth_process = nth_process
        self.verb = verb
        self.objective_function = objective_function
        self.search_space = search_space
        self.n_iter = n_iter
        self.function_parameter = function_parameter
        self.optimizer = optimizer
        self.n_jobs = n_jobs
        self.init_para = init_para
        self.memory = memory
        self.hyperactive = hyperactive
        self.random_state = random_state

        self._process_arguments()

        self.iter_times = []
        self.eval_times = []

        module = import_module("gradient_free_optimizers")
        self.opt_class = getattr(module, optimizer_dict[optimizer])

        self.res = ResultsManager(objective_function, search_space, function_parameter)

    def _results_dict(self):
        results_dict = {
            "eval_times": self.eval_times,
            "iter_times": self.iter_times,
            "memory": self.cand.memory_dict_new,
            "para_best": self.cand.para_best,
            "score_best": self.cand.score_best,
        }

        return results_dict

    def _time_exceeded(self, start_time, max_time):
        run_time = time.time() - start_time
        return max_time and run_time > max_time

    def _initialize_search(self, nth_process):
        init_positions = self.cand.init.set_start_pos(self.n_positions)
        self.opt = self.opt_class(init_positions, self.cand.space.dim, opt_para={})

        self.verb.p_bar.init_p_bar(nth_process, self.n_iter, self.objective_function)

    def _process_arguments(self):
        self._set_random_seed()

        if isinstance(self.optimizer, dict):
            # optimizer passed as {name: parameter-dict}: split into name and parameters
            optimizer = list(self.optimizer.keys())[0]
            self.opt_para = self.optimizer[optimizer]
            self.optimizer = optimizer

            self.n_positions = self._get_n_positions()
        else:
            self.opt_para = {}
            self.n_positions = self._get_n_positions()

    def _get_n_positions(self):
        n_positions_strings = [
            "n_positions",
            "system_temperatures",
            "n_particles",
            "individuals",
        ]

        n_positions = 1
        for n_pos_name in n_positions_strings:
            if n_pos_name in list(self.opt_para.keys()):
                n_positions = self.opt_para[n_pos_name]
                if n_pos_name == "system_temperatures":
                    # a list of temperatures implies one position per temperature
                    n_positions = len(n_positions)

        return n_positions

    def _set_random_seed(self):
        """Sets the random seed separately for each thread (to avoid getting the same results in each thread)"""
        if self.random_state is None:
            self.random_state = np.random.randint(0, high=2 ** 32 - 2)

        random.seed(self.random_state + self.nth_process)
        np.random.seed(self.random_state + self.nth_process)

    def store_memory(self, memory):
        pass

    def print_best_para(self):
        self.verb.info.print_start_point()

    def search(self, start_time, max_time, nth_process):
        start_time_search = time.time()
        self._initialize_search(nth_process)

        # loop to initialize N positions
        for nth_init in range(len(self.opt.init_positions)):
            start_time_iter = time.time()
            pos_new = self.opt.init_pos(nth_init)

            start_time_eval = time.time()
            score_new = self.cand.get_score(pos_new, nth_init)
            self.eval_times.append(time.time() - start_time_eval)

            self.opt.evaluate(score_new)
            self.iter_times.append(time.time() - start_time_iter)

        # loop to do the iterations
        for nth_iter in range(len(self.opt.init_positions), self.n_iter):
            start_time_iter = time.time()
            pos_new = self.opt.iterate(nth_iter)

            start_time_eval = time.time()
            score_new = self.cand.get_score(pos_new, nth_iter)
            self.eval_times.append(time.time() - start_time_eval)

            self.opt.evaluate(score_new)
            self.iter_times.append(time.time() - start_time_iter)

            if self._time_exceeded(start_time, max_time):
                break

        self.verb.p_bar.close_p_bar()

        self.res.memory_dict_new = self.cand.memory_dict_new
        self.res.results_dict = self._results_dict()

        return self.res

from optimization_metadata import HyperactiveWrapper
from ..meta_data.meta_data_path import meta_data_path


class ResultsManager:
    def __init__(
        self, objective_function, search_space, function_parameter,
    ):
        self.objective_function = objective_function
        self.search_space = search_space
        self.function_parameter = function_parameter

        self.memory_dict_new = {}

        self.hypermem = HyperactiveWrapper(
            main_path=meta_data_path(),
            X=function_parameter["features"],
            y=function_parameter["target"],
            model=self.objective_function,
            search_space=search_space,
        )

    def load_long_term_memory(self):
        return self.hypermem.load()

    def save_long_term_memory(self):
        self.hypermem.save(self.memory_dict_new)