# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

from ._optimizer_attributes import OptimizerAttributes
from ._constraint import Constraint

from ..optimization_backend.gradient_free_optimizers._objective_function import (
    ObjectiveFunction,
)
from ..optimization_backend.gradient_free_optimizers._hyper_gradient_conv import (
    HyperGradientConv,
)


class Search(OptimizerAttributes):
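    """Run a single optimization process with a gradient-free-optimizers backend.

    ``setup`` and ``pass_args`` store the search configuration, ``_setup_process``
    builds the backend optimizer, ``_search`` runs the search loop, and
    ``convert_results2hyper`` maps the position-based backend results back into
    the hyperactive parameter space.
    """
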
    max_time: float
    nth_process: int

    def __init__(self, optimizer_class, opt_params):
        super().__init__()
        self.optimizer_class = optimizer_class
        self.opt_params = opt_params

    def setup(
        self,
        experiment,
        s_space,
        n_iter,
        initialize,
        constraints,
        pass_through,
        max_score,
        early_stopping,
        random_state,
        memory,
        memory_warm_start,
    ):
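        """Store the search configuration for this optimization run."""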
        self.experiment = experiment
        self.s_space = s_space
        self.n_iter = n_iter

        self.initialize = initialize
        self.constraints = constraints
        self.pass_through = pass_through
        self.max_score = max_score
        self.early_stopping = early_stopping
        self.random_state = random_state
        self.memory = memory
        self.memory_warm_start = memory_warm_start

    def pass_args(self, max_time, nth_process, verbosity):
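        """Store per-process run arguments and reduce verbosity to the supported options."""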
        self.max_time = max_time
        self.nth_process = nth_process

        if "progress_bar" in verbosity:
            self.verbosity = ["progress_bar"]
        else:
            self.verbosity = []

    def convert_results2hyper(self):
        """Convert the backend results from positions back into the hyperactive parameter space."""
        self.eval_times = sum(self.gfo_optimizer.eval_times)
        self.iter_times = sum(self.gfo_optimizer.iter_times)

        if self.gfo_optimizer.best_para is not None:
            value = self.hg_conv.para2value(self.gfo_optimizer.best_para)
            position = self.hg_conv.position2value(value)
            best_para = self.hg_conv.value2para(position)
            self.best_para = best_para
        else:
            self.best_para = None

        self.best_score = self.gfo_optimizer.best_score
        self.positions = self.gfo_optimizer.search_data
        self.search_data = self.hg_conv.positions2results(self.positions)

        results_dd = self.gfo_optimizer.search_data.drop_duplicates(
            subset=self.s_space.dim_keys, keep="first"
        )
        self.memory_values_df = results_dd[
            self.s_space.dim_keys + ["score"]
        ].reset_index(drop=True)

    def _setup_process(self):
        """Build the gradient-free-optimizers backend optimizer for this process."""
        self.hg_conv = HyperGradientConv(self.s_space)

        initialize = self.hg_conv.conv_initialize(self.initialize)
        search_space_positions = self.s_space.positions

        # conv warm start for smbo from values into positions
        if "warm_start_smbo" in self.opt_params:
            self.opt_params["warm_start_smbo"] = self.hg_conv.conv_memory_warm_start(
                self.opt_params["warm_start_smbo"]
            )

        gfo_constraints = [
            Constraint(constraint, self.s_space) for constraint in self.constraints
        ]

        self.gfo_optimizer = self.optimizer_class(
            search_space=search_space_positions,
            initialize=initialize,
            constraints=gfo_constraints,
            random_state=self.random_state,
            nth_process=self.nth_process,
            **self.opt_params,
        )

        self.conv = self.gfo_optimizer.conv

    def _search(self, p_bar):
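        """Run the backend search loop and collect the results."""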
        self._setup_process()

        memory_warm_start = self.hg_conv.conv_memory_warm_start(self.memory_warm_start)

        self.experiment.backend_adapter(ObjectiveFunction, self.s_space)

        self.gfo_optimizer.init_search(
            self.experiment.gfo_objective_function,
            self.n_iter,
            self.max_time,
            self.max_score,
            self.early_stopping,
            self.memory,
            memory_warm_start,
            False,
        )
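
        # main search loop: one backend step per iteration, with optional
        # progress-bar updates; stops early once the backend stop criterion triggers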
        for nth_iter in range(self.n_iter):
            if p_bar:
                p_bar.set_description(
                    "["
                    + str(self.nth_process)
                    + "] "
                    + str(self.experiment.__class__.__name__)
                    + " ("
                    + self.optimizer_class.name
                    + ")",
                )

            self.gfo_optimizer.search_step(nth_iter)
            if self.gfo_optimizer.stop.check():
                break

            if p_bar:
                p_bar.set_postfix(
                    best_score=str(self.gfo_optimizer.score_best),
                    best_pos=str(self.gfo_optimizer.pos_best),
                    best_iter=str(self.gfo_optimizer.p_bar._best_since_iter),
                )

                p_bar.update(1)
                p_bar.refresh()

        self.gfo_optimizer.finish_search()

        self.convert_results2hyper()

        self._add_result_attributes(
            self.best_para,
            self.best_score,
            self.gfo_optimizer.p_bar._best_since_iter,
            self.eval_times,
            self.iter_times,
            self.search_data,
            self.gfo_optimizer.random_seed,
        )