import numpy as np
from hyperactive import Hyperactive
from hyperactive.optimizers import BayesianOptimizer

from gradient_free_optimizers import RandomRestartHillClimbingOptimizer


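# Meta-objective: score one set of hill-climbing parameters by running
# RandomRestartHillClimbingOptimizer on the Ackley function with 33 random
# seeds and summing the best score of each run.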
def meta_opt(opt_para):
    scores = []

    for i in range(33):

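        # Ackley function, negated so that a higher score means a better
        # (lower-loss) solution.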
        def ackley_function(para):
            x = para["x"]
            y = para["y"]
            loss1 = -20 * np.exp(-0.2 * np.sqrt(0.5 * (x * x + y * y)))
            loss2 = -np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
            loss3 = np.exp(1)
            loss4 = 20

            loss = loss1 + loss2 + loss3 + loss4

            return -loss

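        # Two-dimensional search space for the Ackley function.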
        dim_size = np.arange(-6, 6, 0.01)

        search_space = {
            "x": dim_size,
            "y": dim_size,
        }

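        # Inner optimization run, configured with the parameters being
        # meta-optimized and seeded for reproducibility.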
        opt = RandomRestartHillClimbingOptimizer(
            search_space,
            random_state=i,
            epsilon=opt_para["epsilon"],
            n_neighbours=opt_para["n_neighbours"],
            n_iter_restart=opt_para["n_iter_restart"],
        )
        opt.search(
            ackley_function,
            n_iter=100,
            verbosity=False,
        )

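        # Keep the best score found in this run.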
        scores.append(opt.best_score)

    return np.array(scores).sum()


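# Search space over the hill climber's own hyperparameters.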
search_space = {
    "epsilon": list(np.arange(0.01, 0.1, 0.01)),
    "n_neighbours": list(range(1, 10)),
    "n_iter_restart": list(range(2, 12)),
}


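# Meta-optimize the hill climber's hyperparameters with Bayesian
# optimization via Hyperactive.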
optimizer = BayesianOptimizer()

hyper = Hyperactive()
hyper.add_search(meta_opt, search_space, n_iter=120, optimizer=optimizer)
hyper.run()