Passed
Push — master ( 83f02a...d8a265 )
by Simon
01:47
created

hyperactive.opt_args.GPR.__init__()   A

Complexity

Conditions 1

Size

Total Lines 3
Code Lines 3

Duplication

Lines 3
Ratio 100 %

Importance

Changes 0
Metric Value
eloc 3
dl 3
loc 3
rs 10
c 0
b 0
f 0
cc 1
nop 1
1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
from .util import merge_dicts
6
7
from sklearn.gaussian_process import GaussianProcessRegressor
8
from sklearn.gaussian_process.kernels import Matern
9
from numpy.random import normal
10
11
class GPR:
    """Gaussian-process regressor preconfigured for Bayesian optimization.

    Wraps sklearn's GaussianProcessRegressor with a Matern(nu=2.5)
    kernel, target normalization, and 10 optimizer restarts.
    """

    def __init__(self):
        # Matern with nu=2.5 is the standard smoothness assumption for
        # surrogate models in Bayesian optimization.
        kernel = Matern(nu=2.5)
        self.gpr = GaussianProcessRegressor(
            kernel=kernel,
            normalize_y=True,
            n_restarts_optimizer=10,
        )

    def fit(self, X, y):
        """Fit the underlying regressor on the observed samples."""
        self.gpr.fit(X, y)

    def predict(self, X):
        """Return the regressor's (mean, std) prediction for X."""
        return self.gpr.predict(X, return_std=True)
24
25
class Arguments:
    """Hyperparameter container for the optimizer classes.

    Builds a dict with one default value per optimizer hyperparameter,
    overrides those defaults with any user-supplied keyword arguments
    (via ``merge_dicts``), and exposes every known entry as an instance
    attribute.
    """

    def __init__(self, *args, **kwargs):
        kwargs_opt = {
            # HillClimbingOptimizer
            "epsilon": 0.03,
            "climb_dist": normal,
            "n_neighbours": 1,
            # StochasticHillClimbingOptimizer
            "p_down": 0.5,
            # TabuOptimizer
            "tabu_memory": 10,
            # RandomRestartHillClimbingOptimizer
            "n_restarts": 10,
            # RandomAnnealingOptimizer
            "epsilon_mod": 33,
            "annealing_rate": 0.99,
            # SimulatedAnnealingOptimizer
            "start_temp": 1,  # TODO
            # StochasticTunnelingOptimizer
            "gamma": 0.5,
            # ParallelTemperingOptimizer
            "system_temperatures": [0.1, 0.25, 0.5, 0.75, 1, 2.5, 5, 7.5, 10],
            "n_swaps": 10,
            # ParticleSwarmOptimizer
            "n_particles": 10,
            "inertia": 0.5,
            "cognitive_weight": 0.5,
            "social_weight": 0.5,
            # EvolutionStrategyOptimizer
            "individuals": 10,
            "mutation_rate": 0.7,
            "crossover_rate": 0.3,
            # BayesianOptimizer
            "warm_start_smbo": False,
            "xi": 0.01,
            "gpr": GPR(),
        }

        # User-supplied kwargs take precedence over the defaults above.
        self.kwargs_opt = merge_dicts(kwargs_opt, kwargs)

        self._set_specific_args(self.kwargs_opt)

    def _set_specific_args(self, kwargs_opt):
        # Expose each known hyperparameter as an instance attribute.
        self.epsilon = kwargs_opt["epsilon"]
        self.climb_dist = kwargs_opt["climb_dist"]
        self.n_neighbours = kwargs_opt["n_neighbours"]
        self.p_down = kwargs_opt["p_down"]
        self.tabu_memory = kwargs_opt["tabu_memory"]
        self.n_restarts = kwargs_opt["n_restarts"]
        self.epsilon_mod = kwargs_opt["epsilon_mod"]
        self.annealing_rate = kwargs_opt["annealing_rate"]
        # Fix: "start_temp" was defined in the defaults dict but never
        # exposed as an attribute, silently dropping the hyperparameter.
        self.start_temp = kwargs_opt["start_temp"]
        self.gamma = kwargs_opt["gamma"]
        self.system_temperatures = kwargs_opt["system_temperatures"]
        self.n_swaps = kwargs_opt["n_swaps"]
        self.n_particles = kwargs_opt["n_particles"]
        self.inertia = kwargs_opt["inertia"]
        self.cognitive_weight = kwargs_opt["cognitive_weight"]
        self.social_weight = kwargs_opt["social_weight"]
        self.individuals = kwargs_opt["individuals"]
        self.mutation_rate = kwargs_opt["mutation_rate"]
        self.crossover_rate = kwargs_opt["crossover_rate"]
        self.warm_start_smbo = kwargs_opt["warm_start_smbo"]
        self.xi = kwargs_opt["xi"]
        self.gpr = kwargs_opt["gpr"]