Passed
Push — master ( 61a8e6...a7d091 )
by Simon
03:21
created

hyperactive.hyperactive.Hyperactive.search()   A

Complexity

Conditions 1

Size

Total Lines 18
Code Lines 5

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 5
dl 0
loc 18
rs 10
c 0
b 0
f 0
cc 1
nop 3
1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
import time
6
7
from .core import Core
8
from .opt_args import Arguments
9
from . import (
10
    HillClimbingOptimizer,
11
    StochasticHillClimbingOptimizer,
12
    TabuOptimizer,
13
    RandomSearchOptimizer,
14
    RandomRestartHillClimbingOptimizer,
15
    RandomAnnealingOptimizer,
16
    SimulatedAnnealingOptimizer,
17
    StochasticTunnelingOptimizer,
18
    ParallelTemperingOptimizer,
19
    ParticleSwarmOptimizer,
20
    EvolutionStrategyOptimizer,
21
    BayesianOptimizer,
22
)
23
24
25
class Hyperactive:
    # Facade that wires the user's arguments through Core (argument parsing)
    # and Arguments (optimizer hyperparameters) to a concrete optimizer class,
    # and exposes a small public API around it (search, timing, results).

    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        search_config: dict
            A dictionary providing the model and hyperparameter search space for the
            optimization process.
        n_iter: int
            The number of iterations the optimizer performs.
        metric: string, optional (default: "accuracy")
            The metric the model is evaluated by.
        n_jobs: int, optional (default: 1)
            The number of searches to run in parallel.
        cv: int, optional (default: 3)
            The number of folds for the cross validation.
        verbosity: int, optional (default: 1)
            Verbosity level. 1 prints out warm_start points and their scores.
        random_state: int, optional (default: None)
            Sets the random seed.
        warm_start: dict, optional (default: False)
            Dictionary that defines a start point for the optimizer.
        memory: bool, optional (default: True)
            A memory, that saves the evaluation during the optimization to save time when
            optimizer returns to position.
        scatter_init: int, optional (default: False)
            Defines the number n of random positions that should be evaluated with 1/n the
            training data, to find a better initial position.

        Returns
        -------
        None
        """
        # Maps the user-facing optimizer name (Core.optimizer) to its class.
        optimizer_dict = {
            "HillClimbing": HillClimbingOptimizer,
            "StochasticHillClimbing": StochasticHillClimbingOptimizer,
            "TabuSearch": TabuOptimizer,
            "RandomSearch": RandomSearchOptimizer,
            "RandomRestartHillClimbing": RandomRestartHillClimbingOptimizer,
            "RandomAnnealing": RandomAnnealingOptimizer,
            "SimulatedAnnealing": SimulatedAnnealingOptimizer,
            "StochasticTunneling": StochasticTunnelingOptimizer,
            "ParallelTempering": ParallelTemperingOptimizer,
            "ParticleSwarm": ParticleSwarmOptimizer,
            "EvolutionStrategy": EvolutionStrategyOptimizer,
            "Bayesian": BayesianOptimizer,
        }

        _core_ = Core(*args, **kwargs)
        _arg_ = Arguments(**_core_.opt_para)

        # Fail with a readable message instead of a bare, message-less lookup
        # error when the requested optimizer name is unknown. The exception
        # type stays KeyError, so existing callers that catch it still work.
        try:
            optimizer_class = optimizer_dict[_core_.optimizer]
        except KeyError:
            raise KeyError(
                "Unknown optimizer '{}'. Valid choices: {}".format(
                    _core_.optimizer, ", ".join(sorted(optimizer_dict))
                )
            )
        self._optimizer_ = optimizer_class(_core_, _arg_)

        # Position/score history recorded by the optimizer, exposed publicly.
        self.pos_list = self._optimizer_.pos_list
        self.score_list = self._optimizer_.score_list

    def search(self, X, y):
        """Public method for starting the search with the training data (X, y)

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]

        y : array-like, shape = [n_samples] or [n_samples, n_outputs]

        Returns
        -------
        None
        """
        start_time = time.time()
        self._optimizer_._fit(X, y)
        self.score_best = self._optimizer_.score_best

        # Wall-clock duration of the whole search; see get_total_time().
        self.total_time = time.time() - start_time

    def get_total_time(self):
        """Return the wall-clock time in seconds of the last search() call."""
        return self.total_time

    def get_eval_time(self):
        """Return the eval_time recorded by the underlying optimizer."""
        return self._optimizer_.eval_time

    def get_results(self):
        """Return the results collected by the underlying optimizer."""
        return self._optimizer_.results

    def save_report(self):
        # Not implemented yet; kept as a stable public API placeholder.
        pass
117