Passed
Push — master ( 3e621d...6f6c3c )
by Simon
01:26
created

hyperactive.hyperactive.Hyperactive.search()   B

Complexity

Conditions 4

Size

Total Lines 54
Code Lines 37

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 37
dl 0
loc 54
rs 8.9919
c 0
b 0
f 0
cc 4
nop 8

How to fix   Long Method    Many Parameters   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

A commonly applied refactoring here is Extract Method: move a coherent part of the body into a new, well-named method.

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more, or different data.

There are several approaches to avoid long parameter lists, such as grouping related parameters into a single parameter object.

1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
import time
6
import warnings
7
8
from .main_args import MainArgs
9
from .opt_args import Arguments
10
11
from . import (
12
    HillClimbingOptimizer,
13
    StochasticHillClimbingOptimizer,
14
    TabuOptimizer,
15
    RandomSearchOptimizer,
16
    RandomRestartHillClimbingOptimizer,
17
    RandomAnnealingOptimizer,
18
    SimulatedAnnealingOptimizer,
19
    StochasticTunnelingOptimizer,
20
    ParallelTemperingOptimizer,
21
    ParticleSwarmOptimizer,
22
    EvolutionStrategyOptimizer,
23
    BayesianOptimizer,
24
)
25
26
27
def stop_warnings():
    """Silence all Python warnings by monkey-patching ``warnings.warn``.

    sklearn emits the same warnings many times during a search, so the
    whole warning mechanism is replaced with a no-op rather than filtered.
    NOTE: this is process-global and also silences this module's own
    ``warnings.warn`` calls (e.g. the ray import warning in ``search``).
    """

    def warn(*args, **kwargs):
        # No-op replacement: accepts any signature, reports nothing.
        pass

    # ``warnings`` is already imported at module level; the original
    # re-imported it locally, which was redundant.
    warnings.warn = warn
35
36
37
class Hyperactive:
    """Entry point for running a hyperparameter search.

    Holds the training data and global settings, maps optimizer names to
    optimizer classes, and dispatches ``search`` either locally or across
    ray workers when a ray cluster is initialized.
    """

    def __init__(self, X, y, memory=True, random_state=1, verbosity=2, warnings=False):
        # X, y: training data forwarded to every objective evaluation.
        # warnings=False (the default) silences all Python warnings
        # process-wide via ``stop_warnings``.
        self._main_args_ = MainArgs(X, y, memory, random_state, verbosity)

        if not warnings:
            stop_warnings()

        # Maps the string accepted by ``search(optimizer=...)`` to its class.
        self.optimizer_dict = {
            "HillClimbing": HillClimbingOptimizer,
            "StochasticHillClimbing": StochasticHillClimbingOptimizer,
            "TabuSearch": TabuOptimizer,
            "RandomSearch": RandomSearchOptimizer,
            "RandomRestartHillClimbing": RandomRestartHillClimbingOptimizer,
            "RandomAnnealing": RandomAnnealingOptimizer,
            "SimulatedAnnealing": SimulatedAnnealingOptimizer,
            "StochasticTunneling": StochasticTunnelingOptimizer,
            "ParallelTempering": ParallelTemperingOptimizer,
            "ParticleSwarm": ParticleSwarmOptimizer,
            "EvolutionStrategy": EvolutionStrategyOptimizer,
            "Bayesian": BayesianOptimizer,
        }

    def search(
        self,
        search_config,
        max_time=None,
        n_iter=10,
        optimizer="RandomSearch",
        n_jobs=1,
        warm_start=False,
        scatter_init=False,
    ):
        """Run the hyperparameter search and store results on the instance.

        Parameters are forwarded to ``MainArgs.search_args``; ``optimizer``
        must be a key of ``self.optimizer_dict``. After the call,
        ``results_params``, ``results_models``, ``pos_list``, ``score_list``
        and ``total_time`` are available on the instance.
        """
        start_time = time.time()

        self._main_args_.search_args(
            search_config, max_time, n_iter, optimizer, n_jobs, warm_start, scatter_init
        )
        self._opt_args_ = Arguments(self._main_args_.opt_para)
        optimizer_class = self.optimizer_dict[self._main_args_.optimizer]

        if self._ray_available():
            self._search_distributed(optimizer_class)
        else:
            self._search_local(optimizer_class)

        self._collect_results()
        self.total_time = time.time() - start_time

    def _ray_available(self):
        # True only when ray is importable AND a cluster is initialized.
        try:
            import ray
        except ImportError:
            warnings.warn("failed to import ray", ImportWarning)
            return False
        return ray.is_initialized()

    def _search_distributed(self, optimizer_class):
        # Fan one optimizer actor out per job on the ray cluster.
        import ray

        remote_class = ray.remote(optimizer_class)
        opts = [
            remote_class.remote(self._main_args_, self._opt_args_)
            for _ in range(self._main_args_.n_jobs)
        ]
        searches = [
            opt.search.remote(job, ray_=True) for job, opt in enumerate(opts)
        ]
        ray.get(searches)
        # NOTE(review): ``self._optimizer_`` is never assigned on this path,
        # so ``_collect_results`` raises AttributeError afterwards. This
        # matches the original code's behavior — confirm how distributed
        # results are meant to be gathered before changing it.

    def _search_local(self, optimizer_class):
        # Single-process search; the optimizer handles n_jobs internally.
        self._optimizer_ = optimizer_class(self._main_args_, self._opt_args_)
        self._optimizer_.search()

    def _collect_results(self):
        # Copy the optimizer's results onto this instance for the caller.
        self.results_params = self._optimizer_.results_params
        self.results_models = self._optimizer_.results_models

        self.pos_list = self._optimizer_.pos_list
        self.score_list = self._optimizer_.score_list

    def get_total_time(self):
        """Return wall-clock duration of the last ``search`` call in seconds."""
        return self.total_time

    def get_eval_time(self):
        """Return the time the optimizer spent evaluating the objective."""
        return self._optimizer_.eval_time

    def save_report(self):
        # Not implemented yet.
        pass
126