Passed
Push — master ( 3f2c9d...310ec2 )
by Simon
01:51
created

hyperactive.hyperactive.Hyperactive.search()   A

Complexity

Conditions 4

Size

Total Lines 48
Code Lines 36

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 36
dl 0
loc 48
rs 9.016
c 0
b 0
f 0
cc 4
nop 8

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent as you come to need more, or different, data.

There are several approaches to avoid long parameter lists:

1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
import time
6
import warnings
7
8
from .main_args import MainArgs
9
from .opt_args import Arguments
10
11
from . import (
12
    HillClimbingOptimizer,
13
    StochasticHillClimbingOptimizer,
14
    TabuOptimizer,
15
    RandomSearchOptimizer,
16
    RandomRestartHillClimbingOptimizer,
17
    RandomAnnealingOptimizer,
18
    SimulatedAnnealingOptimizer,
19
    StochasticTunnelingOptimizer,
20
    ParallelTemperingOptimizer,
21
    ParticleSwarmOptimizer,
22
    EvolutionStrategyOptimizer,
23
    BayesianOptimizer,
24
)
25
26
27
def stop_warnings():
    """Globally silence warnings by replacing ``warnings.warn`` with a no-op.

    sklearn emits the same warnings over and over during a long search run;
    this mutes them process-wide. The global monkeypatch is a deliberate
    module-level side effect.
    """

    def warn(*args, **kwargs):
        # Intentionally discard every warning.
        pass

    # Uses the module-level `warnings` import; the previous redundant
    # function-local `import warnings` was removed.
    warnings.warn = warn
35
36
37
class Hyperactive:
    """Top-level API for running a hyperparameter search.

    Parameters
    ----------
    X, y :
        Training data, forwarded to ``MainArgs`` for the objective evaluations.
    verbosity : int, default 2
        Verbosity level forwarded to ``MainArgs``.
    warnings : bool, default False
        If False, globally silence warnings via ``stop_warnings()``.
        (Note: this parameter shadows the module-level ``warnings`` import
        inside ``__init__`` only.)
    random_state : int, default 1
        Random seed forwarded to ``MainArgs``.
    memory : bool, default True
        Memory flag forwarded to ``MainArgs``.
    """

    def __init__(self, X, y, verbosity=2, warnings=False, random_state=1, memory=True):
        self._main_args_ = MainArgs(X, y, verbosity, random_state, memory)

        if not warnings:
            stop_warnings()

        # Maps the user-facing optimizer name to its implementing class.
        self.optimizer_dict = {
            "HillClimbing": HillClimbingOptimizer,
            "StochasticHillClimbing": StochasticHillClimbingOptimizer,
            "TabuSearch": TabuOptimizer,
            "RandomSearch": RandomSearchOptimizer,
            "RandomRestartHillClimbing": RandomRestartHillClimbingOptimizer,
            "RandomAnnealing": RandomAnnealingOptimizer,
            "SimulatedAnnealing": SimulatedAnnealingOptimizer,
            "StochasticTunneling": StochasticTunnelingOptimizer,
            "ParallelTempering": ParallelTemperingOptimizer,
            "ParticleSwarm": ParticleSwarmOptimizer,
            "EvolutionStrategy": EvolutionStrategyOptimizer,
            "Bayesian": BayesianOptimizer,
        }

    def search(
        self,
        search_config,
        max_time=None,
        n_iter=10,
        optimizer="RandomSearch",
        n_jobs=1,
        warm_start=False,
        scatter_init=False,
    ):
        """Run the hyperparameter search.

        Parameters
        ----------
        search_config :
            Search-space/model configuration forwarded to ``MainArgs``.
        max_time : float or None, default None
            Time budget forwarded to ``MainArgs``.
        n_iter : int, default 10
            Number of search iterations.
        optimizer : str, default "RandomSearch"
            Key into ``self.optimizer_dict`` selecting the optimizer class.
        n_jobs : int, default 1
            Number of parallel jobs.
        warm_start, scatter_init :
            Forwarded to ``MainArgs``; semantics defined there.
        """
        start_time = time.time()

        self._main_args_.search_args(
            search_config, max_time, n_iter, optimizer, n_jobs, warm_start, scatter_init
        )
        self._opt_args_ = Arguments(self._main_args_.opt_para)
        optimizer_class = self.optimizer_dict[self._main_args_.optimizer]

        # BUGFIX: ray_ was previously unbound when ray imported successfully
        # but ray.is_initialized() was False, raising NameError at `if ray_:`.
        # Default to False and only enable the ray path when ray is both
        # importable and initialized.
        ray_ = False
        try:
            import ray

            ray_ = ray.is_initialized()
        except ImportError:
            warnings.warn("failed to import ray", ImportWarning)

        if ray_:
            optimizer_class = ray.remote(optimizer_class)
            opts = [
                optimizer_class.remote(self._main_args_, self._opt_args_)
                for job in range(self._main_args_.n_jobs)
            ]
            searches = [
                opt.search.remote(job, ray_=ray_) for job, opt in enumerate(opts)
            ]
            ray.get(searches)
        else:
            self._optimizer_ = optimizer_class(self._main_args_, self._opt_args_)
            self._optimizer_.search()

        # NOTE(review): in the ray branch self._optimizer_ is never assigned,
        # so the attribute reads below would raise AttributeError after a
        # distributed run — TODO confirm how results are meant to be collected
        # from the ray actors.
        self.results_params = self._optimizer_.results_params
        self.results_models = self._optimizer_.results_models

        self.pos_list = self._optimizer_.pos_list
        self.score_list = self._optimizer_.score_list

        self.total_time = time.time() - start_time

    def get_total_time(self):
        """Return the wall-clock duration (seconds) of the last search() call."""
        return self.total_time

    def get_eval_time(self):
        """Return the evaluation time recorded by the underlying optimizer."""
        return self._optimizer_.eval_time

    def save_report(self):
        # Not implemented yet.
        pass