Passed: Push to master (6fb8dd...219173) by Simon, 03:54 (queued 12s)

BaseOptimizationStrategy.setup_search() (rated A)

Complexity
    Conditions: 2

Size
    Total Lines: 37
    Code Lines: 32

Duplication
    Lines: 37
    Ratio: 100 %

Importance
    Changes: 0

Metric    Value
cc        2
eloc      32
nop       14
dl        37
loc       37
rs        9.112
c         0
b         0
f         0

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent when you need more or different data.

There are several approaches to avoiding long parameter lists, for example grouping related values into a parameter object, passing an existing object whole instead of unpacking it into individual arguments, or splitting the method so that each part needs less data. A parameter-object sketch for this method follows.
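As an illustration of the parameter-object approach, the sketch below bundles the 14 arguments of setup_search() into a single dataclass. The SearchConfig name, the field types, and the defaults are assumptions made for this sketch; they are not part of Hyperactive's actual API.

from dataclasses import dataclass, field, fields
from typing import Any, Callable, Optional


@dataclass
class SearchConfig:
    # Hypothetical parameter object grouping the values that
    # setup_search() currently receives as 14 separate parameters.
    objective_function: Callable
    s_space: Any
    n_iter: int
    initialize: Optional[dict] = None
    pass_through: Optional[dict] = None
    callbacks: Optional[dict] = None
    catch: Optional[dict] = None
    max_score: Optional[float] = None
    early_stopping: Optional[dict] = None
    random_state: Optional[int] = None
    memory: bool = True
    memory_warm_start: Any = None
    verbosity: list = field(default_factory=list)


class BaseOptimizationStrategy:  # simplified stand-in for the real class
    def setup_search(self, config: SearchConfig):
        # One structured argument replaces the long assignment list:
        # copy every config field onto the strategy instance.
        for f in fields(config):
            setattr(self, f.name, getattr(config, f.name))
        self._max_time = None

A caller would then construct one SearchConfig(objective_function=..., s_space=..., n_iter=...) and pass it through the call chain, so adding or removing a setting changes one dataclass instead of every signature that forwards it.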

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


from .optimizer_attributes import OptimizerAttributes


class BaseOptimizationStrategy(OptimizerAttributes):
    def __init__(self):
        super().__init__()

    # Code duplication reported here: this code seems to be duplicated in your project.
    def setup_search(
        self,
        objective_function,
        s_space,
        n_iter,
        initialize,
        pass_through,
        callbacks,
        catch,
        max_score,
        early_stopping,
        random_state,
        memory,
        memory_warm_start,
        verbosity,
    ):
        self.objective_function = objective_function
        self.s_space = s_space
        self.n_iter = n_iter

        self.initialize = initialize
        self.pass_through = pass_through
        self.callbacks = callbacks
        self.catch = catch
        self.max_score = max_score
        self.early_stopping = early_stopping
        self.random_state = random_state
        self.memory = memory
        self.memory_warm_start = memory_warm_start
        self.verbosity = verbosity

        self._max_time = None

        if "progress_bar" in self.verbosity:
            self.verbosity = []
        else:
            self.verbosity = []

    @property
    def max_time(self):
        return self._max_time

    @max_time.setter
    def max_time(self, value):
        self._max_time = value

        for optimizer_setup in self.optimizer_setup_l:
            optimizer_setup["optimizer"].max_time = value

    def search(self, nth_process, p_bar):
        for optimizer_setup in self.optimizer_setup_l:
            hyper_opt = optimizer_setup["optimizer"]
            duration = optimizer_setup["duration"]
            opt_strat_early_stopping = optimizer_setup["early_stopping"]

            if opt_strat_early_stopping:
                early_stopping = opt_strat_early_stopping
            else:
                early_stopping = self.early_stopping

            n_iter = round(self.n_iter * duration / self.duration_sum)

            # initialize
            if self.best_para is not None:
                initialize = {}
                if "warm_start" in initialize:
                    initialize["warm_start"].append(self.best_para)
                else:
                    initialize["warm_start"] = [self.best_para]
            else:
                initialize = dict(self.initialize)

            # memory_warm_start
            if self.search_data is not None:
                memory_warm_start = self.search_data
            else:
                memory_warm_start = self.memory_warm_start

            # warm_start_smbo
            if (
                hyper_opt.optimizer_class.optimizer_type == "sequential"
                and self.search_data is not None
            ):
                hyper_opt.opt_params["warm_start_smbo"] = self.search_data

            hyper_opt.setup_search(
                objective_function=self.objective_function,
                s_space=self.s_space,
                n_iter=n_iter,
                initialize=initialize,
                pass_through=self.pass_through,
                callbacks=self.callbacks,
                catch=self.catch,
                max_score=self.max_score,
                early_stopping=early_stopping,
                random_state=self.random_state,
                memory=self.memory,
                memory_warm_start=memory_warm_start,
                verbosity=self.verbosity,
            )

            hyper_opt.search(nth_process, p_bar)

            self._add_result_attributes(
                hyper_opt.best_para,
                hyper_opt.best_score,
                hyper_opt.best_since_iter,
                hyper_opt.eval_times,
                hyper_opt.iter_times,
                hyper_opt.search_data,
                hyper_opt.gfo_optimizer.random_seed,
            )
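The same parameter object would also shrink the forwarding inside search(), where most of the stored settings are passed on to each optimizer unchanged. The sketch below reuses the hypothetical SearchConfig from above together with a hypothetical self.config attribute stored by setup_search(); the initialize, memory_warm_start, and warm_start_smbo handling is omitted for brevity.

import dataclasses


class BaseOptimizationStrategy:  # simplified stand-in for the real class
    def search(self, nth_process, p_bar):
        for optimizer_setup in self.optimizer_setup_l:
            hyper_opt = optimizer_setup["optimizer"]
            duration = optimizer_setup["duration"]

            # Derive a per-optimizer config: copy the stored settings and
            # override only the fields that differ for this optimizer.
            per_opt_config = dataclasses.replace(
                self.config,  # hypothetical SearchConfig saved by setup_search()
                n_iter=round(self.config.n_iter * duration / self.duration_sum),
                early_stopping=optimizer_setup["early_stopping"]
                or self.config.early_stopping,
            )

            hyper_opt.setup_search(per_opt_config)
            hyper_opt.search(nth_process, p_bar)

With this shape, the 14 keyword arguments in the inner hyper_opt.setup_search(...) call collapse to a single object, and only n_iter and early_stopping are spelled out where they actually differ.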