Passed
Push — master ( 199200...2b2f7b )
by Simon
04:29
created

BaseOptimizationStrategy.setup_search()   A

Complexity

Conditions 2

Size

Total Lines 37
Code Lines 32

Duplication

Lines 37
Ratio 100 %

Importance

Changes 0
Metric Value
cc 2
eloc 32
nop 14
dl 37
loc 37
rs 9.112
c 0
b 0
f 0

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more, or different data.

There are several approaches to avoid long parameter lists:

1
# Author: Simon Blanke
2
# Email: [email protected]
3
# License: MIT License
4
5
6
from .optimizer_attributes import OptimizerAttributes
7
8
9
class BaseOptimizationStrategy(OptimizerAttributes):
    """Run a chain of optimizers, splitting the total iteration budget
    across them and carrying the best result forward as a warm start.

    NOTE(review): this class expects ``optimizer_setup_l`` (a list of dicts
    with ``"optimizer"`` and ``"duration"`` keys) and ``duration_sum`` to be
    provided elsewhere (presumably by a subclass or setup step) before
    ``search`` is called — confirm against the callers.
    """

    def __init__(self):
        super().__init__()

    def setup_search(
        self,
        objective_function,
        s_space,
        n_iter,
        initialize,
        pass_through,
        callbacks,
        catch,
        max_score,
        early_stopping,
        random_state,
        memory,
        memory_warm_start,
        verbosity,
    ):
        """Store the search configuration for later distribution to each
        optimizer's own ``setup_search`` call in :meth:`search`.

        The long parameter list mirrors the per-optimizer ``setup_search``
        signature and is forwarded one-to-one, so it is kept unchanged for
        caller compatibility.
        """
        self.objective_function = objective_function
        self.s_space = s_space
        self.n_iter = n_iter

        self.initialize = initialize
        self.pass_through = pass_through
        self.callbacks = callbacks
        self.catch = catch
        self.max_score = max_score
        self.early_stopping = early_stopping
        self.random_state = random_state
        self.memory = memory
        self.memory_warm_start = memory_warm_start
        self.verbosity = verbosity

        # Wall-clock budget; None means unlimited until the setter is used.
        self._max_time = None

        # BUG FIX (dead conditional): the original tested
        # `"progress_bar" in self.verbosity` but BOTH branches assigned [].
        # The per-optimizer verbosity is suppressed unconditionally, so the
        # dead branch is removed.
        self.verbosity = []

    @property
    def max_time(self):
        """Time budget shared by every optimizer in the strategy."""
        return self._max_time

    @max_time.setter
    def max_time(self, value):
        # Propagate the budget to every optimizer in the chain so each one
        # respects the same wall-clock limit.
        self._max_time = value
        for optimizer_setup in self.optimizer_setup_l:
            optimizer_setup["optimizer"].max_time = value

    def search(self, nth_process, p_bar):
        """Run each optimizer in order, giving it a share of ``n_iter``
        proportional to its ``"duration"`` weight, and chain results by
        warm-starting each optimizer with the best parameters and search
        data collected so far.
        """
        for optimizer_setup in self.optimizer_setup_l:
            hyper_opt = optimizer_setup["optimizer"]
            duration = optimizer_setup["duration"]

            # This optimizer's share of the total iteration budget.
            n_iter = round(self.n_iter * duration / self.duration_sum)

            # initialize: seed the next optimizer with the best parameters
            # found so far.
            if self.best_para is not None:
                # BUG FIX: the original assigned `initialize = {}` and then
                # tested `"warm_start" in initialize`, which is always False
                # on a freshly-created empty dict — user-supplied initialize
                # entries were silently discarded and the merge branch was
                # unreachable. Start from a copy of self.initialize and
                # merge instead.
                initialize = dict(self.initialize)
                if "warm_start" in initialize:
                    # Rebuild the list instead of appending so the caller's
                    # self.initialize["warm_start"] list is never mutated.
                    initialize["warm_start"] = list(
                        initialize["warm_start"]
                    ) + [self.best_para]
                else:
                    initialize["warm_start"] = [self.best_para]
            else:
                initialize = dict(self.initialize)

            # memory_warm_start: feed the search data collected by earlier
            # optimizers into the next one.
            if self.search_data is not None:
                memory_warm_start = self.search_data
            else:
                memory_warm_start = self.memory_warm_start

            # warm_start_smbo: sequential (SMBO-style) optimizers can also
            # consume the collected search data directly.
            if (
                hyper_opt.optimizer_class.optimizer_type == "sequential"
                and self.search_data is not None
            ):
                hyper_opt.opt_params["warm_start_smbo"] = self.search_data

            hyper_opt.setup_search(
                objective_function=self.objective_function,
                s_space=self.s_space,
                n_iter=n_iter,
                initialize=initialize,
                pass_through=self.pass_through,
                callbacks=self.callbacks,
                catch=self.catch,
                max_score=self.max_score,
                early_stopping=self.early_stopping,
                random_state=self.random_state,
                memory=self.memory,
                memory_warm_start=memory_warm_start,
                verbosity=self.verbosity,
            )

            hyper_opt.search(nth_process, p_bar)

            # Fold this optimizer's results into the strategy-level
            # attributes (best_para, best_score, search_data, ...).
            self._add_result_attributes(
                hyper_opt.best_para,
                hyper_opt.best_score,
                hyper_opt.best_since_iter,
                hyper_opt.eval_times,
                hyper_opt.iter_times,
                hyper_opt.search_data,
                hyper_opt.gfo_optimizer.random_seed,
            )
119