hyperactive.optimizers.strategies.optimization_strategy (rating: A)

Complexity
  Total Complexity: 14

Size/Duplication
  Total Lines: 133
  Duplicated Lines: 29.32 %

Importance
  Changes: 0
Metric   Value
wmc      14
eloc     93
dl       39
loc      133
rs       10
c        0
b        0
f        0

4 Methods

Rating   Name                                       Duplication   Size   Complexity
A        BaseOptimizationStrategy.__init__()        0             2      1
A        BaseOptimizationStrategy.setup_search()    39            40     2
C        BaseOptimizationStrategy.search()          0             64     8
A        BaseOptimizationStrategy.max_time()        0             4      2

How to fix: Duplicated Code

Duplicate code is one of the most pungent code smells. A common rule of thumb is to restructure code once it is duplicated in three or more places. The usual remedy for findings like the one flagged in setup_search() is to extract the repeated logic into a single shared method or base class that every duplicate site calls, as sketched below.
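
As a minimal, hypothetical sketch (the class names below are illustrative, not Hyperactive's actual duplicate sites), the shared setup can live in one mixin that every strategy inherits:

class SearchSetupMixin:
    """Hypothetical shared home for the duplicated setup logic."""

    def setup_search(self, objective_function, s_space, n_iter, **settings):
        # store the common search configuration in exactly one place
        self.objective_function = objective_function
        self.s_space = s_space
        self.n_iter = n_iter
        for name, value in settings.items():
            setattr(self, name, value)


class StrategyA(SearchSetupMixin):
    pass


class StrategyB(SearchSetupMixin):
    pass


strategy = StrategyA()
strategy.setup_search(lambda para: 0, {"x": [1, 2, 3]}, n_iter=10, memory=True)
print(strategy.n_iter, strategy.memory)  # 10 True

Collapsing the per-field assignments into a loop is optional; the main win is that both call sites now share a single definition of the setup step.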

"""optimization_strategy module for Hyperactive optimization."""

# Email: [email protected]
# License: MIT License

from .optimizer_attributes import OptimizerAttributes


class BaseOptimizationStrategy(OptimizerAttributes):
    """BaseOptimizationStrategy class."""

    def __init__(self):
        super().__init__()

    def setup_search(
        self,
        objective_function,
        s_space,
        n_iter,
        initialize,
        constraints,
        pass_through,
        callbacks,
        catch,
        max_score,
        early_stopping,
        random_state,
        memory,
        memory_warm_start,
        verbosity,
    ):
        """Set up search parameters."""
        self.objective_function = objective_function
        self.s_space = s_space
        self.n_iter = n_iter

        self.initialize = initialize
        self.constraints = constraints
        self.pass_through = pass_through
        self.callbacks = callbacks
        self.catch = catch
        self.max_score = max_score
        self.early_stopping = early_stopping
        self.random_state = random_state
        self.memory = memory
        self.memory_warm_start = memory_warm_start
        self.verbosity = verbosity

        self._max_time = None

        # note: both branches clear verbosity, so the nested optimizer runs
        # are silenced whether or not "progress_bar" was requested
        if "progress_bar" in self.verbosity:
            self.verbosity = []
        else:
            self.verbosity = []

    @property
    def max_time(self):
        """Max Time function."""
        return self._max_time

    @max_time.setter
    def max_time(self, value):
        """Max Time function."""
        self._max_time = value

        for optimizer_setup in self.optimizer_setup_l:
            optimizer_setup["optimizer"].max_time = value

    def search(self, nth_process, p_bar):
        """Search function."""
        for optimizer_setup in self.optimizer_setup_l:
            hyper_opt = optimizer_setup["optimizer"]
            duration = optimizer_setup["duration"]
            opt_strat_early_stopping = optimizer_setup["early_stopping"]

            if opt_strat_early_stopping:
                early_stopping = opt_strat_early_stopping
            else:
                early_stopping = self.early_stopping

            # each optimizer gets a share of n_iter proportional to its duration,
            # e.g. n_iter=100 with durations [0.3, 0.7] -> 30 and 70 iterations
            n_iter = round(self.n_iter * duration / self.duration_sum)

            # initialize
            if self.best_para is not None:
                initialize = {}
                # note: "warm_start" is never in the freshly created dict,
                # so the else branch below always runs
                if "warm_start" in initialize:
                    initialize["warm_start"].append(self.best_para)
                else:
                    initialize["warm_start"] = [self.best_para]
            else:
                initialize = dict(self.initialize)

            # memory_warm_start
            if self.search_data is not None:
                memory_warm_start = self.search_data
            else:
                memory_warm_start = self.memory_warm_start

            # warm_start_smbo
            if (
                hyper_opt.optimizer_class.optimizer_type == "sequential"
                and self.search_data is not None
            ):
                hyper_opt.opt_params["warm_start_smbo"] = self.search_data

            hyper_opt.setup_search(
                objective_function=self.objective_function,
                s_space=self.s_space,
                n_iter=n_iter,
                initialize=initialize,
                constraints=self.constraints,
                pass_through=self.pass_through,
                callbacks=self.callbacks,
                catch=self.catch,
                max_score=self.max_score,
                early_stopping=early_stopping,
                random_state=self.random_state,
                memory=self.memory,
                memory_warm_start=memory_warm_start,
                verbosity=self.verbosity,
            )

            hyper_opt.search(nth_process, p_bar)

            self._add_result_attributes(
                hyper_opt.best_para,
                hyper_opt.best_score,
                hyper_opt.best_since_iter,
                hyper_opt.eval_times,
                hyper_opt.iter_times,
                hyper_opt.search_data,
                hyper_opt.gfo_optimizer.random_seed,
            )
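
For context, a rough sketch of how BaseOptimizationStrategy is typically exercised through a subclass; the CustomOptimizationStrategy class, its add_optimizer(optimizer, duration=...) method, and Hyperactive's add_search()/run() calls are assumed from the library's documented usage rather than taken from this file:

import numpy as np

from hyperactive import Hyperactive
from hyperactive.optimizers import BayesianOptimizer, RandomSearchOptimizer
from hyperactive.optimizers.strategies import CustomOptimizationStrategy


def objective(opt):
    # simple 1-D objective; Hyperactive maximizes the returned score
    return -(opt["x"] ** 2)


search_space = {"x": list(np.arange(-10, 10, 0.1))}

# compose two optimizers; each duration is that optimizer's share of n_iter,
# split in BaseOptimizationStrategy.search() via
# round(n_iter * duration / duration_sum)
strategy = CustomOptimizationStrategy()
strategy.add_optimizer(RandomSearchOptimizer(), duration=0.3)
strategy.add_optimizer(BayesianOptimizer(), duration=0.7)

hyper = Hyperactive()
hyper.add_search(objective, search_space, n_iter=100, optimizer=strategy)
hyper.run()

With n_iter=100 and durations 0.3 and 0.7, the loop in search() runs the random search stage for roughly 30 iterations and the Bayesian stage for roughly 70, warm-starting the second stage with the best parameters and search data collected by the first.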