Passed
Pull Request — master (#110)
created by unknown, 12:02 (queued 10:25)

HillClimbing.__init__()   (rated A)

Complexity
  Conditions: 2

Size
  Total Lines: 30
  Code Lines: 26

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric    Value
cc        2
eloc      26
nop       11
dl        0
loc       30
rs        9.256
c         0
b         0
f         0

How to fix: Many Parameters

Methods with many parameters are not only hard to understand, but their parameter lists also tend to become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists; one of the most common, grouping related parameters into a parameter object, is sketched below.
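The sketch below illustrates that idea for a constructor like the one flagged here. The HillClimbingConfig dataclass and the simplified HillClimbing class are hypothetical names used for illustration only; they are not part of hyperactive or of this pull request.

# Hypothetical sketch: group the hill-climbing settings into a parameter object
# so the constructor takes one named bundle instead of many separate arguments.
# HillClimbingConfig and this simplified HillClimbing are illustrative names only.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class HillClimbingConfig:
    """Settings that would otherwise be individual constructor parameters."""

    random_state: Optional[int] = None
    rand_rest_p: float = 0.1
    epsilon: float = 0.01
    distribution: str = "uniform"
    n_neighbours: int = 10
    initialize: dict = field(
        default_factory=lambda: {"grid": 4, "random": 2, "vertices": 4}
    )
    constraints: list = field(default_factory=list)
    n_iter: int = 100


class HillClimbing:
    """Simplified constructor: experiment, search space, and one config object."""

    def __init__(self, experiment=None, search_space=None, config=None):
        self.experiment = experiment
        self.search_space = search_space
        self.config = config if config is not None else HillClimbingConfig()

Note that estimator-style classes often keep flat constructor parameter lists on purpose so that each argument maps to one attribute for get_params/set_params-style introspection, which may be why the wrapper below accepts the long signature despite this warning.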

"""Hill climbing optimizer from gfo."""

from gradient_free_optimizers import HillClimbingOptimizer

from hyperactive.base import BaseOptimizer


class HillClimbing(BaseOptimizer):
    """Hill climbing optimizer.

    Parameters
    ----------
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later in ``add_search``.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    epsilon : float, default=0.01
        The step-size for the climbing.
    distribution : str, default="uniform"
        The type of distribution to sample from.
    n_neighbours : int, default=10
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
        Optional, can be passed later in ``add_search``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    """

    def __init__(
        self,
        experiment=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="uniform",
        n_neighbours=10,
        search_space=None,
        initialize=None,
        constraints=None,
        n_iter=100,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment

        super().__init__()

        # fall back to the documented default initialization scheme
        if initialize is None:
            self._initialize = {"grid": 4, "random": 2, "vertices": 4}
        else:
            self._initialize = initialize

    def get_search_config(self):
        """Get the search configuration.

        Returns
        -------
        dict with str keys
            The search configuration dictionary.
        """
        search_config = super().get_search_config()
        search_config["initialize"] = self._initialize
        return search_config

    def _run(self, experiment, **search_config):
        """Run the optimization search process."""
        n_iter = search_config.pop("n_iter", 100)
        max_time = search_config.pop("max_time", None)

        hcopt = HillClimbingOptimizer(**search_config)

        hcopt.search(
            objective_function=experiment,
            n_iter=n_iter,
            max_time=max_time,
        )
        self.best_params_ = hcopt.best_params()

        return self.best_params_
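For context on what ``_run`` delegates to: the wrapped HillClimbingOptimizer from gradient_free_optimizers is driven with a search space and an objective function. A minimal sketch of using that optimizer directly follows; the toy objective and search space are illustrative placeholders, not code from this pull request.

# Illustrative direct use of the wrapped optimizer; the toy objective and
# search space are assumptions for this sketch, not part of the repository.
import numpy as np
from gradient_free_optimizers import HillClimbingOptimizer


def objective(params):
    # toy objective with a single maximum at x = 0
    return -(params["x"] ** 2)


search_space = {"x": np.arange(-10, 10, 0.1)}

opt = HillClimbingOptimizer(search_space, epsilon=0.01, n_neighbours=10)
opt.search(objective_function=objective, n_iter=100)
# after the search, the optimizer exposes the best parameters found,
# which the wrapper above stores in ``best_params_``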