Passed
Pull Request — master (#110)
by unknown, created 02:18

HillClimbing.__init__()   A

Complexity

Conditions 2

Size

Total Lines 32
Code Lines 28

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 28
dl 0
loc 32
rs 9.208
c 0
b 0
f 0
cc 2
nop 12

How to fix

Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent when you need more, or different, data.

There are several approaches to avoiding long parameter lists; one common refactoring, sketched below, is to group parameters that belong together into a single parameter object.
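A minimal sketch of that idea applied to the constructor flagged above. ``HillClimbingParams`` is a hypothetical name chosen for illustration; it is not part of the hyperactive API.

from dataclasses import dataclass
from typing import Optional


# Hypothetical parameter object: it groups the hill-climbing tuning knobs
# (random_state, rand_rest_p, epsilon, distribution, n_neighbours) that
# always travel together. Illustrative only, not part of hyperactive.
@dataclass(frozen=True)
class HillClimbingParams:
    random_state: Optional[int] = None
    rand_rest_p: float = 0.1
    epsilon: float = 0.01
    distribution: str = "normal"
    n_neighbours: int = 10


# The flagged signature could then shrink from 11 keyword arguments to 7,
# sketched here as a comment only:
#
#     def __init__(self, experiment=None, params=None, search_space=None,
#                  initialize=None, constraints=None, n_iter=100, verbose=False):
#         self.params = params if params is not None else HillClimbingParams()

A frozen dataclass keeps the grouped settings immutable and gives them a single default value, so the individual arguments cannot drift apart as new options are added.

For reference, the flagged source file as analyzed: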

"""Hill climbing optimizer from gfo."""

from gradient_free_optimizers import HillClimbingOptimizer
from hyperactive.base import BaseOptimizer
from skbase.utils.stdout_mute import StdoutMute


class HillClimbing(BaseOptimizer):
    """Hill climbing optimizer.

    Parameters
    ----------
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later in ``add_search``.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    epsilon : float, default=0.01
        The step-size for the climbing.
    distribution : str, default="normal"
        The type of distribution to sample from.
    n_neighbours : int, default=10
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and numpy arrays as values.
        Optional, can be passed later in ``add_search``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.

    Examples
    --------
    Hill climbing applied to scikit-learn parameter tuning:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the hill climbing optimizer:
    >>> from hyperactive.opt import HillClimbing
    >>> import numpy as np
    >>>
    >>> hillclimbing_config = {
    ...     "search_space": {
    ...         "C": np.array([0.01, 0.1, 1, 10]),
    ...         "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> hillclimbing = HillClimbing(sklearn_exp, **hillclimbing_config)

    3. running the hill climbing search:
    >>> best_params = hillclimbing.run()

    Best parameters can also be accessed via the attributes:
    >>> best_params = hillclimbing.best_params_
    """

    def __init__(
        self,
        experiment=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        search_space=None,
        initialize=None,
        constraints=None,
        n_iter=100,
        verbose=False,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

        if initialize is None:
            self._initialize = {"grid": 4, "random": 2, "vertices": 4}
        else:
            self._initialize = initialize

    def get_search_config(self):
        """Get the search configuration.

        Returns
        -------
        dict with str keys
            The search configuration dictionary.
        """
        search_config = super().get_search_config()
        search_config["initialize"] = self._initialize
        return search_config

    def _run(self, experiment, **search_config):
        """Run the optimization search process."""
        n_iter = search_config.pop("n_iter", 100)
        max_time = search_config.pop("max_time", None)

        hcopt = HillClimbingOptimizer(**search_config)

        with StdoutMute(active=not self.verbose):
            hcopt.search(
                objective_function=experiment.score,
                n_iter=n_iter,
                max_time=max_time,
            )
        self.best_params_ = hcopt.best_para

        return self.best_params_
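The docstring above documents ``constraints`` and ``initialize``, but the doctest does not exercise them. The sketch below is illustrative only; it assumes, as in gradient_free_optimizers, that each constraint callable receives the candidate parameter dict, and the constraint function itself is a made-up example.

import numpy as np

from hyperactive.experiment.integrations import SklearnCvExperiment
from hyperactive.opt import HillClimbing
from sklearn.datasets import load_iris
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)
sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)


def moderate_regularization(params):
    # Reject candidates with a very large regularization value for C.
    return params["C"] <= 10


hillclimbing = HillClimbing(
    experiment=sklearn_exp,
    search_space={
        "C": np.array([0.01, 0.1, 1, 10]),
        "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
    },
    initialize={"grid": 4, "random": 2, "vertices": 4},  # documented default
    constraints=[moderate_regularization],
    n_iter=50,
)
best_params = hillclimbing.run()

As in the docstring example, the same result is afterwards available as ``hillclimbing.best_params_``.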