Passed
Pull Request — master (#110)
by unknown, created 01:38

HillClimbing.__init__() (A)

Complexity

Conditions 2

Size

Total Lines 32
Code Lines 28

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric   Value
eloc     28
dl       0
loc      32
rs       9.208
c        0
b        0
f        0
cc       2
nop      12

How to fix

Many Parameters

Methods with many parameters are not only hard to understand, but their parameter lists also tend to become inconsistent when you need more or different data.

There are several common approaches to avoid long parameter lists: group related parameters into a single parameter object, pass one configuration object instead of many individual values, or split the method into smaller ones with narrower responsibilities. A sketch of the parameter-object approach is shown below.
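As an illustration only, the following sketch bundles the climbing-specific settings of an optimizer like the one in this file into a dataclass; the names ClimbSettings and optimize are hypothetical and are not part of hyperactive or gradient-free-optimizers.

from dataclasses import dataclass


@dataclass
class ClimbSettings:
    """Hypothetical parameter object bundling climbing-specific settings."""

    epsilon: float = 0.01         # step size of each climbing move
    distribution: str = "normal"  # sampling distribution for neighbours
    n_neighbours: int = 10        # neighbours evaluated per iteration
    rand_rest_p: float = 0.1      # probability of a random iteration


def optimize(experiment, settings=None, n_iter=100):
    """Hypothetical entry point taking one settings object instead of many scalars."""
    settings = settings or ClimbSettings()
    # a real implementation would run the search here, reading
    # settings.epsilon, settings.n_neighbours, and so on
    return settings

Note, however, that estimator-style classes following the scikit-learn / skbase convention (as BaseOptimizer subclasses here do) deliberately keep explicit __init__ parameters so that get_params and set_params can inspect them, so this refactor is not always applicable. The source of the flagged method, HillClimbing.__init__(), is shown below.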

"""Hill climbing optimizer from gfo."""

from gradient_free_optimizers import HillClimbingOptimizer
from hyperactive.base import BaseOptimizer
from skbase.utils.stdout_mute import StdoutMute


class HillClimbing(BaseOptimizer):
    """Hill climbing optimizer.

    Parameters
    ----------
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later in ``add_search``.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    epsilon : float, default=0.01
        The step-size for the climbing.
    distribution : str, default="normal"
        The type of distribution to sample from.
    n_neighbours : int, default=10
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
        Optional, can be passed later in ``add_search``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.

    Examples
    --------
    Hill climbing applied to scikit-learn parameter tuning:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the hill climbing optimizer:
    >>> from hyperactive.opt import HillClimbing
    >>> import numpy as np
    >>>
    >>> hillclimbing_config = {
    ...     "search_space": {
    ...         "C": np.array([0.01, 0.1, 1, 10]),
    ...         "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> hillclimbing = HillClimbing(sklearn_exp, **hillclimbing_config)

    3. running the hill climbing search:
    >>> best_params = hillclimbing.run()

    Best parameters can also be accessed via the attributes:
    >>> best_params = hillclimbing.best_params_
    """

    _tags = {
        "python_dependencies": ["gradient-free-optimizers>=1.5.0"],
    }

    def __init__(
        self,
        experiment=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        search_space=None,
        initialize=None,
        constraints=None,
        n_iter=100,
        verbose=False,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

        # apply the documented default if no initialize scheme was passed
        if initialize is None:
            self._initialize = {"grid": 4, "random": 2, "vertices": 4}
        else:
            self._initialize = initialize

    def get_search_config(self):
        """Get the search configuration.

        Returns
        -------
        dict with str keys
            The search configuration dictionary.
        """
        search_config = super().get_search_config()
        search_config["initialize"] = self._initialize
        # verbose is handled via StdoutMute in _run, not passed to the GFO optimizer
        del search_config["verbose"]
        return search_config

    def _run(self, experiment, **search_config):
        """Run the optimization search process."""
        n_iter = search_config.pop("n_iter", 100)
        max_time = search_config.pop("max_time", None)

        hcopt = HillClimbingOptimizer(**search_config)

        with StdoutMute(active=not self.verbose):
            hcopt.search(
                objective_function=experiment.score,
                n_iter=n_iter,
                max_time=max_time,
            )
        self.best_params_ = hcopt.best_para

        return self.best_params_

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the skbase object.

        ``get_test_params`` is a unified interface point to store
        parameter settings for testing purposes. This function is also
        used in ``create_test_instance`` and ``create_test_instances_and_names``
        to construct test instances.

        ``get_test_params`` should return a single ``dict``, or a ``list`` of ``dict``.

        Each ``dict`` is a parameter configuration for testing,
        and can be used to construct an "interesting" test instance.
        A call to ``cls(**params)`` should
        be valid for all dictionaries ``params`` in the return of ``get_test_params``.

        ``get_test_params`` need not return fixed lists of dictionaries,
        it can also return dynamic or stochastic parameter settings.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return, for use in tests. If no
            special parameters are defined for a value, will return the `"default"` set.

        Returns
        -------
        params : dict or list of dict, default = {}
            Parameters to create testing instances of the class.
            Each dict contains parameters to construct an "interesting" test instance,
            i.e., `MyClass(**params)` or `MyClass(**params[i])` creates a valid test
            instance. `create_test_instance` uses the first (or only) dictionary in
            `params`.
        """
        import numpy as np
        from hyperactive.experiment.integrations import SklearnCvExperiment

        sklearn_exp = SklearnCvExperiment.create_test_instance()
        params_sklearn = {
            "experiment": sklearn_exp,
            "search_space": {
                "C": np.array([0.01, 0.1, 1, 10]),
                "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
            },
            "n_iter": 100,
        }

        from hyperactive.experiment.toy import Ackley

        ackley_exp = Ackley.create_test_instance()
        params_ackley = {
            "experiment": ackley_exp,
            "search_space": {
                "x0": np.linspace(-5, 5, 10),
                "x1": np.linspace(-5, 5, 10),
            },
            "n_iter": 100,
        }

        return [params_sklearn, params_ackley]
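
As a quick illustration of the contract described in the ``get_test_params`` docstring above, each returned dict should construct a valid instance. A minimal sketch of how the test parameter sets might be consumed, assuming only the HillClimbing class shown in the listing:

for params in HillClimbing.get_test_params():
    # each dict must construct a valid instance, per the docstring contract
    hillclimbing = HillClimbing(**params)
    # run() as used in the class docstring example
    best_params = hillclimbing.run()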