GridSearchSk._check_param_grid()   C
last analyzed

Complexity
    Conditions: 9

Size
    Total Lines: 21
    Code Lines: 13

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0

Metric   Value
eloc     13
dl       0
loc      21
rs       6.6666
c        0
b        0
f        0
cc       9
nop      2
"""Grid search optimizer."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from collections.abc import Sequence

import numpy as np
from sklearn.model_selection import ParameterGrid

from hyperactive.base import BaseOptimizer


class GridSearchSk(BaseOptimizer):
    """Grid search optimizer, with backend selection and sklearn style parameter grid.

    Parameters
    ----------
    param_grid : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and lists or numpy arrays of candidate values as values.
    error_score : float, default=np.nan
        The score to assign if an error occurs during the evaluation
        of a parameter set.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Example
    -------
    Grid search applied to scikit-learn parameter tuning:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the grid search optimizer:
    >>> from hyperactive.opt import GridSearchSk as GridSearch
    >>> param_grid = {
    ...     "C": [0.01, 0.1, 1, 10],
    ...     "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ... }
    >>> grid_search = GridSearch(param_grid, experiment=sklearn_exp)

    3. running the grid search:
    >>> best_params = grid_search.solve()

    Best parameters can also be accessed via the attributes:
    >>> best_params = grid_search.best_params_
    """

    def __init__(
        self,
        param_grid=None,
        error_score=np.nan,
        experiment=None,
    ):
        self.experiment = experiment
        self.param_grid = param_grid
        self.error_score = error_score

        super().__init__()

    def _check_param_grid(self, param_grid):
        """Validate ``param_grid``; vendored from sklearn 1.0.2, before it was removed."""
        if hasattr(param_grid, "items"):
            param_grid = [param_grid]

        for p in param_grid:
            for name, v in p.items():
                if isinstance(v, np.ndarray) and v.ndim > 1:
                    raise ValueError("Parameter array should be one-dimensional.")

                if isinstance(v, str) or not isinstance(v, (np.ndarray, Sequence)):
                    raise ValueError(
                        f"Parameter grid for parameter ({name}) needs to"
                        f" be a list or numpy array, but got ({type(v)})."
                        " Single values need to be wrapped in a list"
                        " with one element."
                    )

                if len(v) == 0:
                    raise ValueError(
                        f"Parameter values for parameter ({name}) need "
                        "to be a non-empty sequence."
                    )

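    # Note: the check above accepts sklearn-style grids such as
    # {"C": [0.01, 0.1], "gamma": [0.1, 1]} or {"x0": np.linspace(-5, 5, 10)},
    # and raises ValueError for bare scalars (e.g. {"C": 1}), strings,
    # empty sequences, and arrays with more than one dimension.
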
    def _solve(self, experiment, param_grid, error_score):
        """Run the optimization search process."""
        self._check_param_grid(param_grid)
        candidate_params = list(ParameterGrid(param_grid))

        scores = []
        for candidate_param in candidate_params:
            try:
                score = experiment(**candidate_param)
            except Exception:  # noqa: B904
                # Catch all exceptions and assign error_score
                score = error_score
            scores.append(score)

        best_index = np.argmin(scores)
        best_params = candidate_params[best_index]

        self.best_index_ = best_index
        self.best_score_ = scores[best_index]

        return best_params

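    # Note: ParameterGrid expands the dict into the cross-product of candidate
    # settings, e.g. {"C": [1, 10], "gamma": [0.1, 1]} yields 4 candidates.
    # Failed evaluations receive ``error_score``, and the candidate with the
    # lowest score is returned.
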
    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the skbase object.

        ``get_test_params`` is a unified interface point to store
        parameter settings for testing purposes. This function is also
        used in ``create_test_instance`` and ``create_test_instances_and_names``
        to construct test instances.

        ``get_test_params`` should return a single ``dict``, or a ``list`` of ``dict``.

        Each ``dict`` is a parameter configuration for testing,
        and can be used to construct an "interesting" test instance.
        A call to ``cls(**params)`` should
        be valid for all dictionaries ``params`` in the return of ``get_test_params``.

        ``get_test_params`` need not return fixed lists of dictionaries;
        it can also return dynamic or stochastic parameter settings.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return, for use in tests. If no
            special parameters are defined for a value, the `"default"` set is returned.

        Returns
        -------
        params : dict or list of dict, default = {}
            Parameters to create testing instances of the class.
            Each dict contains parameters to construct an "interesting" test instance,
            i.e., `MyClass(**params)` or `MyClass(**params[i])` creates a valid test
            instance. `create_test_instance` uses the first (or only) dictionary
            in `params`.
        """
        from hyperactive.experiment.integrations import SklearnCvExperiment

        sklearn_exp = SklearnCvExperiment.create_test_instance()
        param_grid = {
            "C": [0.01, 0.1, 1, 10],
            "gamma": [0.0001, 0.01, 0.1, 1, 10],
        }
        params_sklearn = {
            "experiment": sklearn_exp,
            "param_grid": param_grid,
        }

        from hyperactive.experiment.toy import Ackley

        ackley_exp = Ackley.create_test_instance()
        param_grid = {
            "x0": np.linspace(-5, 5, 10),
            "x1": np.linspace(-5, 5, 10),
        }
        params_ackley = {
            "experiment": ackley_exp,
            "param_grid": param_grid,
        }

        return [params_sklearn, params_ackley]
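
For reference, a minimal end-to-end sketch of running this optimizer on the Ackley toy experiment, using only the imports and attributes shown above (the grid mirrors the second configuration in ``get_test_params``):

import numpy as np

from hyperactive.experiment.toy import Ackley
from hyperactive.opt import GridSearchSk

ackley_exp = Ackley.create_test_instance()
param_grid = {
    "x0": np.linspace(-5, 5, 10),
    "x1": np.linspace(-5, 5, 10),
}

grid_search = GridSearchSk(param_grid=param_grid, experiment=ackley_exp)
best_params = grid_search.solve()  # evaluates all 100 grid points

# results are also exposed as attributes after solve()
print(best_params)
print(grid_search.best_score_, grid_search.best_index_)

Since these are the same configurations returned by ``get_test_params``, ``GridSearchSk.create_test_instance()`` should yield an equivalent, ready-to-solve instance.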