Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

RandomRestartHillClimbing.__init__()   A

Complexity

Conditions 1

Size

Total Lines 29
Code Lines 27

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 27
dl 0
loc 29
rs 9.232
c 0
b 0
f 0
cc 1
nop 13

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter
2
3
4
class RandomRestartHillClimbing(_BaseGFOadapter):
    """Random restart hill climbing optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    epsilon : float
        The step-size for the climbing.
    distribution : str
        The type of distribution to sample from.
    n_neighbours : int
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    n_iter_restart : int
        The number of iterations after which to restart at a random position.
    n_iter : int, default=100
        The number of iterations to run the optimizer for.
    verbose : bool, default=False
        If True, enable verbose output from the optimizer backend.
    experiment : optional
        The experiment to optimize parameters for; see the example below.

    Examples
    --------
    Basic usage of RandomRestartHillClimbing with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the RandomRestartHillClimbing optimizer:
    >>> from hyperactive.opt import RandomRestartHillClimbing
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = RandomRestartHillClimbing(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Random Restart Hill Climbing",
        "info:local_vs_global": "local",
        "info:explore_vs_exploit": "mixed",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        # NOTE(review): documented as int, but the default is the fractional
        # value 0.5 — confirm the GFO backend accepts a float here.
        n_iter_restart=0.5,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # Store all constructor arguments unmodified, in signature order,
        # as expected by the adapter/parameter-inspection machinery.
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.n_iter_restart = n_iter_restart
        self.n_iter = n_iter
        self.verbose = verbose
        self.experiment = experiment

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        # Imported lazily so the adapter can be declared without the
        # gradient_free_optimizers dependency installed.
        from gradient_free_optimizers import RandomRestartHillClimbingOptimizer

        return RandomRestartHillClimbingOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return; currently unused.

        Returns
        -------
        list of dict with str keys
            The test parameters: the base adapter's parameter sets plus one
            extra set exercising ``n_iter_restart`` on a small search space.
        """
        params = super().get_test_params()
        # Reuse the experiment object from the base parameter set so the
        # extra set runs against the same experiment fixture.
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "n_iter_restart": 2,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
144