Passed · Push to master (613b45...e6e7ce) by Simon · 01:49 (queued 15s)

HillClimbing.__init__() (grade A)

Complexity
  Conditions: 1

Size
  Total Lines: 27
  Code Lines: 25

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric   Value
eloc     25
dl       0
loc      27
rs       9.28
c        0
b        0
f        0
cc       1
nop      12
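
The nop value of 12 is the parameter count of __init__ (including self), which is what raises the Many Parameters issue below. As a rough cross-check, comparable size and complexity figures can be reproduced locally, for instance with the radon library. The sketch below assumes radon is installed and uses a hypothetical file path; the report's metric names (eloc, dl, rs) are tool-specific and need not map one-to-one onto radon's output.

from radon.complexity import cc_visit
from radon.raw import analyze

# Hypothetical path; point this at wherever the module lives locally.
source = open("hyperactive/opt/_hill_climbing.py").read()

raw = analyze(source)
print("total lines:", raw.loc)   # compare with loc = 27
print("code lines:", raw.sloc)   # roughly compare with eloc = 25

for block in cc_visit(source):
    # cyclomatic complexity per function/method; compare with cc = 1
    print(block.name, block.complexity)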

How to fix: Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

- introduce a parameter object that groups arguments which belong together,
- preserve the whole object rather than passing several of its fields, and
- replace a parameter with a method call when the callee can derive the value itself.

A sketch of the first approach follows; the flagged source is shown after it.
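
As an illustration only, here is a minimal parameter-object sketch for this constructor. HillClimbingConfig is a hypothetical name, not part of hyperactive's API.

from dataclasses import dataclass


@dataclass
class HillClimbingConfig:
    """Hypothetical grouping of HillClimbing's algorithmic knobs."""

    rand_rest_p: float = 0.1
    epsilon: float = 0.01
    distribution: str = "normal"
    n_neighbours: int = 10
    n_iter: int = 100
    verbose: bool = False


# __init__ would then shrink to the task-specific arguments plus one object:
#     def __init__(self, search_space=None, initialize=None, constraints=None,
#                  random_state=None, experiment=None, config=None):
#         self.config = config if config is not None else HillClimbingConfig()

Note that scikit-learn-style ``get_params``/``set_params`` conventions favor explicit keyword arguments, which is likely why the constructor is written as it is; the sketch trades that convention for a shorter signature.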

"""Hill climbing optimizer from gfo."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class HillClimbing(_BaseGFOadapter):
    """Hill climbing optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
        Optional, can be passed later via ``set_params``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    epsilon : float, default=0.01
        The step-size for the climbing.
    distribution : str, default="normal"
        The type of distribution to sample from.
    n_neighbours : int, default=10
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Hill climbing applied to scikit-learn parameter tuning:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the hill climbing optimizer:
    >>> from hyperactive.opt import HillClimbing
    >>> import numpy as np
    >>>
    >>> hillclimbing_config = {
    ...     "search_space": {
    ...         "C": np.array([0.01, 0.1, 1, 10]),
    ...         "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> hillclimbing = HillClimbing(experiment=sklearn_exp, **hillclimbing_config)

    3. running the hill climbing search:
    >>> best_params = hillclimbing.run()

    Best parameters can also be accessed via the attributes:
    >>> best_params = hillclimbing.best_params_
    """

    _tags = {
        "info:name": "Hill Climbing",
        "info:local_vs_global": "local",  # "local", "mixed", "global"
        "info:explore_vs_exploit": "exploit",  # "explore", "exploit", "mixed"
        "info:compute": "low",  # "low", "middle", "high"
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        from gradient_free_optimizers import HillClimbingOptimizer

        return HillClimbingOptimizer
126