Passed — Push to master ( 588022...8a2a5a ) by Simon, created 01:36

hyperactive.opt.gfo._repulsing_hillclimbing — Rating: A

Complexity

Total Complexity 3

Size/Duplication

Total Lines 156
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
wmc 3
eloc 52
dl 0
loc 156
rs 10
c 0
b 0
f 0

3 Methods

Rating   Name   Duplication   Size   Complexity  
A RepulsingHillClimbing._get_gfo_class() 0 11 1
A RepulsingHillClimbing.__init__() 0 29 1
A RepulsingHillClimbing.get_test_params() 0 24 1
"""Repulsing hill climbing optimizer from gfo."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class RepulsingHillClimbing(_BaseGFOadapter):
    """Repulsing hill climbing optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a list or numpy array of candidate values as values.
        Optional, can be passed later via ``set_params``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    epsilon : float, default=0.01
        The step-size for the climbing.
    distribution : str, default="normal"
        The type of distribution to sample from.
    n_neighbours : int, default=10
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    repulsion_factor : float, default=5
        The factor to control the repulsion of the hill climbing process.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Repulsing hill climbing applied to scikit-learn parameter tuning:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the repulsing hill climbing optimizer:
    >>> from hyperactive.opt import RepulsingHillClimbing
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> hillclimbing = RepulsingHillClimbing(experiment=sklearn_exp, **config)

    3. running the repulsing hill climbing search:
    >>> best_params = hillclimbing.run()

    Best parameters can also be accessed via the attributes:
    >>> best_params = hillclimbing.best_params_
    """

    _tags = {
        "info:name": "Repulsing Hill Climbing",
        "info:local_vs_global": "mixed",  # "local", "mixed", "global"
        "info:explore_vs_exploit": "exploit",  # "explore", "exploit", "mixed"
        "info:compute": "low",  # "low", "middle", "high"
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        repulsion_factor=5,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.repulsion_factor = repulsion_factor
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        from gradient_free_optimizers import RepulsingHillClimbingOptimizer

        return RepulsingHillClimbingOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        list of dict with str keys
            The test parameter dictionaries.
        """
        import numpy as np

        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "repulsion_factor": 7,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
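
Note on the ``constraints`` parameter: the docstring above describes it only abstractly. The following is a minimal, hypothetical sketch, not part of this module; it assumes each constraint callable receives the candidate parameters as a dict and returns True to accept or False to reject, as the docstring suggests.

from sklearn.datasets import load_iris
from sklearn.svm import SVC

from hyperactive.experiment.integrations import SklearnCvExperiment
from hyperactive.opt import RepulsingHillClimbing

X, y = load_iris(return_X_y=True)
sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)


def small_product(params):
    # Hypothetical constraint: accept only candidates where C * gamma stays small.
    # Assumes the callable receives the candidate parameters as a dict.
    return params["C"] * params["gamma"] <= 1


hillclimbing = RepulsingHillClimbing(
    search_space={
        "C": [0.01, 0.1, 1, 10],
        "gamma": [0.0001, 0.01, 0.1, 1, 10],
    },
    constraints=[small_product],
    repulsion_factor=7,  # controls how strongly the search is repulsed, per the docstring
    n_iter=50,
    experiment=sklearn_exp,
)
best_params = hillclimbing.run()

If the adapter forwards constraints to gradient_free_optimizers unchanged, candidates that violate the constraint should be discarded before the cross-validation experiment is evaluated.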