Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

SimulatedAnnealing.__init__()   A

Complexity

Conditions 1

Size

Total Lines 31
Code Lines 29

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 29
dl 0
loc 31
rs 9.184
c 0
b 0
f 0
cc 1
nop 14

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent over time as you need more, or different, data.

There are several approaches to avoid long parameter lists:

1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter
2
3
4
class SimulatedAnnealing(_BaseGFOadapter):
    """Simulated annealing optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    epsilon : float
        The step-size for the climbing.
    distribution : str
        The type of distribution to sample from.
    n_neighbours : int
        The number of neighbours to sample and evaluate before moving to the best
        of those neighbours.
    annealing_rate : float
        The rate at which the temperature is annealed.
    start_temp : float
        The initial temperature.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of SimulatedAnnealing with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the SimulatedAnnealing optimizer:
    >>> from hyperactive.opt import SimulatedAnnealing
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = SimulatedAnnealing(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Simulated Annealing",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        epsilon=0.01,
        distribution="normal",
        n_neighbours=10,
        annealing_rate=0.97,
        start_temp=1,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # Store constructor arguments unmodified and in signature order,
        # per the scikit-learn/skbase convention (``get_params`` depends
        # on attributes mirroring ``__init__`` parameters exactly).
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.epsilon = epsilon
        self.distribution = distribution
        self.n_neighbours = n_neighbours
        self.annealing_rate = annealing_rate
        self.start_temp = start_temp
        self.n_iter = n_iter
        self.verbose = verbose
        self.experiment = experiment

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        # Imported lazily so the adapter module can be imported without
        # gradient_free_optimizers installed.
        from gradient_free_optimizers import SimulatedAnnealingOptimizer

        return SimulatedAnnealingOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        dict with str keys
            The test parameters dictionary.
        """
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        # Second parameter set: exercises non-default temperature settings
        # on top of the base adapter's default experiment.
        more_params = {
            "experiment": experiment,
            "start_temp": 0.33,
            "annealing_rate": 1.01,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
156