Passed: Push — master (588022...8a2a5a) by Simon, created 01:36

ParallelTempering.__init__() (rated A)

Complexity:  Conditions 1
Size:        Total Lines 25, Code Lines 23
Duplication: Lines 0, Ratio 0 %
Importance:  Changes 0
Metric   Value
eloc     23       effective lines of code (matches Code Lines above)
dl       0        duplicated lines (matches Duplication above)
loc      25       lines of code (matches Total Lines above)
rs       9.328
c        0
b        0
f        0
cc       1        cyclomatic complexity (matches Conditions above)
nop      11       number of parameters (self plus ten explicit arguments)
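The cc value of 1 matches Conditions above (the method has no branching), and nop, the number of parameters, is 11: self plus the ten explicit arguments of __init__. That count is what triggers the Many Parameters issue below. For readers who want to reproduce such a count locally, here is a hypothetical helper built on the standard-library ast module; it is not the tooling behind this report, and the file name is an assumption:

import ast

def parameter_count(func):
    # Count every kind of parameter, including self; a count of 11 for
    # __init__ matches the nop metric above.
    a = func.args
    return (
        len(a.posonlyargs)
        + len(a.args)
        + len(a.kwonlyargs)
        + (1 if a.vararg else 0)
        + (1 if a.kwarg else 0)
    )

source = open("parallel_tempering.py").read()  # assumed file name
for node in ast.walk(ast.parse(source)):
    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
        print(node.name, parameter_count(node))  # e.g. "__init__ 11"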

How to fix: Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

- Introduce a parameter object that groups related arguments (sketched below).
- Split the method or class so that each part needs only a subset of the arguments.
- Replace groups of related primitives with a single configuration mapping.
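As an illustration of the first approach, the search-behaviour arguments of the constructor below could travel together in a single parameter object. This is a minimal sketch under assumed names: ParallelTemperingConfig and the two-argument constructor are hypothetical, not part of hyperactive's API.

from dataclasses import dataclass
from typing import Optional

@dataclass
class ParallelTemperingConfig:
    # Hypothetical parameter object grouping the settings that
    # ParallelTempering.__init__ currently takes one by one.
    search_space: Optional[dict] = None
    initialize: Optional[dict] = None
    constraints: Optional[list] = None
    random_state: Optional[int] = None
    rand_rest_p: float = 0.1
    population: int = 5
    n_iter_swap: int = 5
    n_iter: int = 100
    verbose: bool = False

class ParallelTempering:  # simplified stand-in for the adapter subclass
    def __init__(self, config=None, experiment=None):
        # two parameters instead of eleven; related settings are created
        # and defaulted in one place, so they stay consistent
        self.config = config or ParallelTemperingConfig()
        self.experiment = experiment

optimizer = ParallelTempering(
    config=ParallelTemperingConfig(population=10, n_iter_swap=3),
)

One caveat: scikit-learn-style estimators, which this adapter follows (note the ``set_params`` reference in the docstring), deliberately keep every setting as an explicit constructor argument so that get_params and set_params work out of the box. A parameter object is therefore not a drop-in fix here, which is likely why the long signature was accepted.

For reference, the flagged class in full: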

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class ParallelTempering(_BaseGFOadapter):
    """Parallel tempering optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a list or numpy array of values as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    population : int, default=5
        The number of parallel optimization systems (replicas) to run.
    n_iter_swap : int, default=5
        The number of iterations between temperature swaps of the systems.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of ParallelTempering with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the ParallelTempering optimizer:
    >>> from hyperactive.opt import ParallelTempering
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = ParallelTempering(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Parallel Tempering",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "high",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        population: int = 5,
        n_iter_swap: int = 5,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.population = population
        self.n_iter_swap = n_iter_swap
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        # local import, so gradient_free_optimizers is not required
        # at module import time
        from gradient_free_optimizers import ParallelTemperingOptimizer

        return ParallelTemperingOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        list of dict
            The test parameter dictionaries.
        """
        params = super().get_test_params()
        # reuse the experiment from the default parameter set
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "population": 10,
            "n_iter_swap": 3,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
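A note on get_test_params: it returns a list of keyword-argument dictionaries rather than a single dict. A minimal sketch of how a test harness might consume them, assuming the scikit-base convention that each dictionary constructs a valid instance:

from hyperactive.opt import ParallelTempering

for params in ParallelTempering.get_test_params():
    optimizer = ParallelTempering(**params)
    optimizer.run()  # each parameter set should construct and run cleanly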