Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

PowellsMethod.__init__()   A

Complexity

Conditions 1

Size

Total Lines 23
Code Lines 21

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 21
dl 0
loc 23
rs 9.376
c 0
b 0
f 0
cc 1
nop 10

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameters also tend to become inconsistent as soon as you need more, or different, data.

There are several approaches to avoid long parameter lists:

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class PowellsMethod(_BaseGFOadapter):
    """Powell's method optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    iters_p_dim : int, default=10
        The number of search iterations spent per dimension before
        moving on to the next dimension.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of PowellsMethod with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the PowellsMethod optimizer:
    >>> from hyperactive.opt import PowellsMethod
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = PowellsMethod(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Powell’s Method",
        "info:local_vs_global": "local",
        "info:explore_vs_exploit": "exploit",
        "info:compute": "low",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        iters_p_dim=10,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.iters_p_dim = iters_p_dim
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        # Imported lazily so the adapter module can load even when
        # gradient_free_optimizers is not installed.
        from gradient_free_optimizers import PowellsMethod

        return PowellsMethod

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return. Currently unused;
            reserved for future parameter-set variants.

        Returns
        -------
        list of dict with str keys
            The test parameter dictionaries, each usable as kwargs
            for the class constructor.
        """
        params = super().get_test_params()
        # Reuse the experiment from the base parameter set so both
        # test configurations target the same experiment instance.
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "iters_p_dim": 3,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
143