Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

RandomSearch.__init__()   A

Complexity

Conditions 1

Size

Total Lines 19
Code Lines 17

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 17
dl 0
loc 19
rs 9.55
c 0
b 0
f 0
cc 1
nop 8

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

from hyperactive.opt._adapters._gfo import _BaseGFOadapter
class RandomSearch(_BaseGFOadapter):
    """Random search optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
        Optional, can be passed later via ``set_params``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of RandomSearch with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the RandomSearch optimizer:
    >>> from hyperactive.opt import RandomSearch
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = RandomSearch(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Random Search",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "low",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # Store all constructor arguments unmodified — presumably required
        # by the scikit-base get_params/set_params contract of the adapter
        # base class; verify against _BaseGFOadapter.
        self.random_state = random_state
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        # Imported lazily so gradient_free_optimizers is only required
        # when the optimizer is actually used, not at module import time.
        from gradient_free_optimizers import RandomSearchOptimizer

        return RandomSearchOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        dict with str keys
            The test parameters dictionary.
        """
        # Reuse the experiment from the base class's default test params,
        # and add a second parameter set with an explicit search space.
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params