Passed: Push — master (588022...8a2a5a) by Simon, 01:36

BayesianOptimizer.__init__()   (rating: A)

Complexity
    Conditions: 1

Size
    Total Lines: 32
    Code Lines: 29

Duplication
    Lines: 32
    Ratio: 100 %

Importance
    Changes: 0

Metric    Value
eloc      29
dl        32
loc       32
rs        9.184
c         0
b         0
f         0
cc        1
nop       14
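
For reference, comparable size and complexity figures can be recomputed locally with the radon library; a minimal sketch, assuming radon is installed and using a hypothetical file path (radon's metric names do not map one-to-one onto the abbreviations above):

# Recompute raw size and cyclomatic complexity metrics with radon
# (pip install radon); compare with the report's loc and cc values.
from radon.complexity import cc_visit
from radon.raw import analyze

with open("hyperactive/opt/_gfo_bayesian.py") as f:  # hypothetical path
    source = f.read()

raw = analyze(source)  # namedtuple: loc, lloc, sloc, comments, blank, ...
print("loc:", raw.loc)

for block in cc_visit(source):  # one block per function/method/class
    print(block.name, "-> cc", block.complexity)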

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to grow inconsistent as the method comes to need more, or different, data.

There are several approaches to avoid long parameter lists, for example:

- Introduce a parameter object (or configuration object) that groups related arguments (sketched below).
- Split the method into smaller methods, each of which needs only a subset of the parameters.
- Pass a whole object instead of extracting and forwarding its individual attributes.
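
For illustration, a minimal sketch of the parameter-object approach applied to the constructor below; the SmboConfig and RefactoredOptimizer names are hypothetical, not part of hyperactive:

from dataclasses import dataclass


@dataclass
class SmboConfig:
    # Groups the four SMBO-related arguments that the constructor below
    # accepts individually; field names mirror the original parameters.
    warm_start_smbo: object = None
    max_sample_size: int = 10_000_000
    sampling: object = None
    replacement: bool = True


class RefactoredOptimizer:
    def __init__(self, search_space=None, smbo=None, n_iter=100):
        # One configuration object stands in for four keyword arguments.
        self.smbo = smbo if smbo is not None else SmboConfig()
        self.search_space = search_space
        self.n_iter = n_iter

Note, however, that scikit-learn-style estimators deliberately keep flat, explicit constructor parameters so that get_params and set_params work, which is one reason classes like the one below accept long parameter lists.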

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class BayesianOptimizer(_BaseGFOadapter):
    """Bayesian optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    warm_start_smbo
        The warm start for SMBO.
    max_sample_size : int
        The maximum number of points to sample.
    sampling : dict
        The sampling method to use.
    replacement : bool
        Whether to sample with replacement.
    xi : float
        The exploration-exploitation trade-off parameter.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of BayesianOptimizer with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the BayesianOptimizer optimizer:
    >>> from hyperactive.opt import BayesianOptimizer
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = BayesianOptimizer(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Bayesian Optimization",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "exploit",
        "info:compute": "high",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling=None,
        replacement=True,
        xi=0.03,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p

        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.replacement = replacement
        self.xi = xi
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        from gradient_free_optimizers import BayesianOptimizer

        return BayesianOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        list of dict
            The test parameter dictionaries.
        """
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "xi": 0.33,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
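
A quick usage sketch for the test parameter sets above (illustrative only; it assumes hyperactive and scikit-learn are installed, and relies on every returned set containing an experiment, as the code above arranges):

from hyperactive.opt import BayesianOptimizer

# Each entry returned by get_test_params() is a complete kwargs dict,
# including an experiment, so each one constructs a runnable optimizer.
for params in BayesianOptimizer.get_test_params():
    optimizer = BayesianOptimizer(**params)
    best_params = optimizer.run()
    print(best_params)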
157