Passed: push to master (588022...8a2a5a) by Simon, created 01:36

ForestOptimizer.__init__() (grade: A)

Complexity
  Conditions: 1

Size
  Total Lines: 35
  Code Lines: 33

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0
Metric                           Value
eloc (effective lines of code)   33
dl                               0
loc (lines of code)              35
rs                               9.0879
c                                0
b                                0
f                                0
cc (cyclomatic complexity)       1
nop (number of parameters)       16

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to grow inconsistent whenever the method needs more, or different, data.

There are several approaches to avoid long parameter lists (a sketch of the first one follows):

- Introduce a parameter object: replace a group of parameters that naturally belong together with a single object.
- Preserve the whole object: pass the object you already have instead of extracting several of its values and passing them separately.
- Replace a parameter with a method call: let the method derive a value itself rather than receive it.
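As a sketch of the first approach applied to the flagged __init__() below: the three tree-model arguments (tree_regressor, tree_para, xi) could be bundled into a parameter object. The TreeRegressorConfig and ForestOptimizerSketch names are illustrative assumptions, not part of hyperactive:

from dataclasses import dataclass, field

# Hypothetical parameter object: bundles the three tree-model arguments
# that ForestOptimizer.__init__() currently takes as separate keywords.
@dataclass
class TreeRegressorConfig:
    tree_regressor: str = "extra_tree"             # surrogate model type
    tree_para: dict = field(default_factory=dict)  # model-specific parameters
    xi: float = 0.03                               # exploration parameter

class ForestOptimizerSketch:
    # The constructor shrinks by two parameters; callers who tune the
    # tree model pass one cohesive object instead of three loose values.
    def __init__(self, search_space=None, tree_config=None, n_iter=100):
        self.search_space = search_space
        self.tree_config = tree_config or TreeRegressorConfig()
        self.n_iter = n_iter

# Usage: ForestOptimizerSketch(tree_config=TreeRegressorConfig(xi=0.33))

The same grouping could in principle absorb the SMBO sampling arguments (warm_start_smbo, max_sample_size, sampling, replacement) into a second object, bringing nop well below the threshold.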

from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class ForestOptimizer(_BaseGFOadapter):
    """Forest optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    warm_start_smbo
        The warm start for SMBO.
    max_sample_size : int
        The maximum number of points to sample.
    sampling : dict
        The sampling method to use.
    replacement : bool
        Whether to sample with replacement.
    tree_regressor : str
        The tree regressor model to use.
    tree_para : dict
        The model specific parameters for the tree regressor.
    xi : float
        The xi parameter for the tree regressor.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of ForestOptimizer with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the ForestOptimizer:
    >>> from hyperactive.opt import ForestOptimizer
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = ForestOptimizer(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Forest Optimizer",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling=None,
        replacement=True,
        tree_regressor="extra_tree",
        tree_para=None,
        xi=0.03,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.replacement = replacement
        self.tree_regressor = tree_regressor
        self.tree_para = tree_para
        self.xi = xi
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        from gradient_free_optimizers import ForestOptimizer

        return ForestOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return.

        Returns
        -------
        list of dict with str keys
            The test parameter dictionaries.
        """
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "replacement": True,
            "tree_para": {"n_estimators": 50},
            "xi": 0.33,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
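For completeness, a hedged sketch of the constraints parameter documented above: per the docstring, each constraint is a callable that returns True or False depending on the input parameters. The constraint itself is made up for illustration; sklearn_exp is the SklearnCvExperiment from the docstring example.

def gamma_below_c(para):
    # Hypothetical constraint: admit only candidates where gamma < C.
    return para["gamma"] < para["C"]

optimizer = ForestOptimizer(
    experiment=sklearn_exp,
    search_space={
        "C": [0.01, 0.1, 1, 10],
        "gamma": [0.0001, 0.01, 0.1, 1, 10],
    },
    constraints=[gamma_below_c],
    n_iter=100,
)
best_params = optimizer.run()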
164