Passed
Push — master (c241e4...b050e9), by Simon, created 01:57

RandomOptimizer.__init__()   (grade A)

Complexity
  Conditions   1

Size
  Total Lines  18
  Code Lines   17

Duplication
  Lines  0
  Ratio  0 %

Importance
  Changes  0

Metric   Value
------   -----
eloc     17
dl       0
loc      18
rs       9.55
c        0
b        0
f        0
cc       1
nop      8

How to fix: Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more, or different, data.

There are several approaches to avoiding long parameter lists; a sketch of the first one follows the list:

1. Introduce a parameter object: group related parameters into a single object and pass that instead.
2. Preserve the whole object: if several parameters are fields of one object, pass the object itself.
3. Replace a parameter with a method call: let the callee compute the value rather than receiving it.
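As a minimal sketch of the parameter-object approach, applied hypothetically to this class (the OptimizerConfig dataclass and the RandomOptimizerSketch wrapper are illustrative names, not part of hyperactive):

from dataclasses import dataclass
from typing import Optional


@dataclass
class OptimizerConfig:
    # Hypothetical parameter object grouping the run-control settings
    # that RandomOptimizer.__init__ currently takes individually.
    n_trials: int = 100
    initialize: Optional[dict] = None
    random_state: Optional[int] = None
    early_stopping: Optional[int] = None
    max_score: Optional[float] = None


class RandomOptimizerSketch:
    # Sketch only: the eight-parameter __init__ shrinks to three.
    def __init__(self, param_space=None, config=None, experiment=None):
        self.param_space = param_space
        # Avoid a shared mutable default by constructing the config here.
        self.config = config if config is not None else OptimizerConfig()
        self.experiment = experiment

Grouping the run-control settings this way keeps __init__ short and lets new settings be added without changing every caller's signature.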

"""Random optimizer."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from .._adapters._base_optuna_adapter import _BaseOptunaAdapter


class RandomOptimizer(_BaseOptunaAdapter):
    """Random optimizer.

    Parameters
    ----------
    param_space : dict[str, tuple or list or optuna distributions]
        The search space to explore. Dictionary with parameter names
        as keys and either tuples/lists of (low, high) or
        optuna distribution objects as values.
    n_trials : int, default=100
        Number of optimization trials.
    initialize : dict[str, int], default=None
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    early_stopping : int, default=None
        Number of trials after which to stop if no improvement.
    max_score : float, default=None
        Maximum score threshold. Stop optimization when reached.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of RandomOptimizer with a scikit-learn experiment:

    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from hyperactive.opt.optuna import RandomOptimizer
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>> X, y = load_iris(return_X_y=True)
    >>> sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
    >>> param_space = {
    ...     "C": (0.01, 10),
    ...     "gamma": (0.0001, 10),
    ... }
    >>> optimizer = RandomOptimizer(
    ...     param_space=param_space, n_trials=50, experiment=sklearn_exp
    ... )
    >>> best_params = optimizer.run()
    """

    _tags = {
        "info:name": "Random Optimizer",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "low",
        "python_dependencies": ["optuna"],
    }

    def __init__(
        self,
        param_space=None,
        n_trials=100,
        initialize=None,
        random_state=None,
        early_stopping=None,
        max_score=None,
        experiment=None,
    ):
        super().__init__(
            param_space=param_space,
            n_trials=n_trials,
            initialize=initialize,
            random_state=random_state,
            early_stopping=early_stopping,
            max_score=max_score,
            experiment=experiment,
        )

    def _get_optimizer(self):
        """Get the random optimizer.

        Returns
        -------
        optimizer
            The Optuna RandomSampler instance.
        """
        import optuna

        optimizer_kwargs = {}
        if self.random_state is not None:
            optimizer_kwargs["seed"] = self.random_state

        return optuna.samplers.RandomSampler(**optimizer_kwargs)
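For reference, _get_optimizer returns a stock Optuna sampler rather than anything custom. Below is a minimal, self-contained sketch of using optuna.samplers.RandomSampler directly; the quadratic objective is a stand-in assumption for illustration, not the wrapped hyperactive experiment:

import optuna

# Seeded RandomSampler: the same object _get_optimizer returns when
# random_state is set, so repeated runs draw identical trial sequences.
sampler = optuna.samplers.RandomSampler(seed=42)
study = optuna.create_study(direction="maximize", sampler=sampler)


def objective(trial):
    # Illustrative objective: maximized at x = 2.
    x = trial.suggest_float("x", -10.0, 10.0)
    return -(x - 2.0) ** 2


study.optimize(objective, n_trials=50)
print(study.best_params)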