Passed
Push — master (c241e4...b050e9)
by Simon
created 01:57

NSGAIIIOptimizer.__init__()   A

Complexity

Conditions 1

Size

Total Lines 25
Code Lines 23

Duplication

Lines 25
Ratio 100%

Importance

Changes 0
Metric Value
eloc 23
dl 25
loc 25
rs 9.328
c 0
b 0
f 0
cc 1
nop 11

How to fix

Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent when more, or different, data is needed.

There are several approaches to avoid long parameter lists, for example grouping related parameters into a single parameter object, splitting the method into smaller methods, or providing sensible defaults so that callers pass fewer arguments.
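One common remedy, sketched below purely as an illustration (the NSGAIIISettings dataclass and the settings argument are hypothetical, not part of the hyperactive codebase), is to bundle the algorithm-specific knobs into a parameter object:

from dataclasses import dataclass


@dataclass
class NSGAIIISettings:
    """Hypothetical parameter object bundling the NSGA-III-specific knobs."""

    population_size: int = 50
    mutation_prob: float = 0.1
    crossover_prob: float = 0.9


# A constructor could then accept one settings object instead of three separate
# arguments, e.g. NSGAIIIOptimizer(param_space=..., settings=NSGAIIISettings(population_size=20)).

This keeps the constructor signature stable when new NSGA-III options are added, at the cost of one extra object for callers to build.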

"""NSGA-III multi-objective optimizer."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from .._adapters._base_optuna_adapter import _BaseOptunaAdapter


class NSGAIIIOptimizer(_BaseOptunaAdapter):
    """NSGA-III multi-objective optimizer.

    Parameters
    ----------
    param_space : dict[str, tuple or list or optuna distributions]
        The search space to explore. Dictionary with parameter names
        as keys and either tuples/lists of (low, high) or
        optuna distribution objects as values.
    n_trials : int, default=100
        Number of optimization trials.
    initialize : dict[str, int], default=None
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    early_stopping : int, default=None
        Number of trials after which to stop if no improvement.
    max_score : float, default=None
        Maximum score threshold. Stop optimization when reached.
    population_size : int, default=50
        Population size for NSGA-III.
    mutation_prob : float, default=0.1
        Mutation probability for NSGA-III.
    crossover_prob : float, default=0.9
        Crossover probability for NSGA-III.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of NSGAIIIOptimizer with a scikit-learn experiment:

    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from hyperactive.opt.optuna import NSGAIIIOptimizer
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>> X, y = load_iris(return_X_y=True)
    >>> sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
    >>> param_space = {
    ...     "C": (0.01, 10),
    ...     "gamma": (0.0001, 10),
    ... }
    >>> optimizer = NSGAIIIOptimizer(
    ...     param_space=param_space, n_trials=50, experiment=sklearn_exp
    ... )
    >>> best_params = optimizer.run()
    """

    _tags = {
        "info:name": "NSGA-III Optimizer",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "mixed",
        "info:compute": "high",
        "python_dependencies": ["optuna"],
    }

    def __init__(
        self,
        param_space=None,
        n_trials=100,
        initialize=None,
        random_state=None,
        early_stopping=None,
        max_score=None,
        population_size=50,
        mutation_prob=0.1,
        crossover_prob=0.9,
        experiment=None,
    ):
        self.population_size = population_size
        self.mutation_prob = mutation_prob
        self.crossover_prob = crossover_prob

        super().__init__(
            param_space=param_space,
            n_trials=n_trials,
            initialize=initialize,
            random_state=random_state,
            early_stopping=early_stopping,
            max_score=max_score,
            experiment=experiment,
        )

    def _get_optimizer(self):
        """Get the NSGA-III optimizer.

        Returns
        -------
        optimizer
            The Optuna NSGAIIISampler instance
        """
        import optuna

        optimizer_kwargs = {
            "population_size": self.population_size,
            "mutation_prob": self.mutation_prob,
            "crossover_prob": self.crossover_prob,
        }

        if self.random_state is not None:
            optimizer_kwargs["seed"] = self.random_state

        return optuna.samplers.NSGAIIISampler(**optimizer_kwargs)

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer."""
        params = super().get_test_params(parameter_set)
        params[0].update(
            {
                "population_size": 20,
                "mutation_prob": 0.2,
                "crossover_prob": 0.8,
            }
        )
        return params
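For reference, the sampler built in _get_optimizer is a standard optuna.samplers.NSGAIIISampler. The following is a minimal sketch of driving that sampler through Optuna directly; the objective function and the search bounds are placeholders for illustration, not taken from the adapter:

import optuna

# Build the sampler the same way _get_optimizer does when random_state is set.
sampler = optuna.samplers.NSGAIIISampler(
    population_size=50,
    mutation_prob=0.1,
    crossover_prob=0.9,
    seed=42,
)


def objective(trial):
    # Placeholder two-objective function over the parameter names used in the
    # docstring example; a real run would evaluate the experiment instead.
    C = trial.suggest_float("C", 0.01, 10)
    gamma = trial.suggest_float("gamma", 0.0001, 10)
    return C + gamma, abs(C - gamma)


# NSGA-III is a multi-objective algorithm, so the study declares two directions.
study = optuna.create_study(directions=["maximize", "minimize"], sampler=sampler)
study.optimize(objective, n_trials=20)
print(len(study.best_trials))  # number of Pareto-optimal trials found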