Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

EvolutionStrategy.__init__()   A

Complexity

Conditions 1

Size

Total Lines 31
Code Lines 29

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 29
dl 0
loc 31
rs 9.184
c 0
b 0
f 0
cc 1
nop 14

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand, but their parameters also often become inconsistent when you need more or different data.

There are several approaches to avoid long parameter lists:

1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter
2
3
4
class EvolutionStrategy(_BaseGFOadapter):
    """Evolution strategy optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    population : int
        The number of individuals in the population.
    offspring : int
        The number of offspring to generate in each generation.
    replace_parents : bool
        If True, the parents are replaced with the offspring in the next
        generation. If False, the parents are kept in the next generation and the
        offspring are added to the population.
    mutation_rate : float
        The mutation rate for the mutation operator.
    crossover_rate : float
        The crossover rate for the crossover operator.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of EvolutionStrategy with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the EvolutionStrategy optimizer:
    >>> from hyperactive.opt import EvolutionStrategy
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = EvolutionStrategy(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Evolution Strategy",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        population=10,
        offspring=20,
        replace_parents=False,
        mutation_rate=0.7,
        crossover_rate=0.3,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # Parameters are stored unmodified, per the scikit-learn convention
        # (get_params/set_params round-tripping requires self.<name> = <name>).
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.population = population
        self.offspring = offspring
        self.replace_parents = replace_parents
        self.mutation_rate = mutation_rate
        self.crossover_rate = crossover_rate
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        # Imported lazily so the adapter can be introspected without
        # gradient_free_optimizers installed.
        from gradient_free_optimizers import EvolutionStrategyOptimizer

        return EvolutionStrategyOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Returns
        -------
        dict with str keys
            The test parameters dictionary.
        """
        params = super().get_test_params()
        # Reuse the experiment from the base parameter set; add a second
        # parameter set exercising the evolution-strategy-specific knobs.
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "population": 15,
            "offspring": 10,
            "replace_parents": True,
            "mutation_rate": 1,
            "crossover_rate": 2,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params