Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

GeneticAlgorithm.__init__()   A

Complexity

Conditions 1

Size

Total Lines 34
Code Lines 31

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 31
dl 0
loc 34
rs 9.1359
c 0
b 0
f 0
cc 1
nop 15

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameters also tend to become inconsistent when you need more, or different, data.

There are several approaches to avoid long parameter lists:

1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class GeneticAlgorithm(_BaseGFOadapter):
    """Genetic algorithm optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    population : int
        The number of individuals in the population.
    offspring : int
        The number of offspring to generate in each generation.
    crossover : str
        The crossover operator to use.
    n_parents : int
        The number of parents to select for crossover.
    mutation_rate : float
        The mutation rate.
    crossover_rate : float
        The crossover rate.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of GeneticAlgorithm with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the GeneticAlgorithm optimizer:
    >>> from hyperactive.opt import GeneticAlgorithm
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = GeneticAlgorithm(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Genetic Algorithm",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "high",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        population=10,
        offspring=10,
        crossover="discrete-recombination",
        n_parents=2,
        mutation_rate=0.5,
        crossover_rate=0.5,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # genetic-algorithm specific hyperparameters
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.population = population
        self.offspring = offspring
        self.crossover = crossover
        self.n_parents = n_parents
        self.mutation_rate = mutation_rate
        self.crossover_rate = crossover_rate

        # settings shared with other GFO adapters
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        from gradient_free_optimizers import GeneticAlgorithmOptimizer

        return GeneticAlgorithmOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return. Currently unused;
            kept for interface compatibility.

        Returns
        -------
        list of dict with str keys
            The test parameter dictionaries: the defaults from the parent
            adapter, plus one explicit genetic-algorithm configuration.
        """
        params = super().get_test_params()
        # reuse the experiment fixture provided by the parent's default set
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "population": 15,
            "offspring": 10,
            "n_parents": 3,
            "mutation_rate": 0.01,
            "crossover_rate": 0.02,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
163