Passed
Push — master ( 588022...8a2a5a )
by Simon
01:36
created

ParticleSwarmOptimizer.__init__()   A

Complexity

Conditions 1

Size

Total Lines 31
Code Lines 29

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 29
dl 0
loc 31
rs 9.184
c 0
b 0
f 0
cc 1
nop 14

How to fix   Many Parameters   

Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to become inconsistent as you need more, or different, data.

There are several approaches to avoid long parameter lists:

1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter
2
3
4
class ParticleSwarmOptimizer(_BaseGFOadapter):
    """Particle swarm optimizer.

    Parameters
    ----------
    search_space : dict[str, list]
        The search space to explore. A dictionary with parameter
        names as keys and a numpy array as values.
    initialize : dict[str, int]
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` dependent on the input parameters.
    random_state : None, int
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float
        The probability of a random iteration during the search process.
    population : int
        The number of particles in the swarm.
    inertia : float
        The inertia of the swarm.
    cognitive_weight : float
        A factor of the movement towards the personal best position of the
        individual optimizers in the population.
    social_weight : float
        A factor of the movement towards the global best position of the
        individual optimizers in the population.
    temp_weight : float
        The temperature weight of the swarm.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of ParticleSwarmOptimizer with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the ParticleSwarmOptimizer optimizer:
    >>> from hyperactive.opt import ParticleSwarmOptimizer
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = ParticleSwarmOptimizer(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.run()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Particle Swarm Optimization",
        "info:local_vs_global": "global",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        population=10,
        inertia=0.5,
        cognitive_weight=0.5,
        social_weight=0.5,
        temp_weight=0.2,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        # Per scikit-learn convention, __init__ only stores the hyperparameters
        # unmodified; no validation or coercion happens here so that
        # ``get_params``/``set_params`` round-trip exactly.
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.population = population
        self.inertia = inertia
        self.cognitive_weight = cognitive_weight
        self.social_weight = social_weight
        self.temp_weight = temp_weight
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        # Imported lazily so gradient_free_optimizers is only required
        # when the optimizer is actually run.
        from gradient_free_optimizers import ParticleSwarmOptimizer

        return ParticleSwarmOptimizer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return; currently unused.

        Returns
        -------
        list of dict with str keys
            The test parameter dictionaries.
        """
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "population": 15,
            "inertia": 0.9,
            "cognitive_weight": 0.9,
            "social_weight": 0.9,
            "temp_weight": 0.9,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
158