SpiralOptimization._get_gfo_class()    rating: A
last analyzed

Complexity
    Conditions: 1

Size
    Total Lines: 11
    Code Lines: 3

Duplication
    Lines: 11
    Ratio: 100 %

Importance
    Changes: 0

Metric    Value
eloc      3
dl        11
loc       11
rs        10
c         0
b         0
f         0
cc        1
nop       1
from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class SpiralOptimization(_BaseGFOadapter):
    """Spiral optimizer.
6
7
    Parameters
8
    ----------
9
    search_space : dict[str, list]
10
        The search space to explore. A dictionary with parameter
11
        names as keys and a numpy array as values.
12
        Optional, can be passed later via ``set_params``.
    initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4}
        The method to generate initial positions. A dictionary with
        the following key literals and the corresponding value type:
        {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]}
    constraints : list[callable], default=[]
        A list of constraints, where each constraint is a callable.
        The callable returns `True` or `False` depending on the input parameters.
    random_state : None, int, default=None
        If None, create a new random state. If int, create a new random state
        seeded with the value.
    rand_rest_p : float, default=0.1
        The probability of a random iteration during the search process.
    population : int, default=10
        The number of particles in the swarm.
    decay_rate : float, default=0.99
        A factor that influences the radius of the particles
        during their spiral movement.
        Lower values accelerate the convergence of the particles to the best
        known position, while values above 1 eventually lead to a movement where
        the particles spiral away from each other.
    n_iter : int, default=100
        The number of iterations to run the optimizer.
    verbose : bool, default=False
        If True, print the progress of the optimization process.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Examples
    --------
    Basic usage of SpiralOptimization with a scikit-learn experiment:

    1. defining the experiment to optimize:
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>>
    >>> sklearn_exp = SklearnCvExperiment(
    ...     estimator=SVC(),
    ...     X=X,
    ...     y=y,
    ... )

    2. setting up the SpiralOptimization optimizer:
    >>> from hyperactive.opt import SpiralOptimization
    >>> import numpy as np
    >>>
    >>> config = {
    ...     "search_space": {
    ...         "C": [0.01, 0.1, 1, 10],
    ...         "gamma": [0.0001, 0.01, 0.1, 1, 10],
    ...     },
    ...     "n_iter": 100,
    ... }
    >>> optimizer = SpiralOptimization(experiment=sklearn_exp, **config)

    3. running the optimization:
    >>> best_params = optimizer.solve()

    Best parameters can also be accessed via:
    >>> best_params = optimizer.best_params_
    """

    _tags = {
        "info:name": "Spiral Optimization",
        "info:local_vs_global": "mixed",
        "info:explore_vs_exploit": "explore",
        "info:compute": "middle",
    }

    def __init__(
        self,
        search_space=None,
        initialize=None,
        constraints=None,
        random_state=None,
        rand_rest_p=0.1,
        population: int = 10,
        decay_rate: float = 0.99,
        n_iter=100,
        verbose=False,
        experiment=None,
    ):
        self.random_state = random_state
        self.rand_rest_p = rand_rest_p
        self.population = population
        self.decay_rate = decay_rate
        self.search_space = search_space
        self.initialize = initialize
        self.constraints = constraints
        self.n_iter = n_iter
        self.experiment = experiment
        self.verbose = verbose

        super().__init__()

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes.
        """
        from gradient_free_optimizers import SpiralOptimization

        return SpiralOptimization

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Get the test parameters for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return.

        Returns
        -------
        list of dict with str keys
            The test parameters dictionaries.
        """
        params = super().get_test_params()
        experiment = params[0]["experiment"]
        more_params = {
            "experiment": experiment,
            "population": 20,
            "decay_rate": 0.9999,
            "search_space": {
                "C": [0.01, 0.1, 1, 10],
                "gamma": [0.0001, 0.01, 0.1, 1, 10],
            },
            "n_iter": 100,
        }
        params.append(more_params)
        return params
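
To build intuition for the decay_rate parameter documented above, the sketch below models the particle radius as a simple geometric decay. This is an illustrative approximation only, not the actual update rule of gradient_free_optimizers' SpiralOptimization; the names initial_radius and radius are hypothetical.

import numpy as np

# Illustrative sketch (assumption): model the spiral radius as
# radius_t = initial_radius * decay_rate ** t. This is not taken from the
# gradient_free_optimizers source; it only shows why decay_rate < 1
# contracts the spiral toward the best known position, while decay_rate > 1
# makes the particles spiral away from each other.
n_iter = 100
initial_radius = 1.0

for decay_rate in (0.9, 0.99, 1.01):
    radius = initial_radius * decay_rate ** np.arange(n_iter)
    print(f"decay_rate={decay_rate}: final radius = {radius[-1]:.4f}")

Under this toy model, the default decay_rate=0.99 shrinks the radius to roughly a third of its starting value after 100 iterations, consistent with the docstring's note that lower values accelerate convergence toward the best known position.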