_BaseOptunaAdapter._suggest_params()   Grade: B

Complexity
    Conditions: 8

Size
    Total Lines: 32
    Code Lines: 14

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0

Metric   Value     Meaning
eloc     14        effective (code) lines
dl       0         duplicated lines
loc      32        total lines
rs       7.3333
c        0
b        0
f        0
cc       8         cyclomatic complexity (conditions)
nop      3         number of parameters
"""Base adapter for Optuna optimizers."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.base import BaseOptimizer

__all__ = ["_BaseOptunaAdapter"]


class _BaseOptunaAdapter(BaseOptimizer):
    """Base adapter for Optuna optimizers."""

    _tags = {
        "python_dependencies": ["optuna"],
        "info:name": "Optuna-based optimizer",
    }

    def __init__(
        self,
        param_space=None,
        n_trials=100,
        initialize=None,
        random_state=None,
        early_stopping=None,
        max_score=None,
        experiment=None,
        **optimizer_kwargs,
    ):
        self.param_space = param_space
        self.n_trials = n_trials
        self.initialize = initialize
        self.random_state = random_state
        self.early_stopping = early_stopping
        self.max_score = max_score
        self.experiment = experiment
        self.optimizer_kwargs = optimizer_kwargs
        super().__init__()

    def _get_optimizer(self):
        """Get the Optuna sampler to use.

        This method should be implemented by subclasses to return a
        configured Optuna sampler instance; it is passed as the
        ``sampler`` argument to ``optuna.create_study`` in ``_solve``.

        Returns
        -------
        sampler
            The Optuna sampler instance
        """
        raise NotImplementedError("Subclasses must implement _get_optimizer")

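    # A concrete subclass mainly supplies the sampler. Hedged sketch, where
    # ``_TPEAdapter`` is a hypothetical name, not part of hyperactive:
    #
    #     class _TPEAdapter(_BaseOptunaAdapter):
    #         def _get_optimizer(self):
    #             import optuna
    #
    #             return optuna.samplers.TPESampler(
    #                 seed=self.random_state, **self.optimizer_kwargs
    #             )
    #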
    def _convert_param_space(self, param_space):
        """Convert parameter space to Optuna format.

        Parameters
        ----------
        param_space : dict
            The parameter space to convert

        Returns
        -------
        dict
            The converted parameter space
        """
        return param_space

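    # The base implementation is an identity hook; subclasses can override it
    # to normalize user input. Hedged sketch (the ``range`` handling below is
    # illustrative, not existing hyperactive behavior):
    #
    #     def _convert_param_space(self, param_space):
    #         converted = {}
    #         for key, space in param_space.items():
    #             if isinstance(space, range):
    #                 # map range(1, 10) to the inclusive tuple (1, 9)
    #                 converted[key] = (space.start, space.stop - 1)
    #             else:
    #                 converted[key] = space
    #         return converted
    #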
    def _suggest_params(self, trial, param_space):
        """Suggest parameters using Optuna trial.

        Parameters
        ----------
        trial : optuna.Trial
            The Optuna trial object
        param_space : dict
            The parameter space

        Returns
        -------
        dict
            The suggested parameters
        """
        params = {}
        for key, space in param_space.items():
            if hasattr(space, "suggest"):  # Optuna distribution object
                # Trial._suggest is a private Optuna API and expects the
                # parameter name first, then the distribution
                params[key] = trial._suggest(key, space)
            elif isinstance(space, tuple) and len(space) == 2:
                # Tuples are treated as ranges (low, high)
                low, high = space
                if isinstance(low, int) and isinstance(high, int):
                    params[key] = trial.suggest_int(key, low, high)
                else:
                    params[key] = trial.suggest_float(key, low, high, log=False)
            elif isinstance(space, list):
                # Lists are treated as categorical choices
                params[key] = trial.suggest_categorical(key, space)
            else:
                raise ValueError(f"Invalid parameter space for key '{key}': {space}")
        return params

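    # How the three supported space formats dispatch, illustratively:
    #
    #     param_space = {
    #         "n_estimators": (10, 100),     # int tuple   -> suggest_int
    #         "learning_rate": (0.01, 1.0),  # float tuple -> suggest_float
    #         "kernel": ["linear", "rbf"],   # list        -> suggest_categorical
    #     }
    #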
    def _objective(self, trial):
        """Objective function for Optuna optimization.

        Parameters
        ----------
        trial : optuna.Trial
            The Optuna trial object

        Returns
        -------
        float
            The objective value
        """
        params = self._suggest_params(trial, self.param_space)
        score = self.experiment(**params)

        # Stop early once max_score is reached; Study.stop() lets the current
        # trial finish and schedules no new trials
        if self.max_score is not None and score >= self.max_score:
            trial.study.stop()

        return score

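    # The same stopping pattern in plain Optuna, as a hedged standalone
    # sketch (names are illustrative):
    #
    #     import optuna
    #
    #     def objective(trial):
    #         x = trial.suggest_float("x", 0.0, 1.0)
    #         if x >= 0.9:  # analogous to the max_score check above
    #             trial.study.stop()
    #         return x
    #
    #     study = optuna.create_study(direction="maximize")
    #     study.optimize(objective, n_trials=50)  # may stop well before 50
    #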
    def _setup_initial_positions(self, study):
        """Set up initial starting positions if provided.

        Parameters
        ----------
        study : optuna.Study
            The Optuna study object
        """
        if self.initialize is not None:
            if isinstance(self.initialize, dict) and "warm_start" in self.initialize:
                warm_start_points = self.initialize["warm_start"]
                if isinstance(warm_start_points, list):
                    # enqueue_trial queues fixed parameters that the first
                    # trials of study.optimize consume and evaluate, avoiding
                    # the distribution conflicts of calling suggest methods
                    # directly; evaluating the experiment here as well would
                    # score each warm-start point twice
                    for point in warm_start_points:
                        study.enqueue_trial(point)

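    # Warm-start usage, illustratively (the parameter names are hypothetical):
    #
    #     study = optuna.create_study(direction="maximize")
    #     study.enqueue_trial({"C": 1.0, "gamma": 0.1})
    #     study.optimize(objective, n_trials=10)  # trial 0 uses the queued point
    #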
    def _solve(self, experiment, param_space, n_trials, **kwargs):
        """Run the Optuna optimization.

        Parameters
        ----------
        experiment : callable
            The experiment to optimize
        param_space : dict
            The parameter space
        n_trials : int
            Number of trials
        **kwargs
            Additional parameters

        Returns
        -------
        dict
            The best parameters found
        """
        import optuna

        # Create the sampler; subclasses apply random_state in _get_optimizer
        optimizer = self._get_optimizer()

        # Create the study; hyperactive scores are maximized
        study = optuna.create_study(
            direction="maximize",
            sampler=optimizer,
        )

        # Setup initial positions
        self._setup_initial_positions(study)

        # Setup the early-stopping callback; note that this caps the total
        # number of trials rather than stopping on score stagnation
        callbacks = []
        if self.early_stopping is not None:

            def early_stopping_callback(study, trial):
                if len(study.trials) >= self.early_stopping:
                    study.stop()

            callbacks.append(early_stopping_callback)

        # Run optimization
        study.optimize(
            self._objective,
            n_trials=n_trials,
            callbacks=callbacks if callbacks else None,
        )

        self.best_score_ = study.best_value
        self.best_params_ = study.best_params
        return study.best_params

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer."""
        from sklearn.datasets import load_iris
        from sklearn.svm import SVC

        from hyperactive.experiment.integrations import SklearnCvExperiment

        X, y = load_iris(return_X_y=True)
        sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)

        param_space = {
            "C": (0.01, 10),
            "gamma": (0.0001, 10),
        }

        return [
            {
                "param_space": param_space,
                "n_trials": 10,
                "experiment": sklearn_exp,
            }
        ]
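
# Hedged end-to-end sketch using the hypothetical _TPEAdapter from above; in
# normal use the public hyperactive entry points drive _solve, so calling it
# directly is for illustration only:
#
#     def experiment(x):
#         return -(x - 3.0) ** 2  # toy score, maximized at x == 3
#
#     opt = _TPEAdapter(
#         param_space={"x": (0.0, 10.0)},
#         n_trials=25,
#         experiment=experiment,
#     )
#     best = opt._solve(experiment, opt.param_space, opt.n_trials)
#     print(best, opt.best_score_)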