Passed
Push to master (c241e4...b050e9) by Simon, created 01:57

hyperactive.opt._adapters._base_optuna_adapter (rating: A)

Complexity

Total Complexity 25

Size/Duplication

Total Lines 212
Duplicated Lines 0 %

Importance

Changes 0
Metric  Value
wmc     25
eloc    92
dl      0
loc     212
rs      10
c       0
b       0
f       0

8 Methods

Rating   Name   Duplication   Size   Complexity  
A _BaseOptunaAdapter._get_optimizer() 0 12 1
A _BaseOptunaAdapter._objective() 0 21 3
A _BaseOptunaAdapter._convert_param_space() 0 14 1
B _BaseOptunaAdapter._suggest_params() 0 32 8
A _BaseOptunaAdapter.get_test_params() 0 21 1
A _BaseOptunaAdapter.__init__() 0 20 1
A _BaseOptunaAdapter._run() 0 53 4
B _BaseOptunaAdapter._setup_initial_positions() 0 17 6
"""Base adapter for Optuna optimizers."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.base import BaseOptimizer


class _BaseOptunaAdapter(BaseOptimizer):
    """Base adapter for Optuna optimizers."""

    _tags = {
        "python_dependencies": ["optuna"],
        "info:name": "Optuna-based optimizer",
    }

    def __init__(
        self,
        param_space=None,
        n_trials=100,
        initialize=None,
        random_state=None,
        early_stopping=None,
        max_score=None,
        experiment=None,
        **optimizer_kwargs,
    ):
        self.param_space = param_space
        self.n_trials = n_trials
        self.initialize = initialize
        self.random_state = random_state
        self.early_stopping = early_stopping
        self.max_score = max_score
        self.experiment = experiment
        self.optimizer_kwargs = optimizer_kwargs
        super().__init__()

    def _get_optimizer(self):
        """Get the Optuna optimizer to use.

        This method should be implemented by subclasses to return
        the specific optimizer class and its initialization parameters.

        Returns
        -------
        optimizer
            The Optuna optimizer instance
        """
        raise NotImplementedError("Subclasses must implement _get_optimizer")

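    # For illustration only (not part of the original file): a subclass built
    # around Optuna's TPE sampler might implement this hook as
    #     def _get_optimizer(self):
    #         import optuna
    #         return optuna.samplers.TPESampler(seed=self.random_state)
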
    def _convert_param_space(self, param_space):
        """Convert parameter space to Optuna format.

        Parameters
        ----------
        param_space : dict
            The parameter space to convert

        Returns
        -------
        dict
            The converted parameter space
        """
        return param_space

    def _suggest_params(self, trial, param_space):
        """Suggest parameters using Optuna trial.

        Parameters
        ----------
        trial : optuna.Trial
            The Optuna trial object
        param_space : dict
            The parameter space

        Returns
        -------
        dict
            The suggested parameters
        """
        params = {}
        for key, space in param_space.items():
            if hasattr(space, "suggest"):  # optuna distribution object
                # Optuna's private Trial._suggest expects (name, distribution)
                params[key] = trial._suggest(key, space)
            elif isinstance(space, tuple) and len(space) == 2:
                # Tuples are treated as ranges (low, high)
                low, high = space
                if isinstance(low, int) and isinstance(high, int):
                    params[key] = trial.suggest_int(key, low, high)
                else:
                    params[key] = trial.suggest_float(key, low, high, log=False)
            elif isinstance(space, list):
                # Lists are treated as categorical choices
                params[key] = trial.suggest_categorical(key, space)
            else:
                raise ValueError(f"Invalid parameter space for key '{key}': {space}")
        return params

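    # Illustrative mapping (not part of the original file): a space such as
    #     {"C": (0.01, 10.0), "degree": (2, 5), "kernel": ["rbf", "linear"]}
    # yields suggest_float for "C" (float bounds), suggest_int for "degree"
    # (both bounds are ints), and suggest_categorical for "kernel".
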
    def _objective(self, trial):
        """Objective function for Optuna optimization.

        Parameters
        ----------
        trial : optuna.Trial
            The Optuna trial object

        Returns
        -------
        float
            The objective value
        """
        params = self._suggest_params(trial, self.param_space)
        score = self.experiment(**params)

        # Handle early stopping based on max_score
        if self.max_score is not None and score >= self.max_score:
            trial.study.stop()

        return score

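    # Note: study.stop() does not abort the current trial; it only prevents
    # new trials from starting, so the score above is still recorded.
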
    def _setup_initial_positions(self, study):
        """Set up initial starting positions if provided.

        Parameters
        ----------
        study : optuna.Study
            The Optuna study object
        """
        if self.initialize is not None:
            if isinstance(self.initialize, dict) and "warm_start" in self.initialize:
                warm_start_points = self.initialize["warm_start"]
                if isinstance(warm_start_points, list):
                    # Warm-start points are enqueued so the study evaluates
                    # them first, rather than suggested directly, which could
                    # cause distribution conflicts
                    for point in warm_start_points:
                        # evaluated here once; the enqueued copy is evaluated
                        # again when the study runs
                        self.experiment(**point)
                        study.enqueue_trial(point)

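    # Illustrative `initialize` format, inferred from the checks above (the
    # concrete parameter names are made up):
    #     initialize={"warm_start": [{"C": 1.0, "gamma": 0.1}]}
    # Each warm-start dict is queued with study.enqueue_trial, so Optuna
    # evaluates it before sampling new points.
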
    def _run(self, experiment, param_space, n_trials, **kwargs):
        """Run the Optuna optimization.

        Parameters
        ----------
        experiment : callable
            The experiment to optimize
        param_space : dict
            The parameter space
        n_trials : int
            Number of trials
        **kwargs
            Additional parameters

        Returns
        -------
        dict
            The best parameters found
        """
        import optuna

        # Get the Optuna sampler from the subclass (which handles
        # random_state, if provided)
        optimizer = self._get_optimizer()

        # Create study
        study = optuna.create_study(
            direction="maximize",  # assuming we want to maximize scores
            sampler=optimizer,
        )

        # Setup initial positions
        self._setup_initial_positions(study)

        # Setup early stopping callback (stops once the total number of
        # trials reaches self.early_stopping)
        callbacks = []
        if self.early_stopping is not None:

            def early_stopping_callback(study, trial):
                if len(study.trials) >= self.early_stopping:
                    study.stop()

            callbacks.append(early_stopping_callback)

        # Run optimization
        study.optimize(
            self._objective,
            n_trials=n_trials,
            callbacks=callbacks if callbacks else None,
        )

        self.best_score_ = study.best_value
        self.best_params_ = study.best_params
        return study.best_params

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer."""
        from sklearn.datasets import load_iris
        from sklearn.svm import SVC

        from hyperactive.experiment.integrations import SklearnCvExperiment

        X, y = load_iris(return_X_y=True)
        sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)

        param_space = {
            "C": (0.01, 10),
            "gamma": (0.0001, 10),
        }

        return [
            {
                "param_space": param_space,
                "n_trials": 10,
                "experiment": sklearn_exp,
            }
        ]
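
For context on the control flow in _run, the following self-contained sketch reproduces the same Optuna calls (study creation, warm-start enqueueing, a trial-budget callback) against a toy objective. It is an illustration, not part of the adapter: the objective function, parameter names, and trial budget are made up; only the Optuna APIs (TPESampler, create_study, enqueue_trial, study.optimize) are real.

import optuna


def toy_experiment(C, gamma):
    # Stand-in for a hyperactive experiment; higher scores are better.
    return -((C - 5.0) ** 2 + (gamma - 1.0) ** 2)


def objective(trial):
    # Mirrors _suggest_params for tuple ranges with float bounds.
    params = {
        "C": trial.suggest_float("C", 0.01, 10.0),
        "gamma": trial.suggest_float("gamma", 0.0001, 10.0),
    }
    return toy_experiment(**params)


# Mirrors a subclass's _get_optimizer returning a seeded sampler.
sampler = optuna.samplers.TPESampler(seed=42)
study = optuna.create_study(direction="maximize", sampler=sampler)

# Mirrors _setup_initial_positions: the warm-start point runs first.
study.enqueue_trial({"C": 1.0, "gamma": 0.1})


# Mirrors the early-stopping callback: cap the total number of trials.
def stop_after_20(study, trial):
    if len(study.trials) >= 20:
        study.stop()


study.optimize(objective, n_trials=50, callbacks=[stop_after_20])
print(study.best_params, study.best_value)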