"""Example: run two hill-climbing optimizers on a DecisionTreeRegressor.

Builds a discrete hyperparameter search space for a decision tree regressor
on the diabetes dataset, attaches the same experiment and search space to two
optimizers, combines them with ``+``, and runs the combined search for at
most 5 seconds.
"""

import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.tree import DecisionTreeRegressor

# SearchSpace is only used in the commented-out alternative below; the import
# is kept deliberately so the example can be switched over by uncommenting.
from hyperactive.base.search_space_optional import SearchSpace
from hyperactive.optimizers import (
    HillClimbingOptimizer,
    RandomRestartHillClimbingOptimizer,
)

from .experiments.test_function import SklearnExperiment

# Load the diabetes regression dataset (features X, targets y).
data = load_diabetes()
X, y = data.data, data.target

# Discrete candidate values for each DecisionTreeRegressor hyperparameter.
search_space = {
    "max_depth": list(np.arange(2, 15, 1)),
    "min_samples_split": list(np.arange(2, 25, 2)),
}

""" optional way of defining search-space
search_space = SearchSpace(
    max_depth=list(np.arange(2, 15, 1)),
    min_samples_split=list(np.arange(2, 25, 2)),
)
"""

# Objective: cross-validated score (cv=4) of the tree on (X, y).
experiment = SklearnExperiment(DecisionTreeRegressor, X, y, cv=4)

optimizer1 = HillClimbingOptimizer(n_iter=50)
optimizer2 = RandomRestartHillClimbingOptimizer(n_iter=50, n_jobs=2)

# Both optimizers search the same space for the same objective.
optimizer1.add_search(experiment, search_space)
optimizer2.add_search(experiment, search_space)

# not sure about this way of combining optimizers. Might not be intuitive what the plus means.
hyper = optimizer1 + optimizer2

hyper.run(max_time=5)