"""Tune a DecisionTreeRegressor on the diabetes dataset with hyperactive.

Two hill-climbing optimizers share one experiment and one search space;
they are combined into a single runner and executed with a time budget.
"""

import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.tree import DecisionTreeRegressor

from hyperactive.optimizers import (
    HillClimbingOptimizer,
    RandomRestartHillClimbingOptimizer,
)

from .experiments.test_function import SklearnExperiment
from .search_space_optional import SearchSpace

# Regression benchmark used as the tuning objective's data.
data = load_diabetes()
X, y = data.data, data.target

# Discrete candidate grids for each hyperparameter being tuned.
search_space = {
    "max_depth": list(np.arange(2, 15, 1)),
    "min_samples_split": list(np.arange(2, 25, 2)),
}

""" optional way of defining search-space
search_space = SearchSpace(
    max_depth=list(np.arange(2, 15, 1)),
    min_samples_split=list(np.arange(2, 25, 2)),
)
"""

# Objective: 4-fold cross-validated score of the model on (X, y).
experiment = SklearnExperiment(DecisionTreeRegressor, X, y, cv=4)

# Two independent optimizers over the same objective and space;
# the second restarts from random points and uses 2 parallel jobs.
optimizer1 = HillClimbingOptimizer(n_iter=50)
optimizer2 = RandomRestartHillClimbingOptimizer(n_iter=50, n_jobs=2)

optimizer1.add_search(experiment, search_space)
optimizer2.add_search(experiment, search_space)

# not sure about this way of combining optimizers. Might not be intuitive what the plus means.
hyper = optimizer1 + optimizer2

# Run the combined search, capped at 5 seconds of wall-clock time.
hyper.run(max_time=5)