Passed
Pull Request — master (#101)
by Simon
01:32
created

optimizer   A

Complexity

Total Complexity 0

Size/Duplication

Total Lines 43
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
eloc 21
dl 0
loc 43
rs 10
c 0
b 0
f 0
wmc 0
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.tree import DecisionTreeRegressor


from hyperactive.base.search_space_optional import SearchSpace
from hyperactive.optimizers import (
    HillClimbingOptimizer,
    RandomRestartHillClimbingOptimizer,
)

from .experiments.test_function import SklearnExperiment


# Diabetes regression dataset: features X, target y.
dataset = load_diabetes()
X, y = dataset.data, dataset.target


# Hyperparameter ranges explored by both optimizers.
search_space = {
    name: [*values]
    for name, values in (
        ("max_depth", np.arange(2, 15, 1)),
        ("min_samples_split", np.arange(2, 25, 2)),
    )
}

""" optional way of defining search-space
search_space = SearchSpace(
    max_depth=list(np.arange(2, 15, 1)),
    min_samples_split=list(np.arange(2, 25, 2)),
)
"""

# Cross-validated decision-tree experiment that the optimizers evaluate.
experiment = SklearnExperiment(DecisionTreeRegressor, X, y, cv=4)

optimizer1 = HillClimbingOptimizer(n_iter=50)
optimizer2 = RandomRestartHillClimbingOptimizer(n_iter=50, n_jobs=2)

# Register the same experiment/search-space pair on each optimizer.
for opt in (optimizer1, optimizer2):
    opt.add_search(experiment, search_space)

# NOTE(review): combining optimizers via `+` may not be intuitive —
# unclear what the plus operator means here.
hyper = optimizer1 + optimizer2

hyper.run(max_time=5)