Passed
Push to master (b10705...ca1f66) by Simon, created 02:39

memory_   Rating: A

Complexity
  Total Complexity    1

Size / Duplication
  Total Lines         37
  Duplicated Lines    0 %

Importance
  Changes             0

Metric    Value
wmc       1
eloc      23
dl        0
loc       37
rs        10
c         0
b         0
f         0

1 Function

Rating    Name       Duplication    Size    Complexity
A         model()    0              9       1
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.datasets import load_iris
from hyperactive import Hyperactive

# load the iris dataset as a small example classification problem
iris_data = load_iris()
X = iris_data.data
y = iris_data.target


# objective function: builds a GradientBoostingClassifier from the
# hyperparameters in "para" and returns the mean cross-validation score
def model(para, X_train, y_train):
    model = GradientBoostingClassifier(
        n_estimators=para["n_estimators"],
        max_depth=para["max_depth"],
        min_samples_split=para["min_samples_split"],
    )
    scores = cross_val_score(model, X_train, y_train, cv=3)

    return scores.mean(), model


# search space: the objective function is the key, the hyperparameter
# ranges to search are the values
search_config = {
    model: {
        "n_estimators": range(10, 200, 10),
        "max_depth": range(2, 12),
        "min_samples_split": range(2, 12),
    }
}

"""
The memory remembers evaluations done earlier in the optimization process.
Instead of retraining the model for a parameter set it has already seen, it
looks up the saved score/loss in the memory. This shows up as a speedup
during the optimization, because positions in the search space that have
already been explored do not have to be evaluated again.
"""
opt = Hyperactive(search_config, n_iter=1000, memory=True)

# search the best hyperparameters for the given data
opt.fit(X, y)
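
A rough way to see the effect of the memory is to time the same search once with the memory disabled and once with it enabled. The sketch below reuses model, search_config, X and y from the example above and assumes the Hyperactive constructor keeps the (search_config, n_iter, memory) signature shown there; whether and how much of a speedup appears depends on how often the optimizer revisits positions it has already evaluated.

import time

# Hedged sketch: compare wall-clock time of the same search without and
# with the memory. Assumes the Hyperactive(search_config, n_iter=...,
# memory=...) signature used above and reuses model, search_config, X and y
# from that example.
for use_memory in (False, True):
    start = time.time()
    opt = Hyperactive(search_config, n_iter=1000, memory=use_memory)
    opt.fit(X, y)
    print(f"memory={use_memory}: {time.time() - start:.1f} s")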