Passed
Push — master (ef01f2...fd3757)
by Simon
01:46 queued 10s

Multiprocessing.model0()   A

Complexity

Conditions 1

Size

Total Lines 12
Code Lines 10

Duplication

Lines 12
Ratio 100 %

Importance

Changes 0

Metric    Value
eloc      10
dl        12
loc       12
rs        9.9
c         0
b         0
f         0
cc        1
nop       3
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.datasets import load_breast_cancer
from hyperactive import Hyperactive

data = load_breast_cancer()
X, y = data.data, data.target


# Duplication flagged by the review: this block appears duplicated elsewhere
# in the project (same pattern as model1 and model2 below).
def model0(para, X, y):
    etc = ExtraTreesClassifier(
        n_estimators=para["n_estimators"],
        criterion=para["criterion"],
        max_features=para["max_features"],
        min_samples_split=para["min_samples_split"],
        min_samples_leaf=para["min_samples_leaf"],
        bootstrap=para["bootstrap"],
    )
    # score with 3-fold cross-validation; the mean score is returned to the optimizer
    scores = cross_val_score(etc, X, y, cv=3)

    return scores.mean()


# Duplication flagged by the review (same pattern as model0).
def model1(para, X, y):
    rfc = RandomForestClassifier(
        n_estimators=para["n_estimators"],
        criterion=para["criterion"],
        max_features=para["max_features"],
        min_samples_split=para["min_samples_split"],
        min_samples_leaf=para["min_samples_leaf"],
        bootstrap=para["bootstrap"],
    )
    scores = cross_val_score(rfc, X, y, cv=3)

    return scores.mean()


# Duplication flagged by the review (same pattern as model0 and model1).
def model2(para, X, y):
    gbc = GradientBoostingClassifier(
        n_estimators=para["n_estimators"],
        learning_rate=para["learning_rate"],
        max_depth=para["max_depth"],
        min_samples_split=para["min_samples_split"],
        min_samples_leaf=para["min_samples_leaf"],
        subsample=para["subsample"],
        max_features=para["max_features"],
    )
    scores = cross_val_score(gbc, X, y, cv=3)

    return scores.mean()


# Search space: each objective function is mapped to the hyperparameter grid
# its values are drawn from.
search_config = {
    model0: {
        "n_estimators": range(10, 200, 10),
        "criterion": ["gini", "entropy"],
        "max_features": np.arange(0.05, 1.01, 0.05),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "bootstrap": [True, False],
    },
    model1: {
        "n_estimators": range(10, 200, 10),
        "criterion": ["gini", "entropy"],
        "max_features": np.arange(0.05, 1.01, 0.05),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "bootstrap": [True, False],
    },
    model2: {
        "n_estimators": range(10, 200, 10),
        "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 21),
        "min_samples_leaf": range(1, 21),
        "subsample": np.arange(0.05, 1.01, 0.05),
        "max_features": np.arange(0.05, 1.01, 0.05),
    },
}


# Run the hyperparameter search (30 iterations, 4 parallel jobs).
opt = Hyperactive(X, y)
opt.search(search_config, n_iter=30, n_jobs=4)
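
The 100 % duplication ratio reported above comes from the three nearly identical objective functions. Below is a minimal sketch of one way the shared body could be factored out. It is not part of the original example: the helper name _evaluate is invented, and it assumes para behaves like a plain dict holding only the sampled hyperparameters, which is how the listing above already treats it. Keeping model0, model1 and model2 as module-level functions preserves picklability for the multiprocessing run.

from sklearn.model_selection import cross_val_score
from sklearn.ensemble import (
    ExtraTreesClassifier,
    RandomForestClassifier,
    GradientBoostingClassifier,
)


def _evaluate(estimator_cls, para, X, y):
    # shared body of the three objectives: build the estimator from the sampled
    # hyperparameters, then return the mean 3-fold cross-validation score
    valid = estimator_cls().get_params()
    params = {key: value for key, value in para.items() if key in valid}
    scores = cross_val_score(estimator_cls(**params), X, y, cv=3)
    return scores.mean()


def model0(para, X, y):
    return _evaluate(ExtraTreesClassifier, para, X, y)


def model1(para, X, y):
    return _evaluate(RandomForestClassifier, para, X, y)


def model2(para, X, y):
    return _evaluate(GradientBoostingClassifier, para, X, y)

With this shape, search_config and the Hyperactive call would stay exactly as in the listing above.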