import random

import numpy as np
import pandas as pd
from sklearn.datasets import load_iris, make_classification
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import cross_val_score
from sklearn.neighbors import KNeighborsClassifier

from hyperactive import Hyperactive


def model(opt): |
|
15
|
|
|
knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"]) |
|
16
|
|
|
scores = cross_val_score(knr, X, y, cv=5) |
|
|
|
|
|
|
17
|
|
|
score = scores.mean() |
|
18
|
|
|
|
|
19
|
|
|
return score |
|
20
|
|
|
|
|
21
|
|
|
|
|
22
|
|
|
search_space = { |
|
23
|
|
|
"n_neighbors": list(range(1, 80)), |
|
24
|
|
|
} |
|
25
|
|
|
|
|
26
|
|
|
|
|
27
|
|
|
search_data_list = [] |
|
28
|
|
|
|
|
29
|
|
|
for i in range(25): |
|
30
|
|
|
n_samples = random.randint(100, 1000) |
|
31
|
|
|
n_features = random.randint(3, 20) |
|
32
|
|
|
n_informative = n_features - random.randint(0, n_features - 2) |
|
33
|
|
|
|
|
34
|
|
|
X, y = make_classification( |
|
35
|
|
|
n_samples=n_samples, |
|
36
|
|
|
n_classes=2, |
|
37
|
|
|
n_features=n_features, |
|
38
|
|
|
n_informative=n_informative, |
|
39
|
|
|
n_redundant=0, |
|
40
|
|
|
random_state=i, |
|
41
|
|
|
) |
|
42
|
|
|
|
|
43
|
|
|
hyper = Hyperactive(verbosity=False) |
|
44
|
|
|
hyper.add_search(model, search_space, n_iter=10) |
|
45
|
|
|
hyper.run() |
|
46
|
|
|
|
|
47
|
|
|
search_data = hyper.search_data(model) |
|
48
|
|
|
|
|
49
|
|
|
search_data["size_X"] = X.size |
|
50
|
|
|
search_data["itemsize_X"] = X.itemsize |
|
51
|
|
|
search_data["ndim_X"] = X.ndim |
|
52
|
|
|
|
|
53
|
|
|
search_data["size_y"] = y.size |
|
54
|
|
|
search_data["itemsize_y"] = y.itemsize |
|
55
|
|
|
search_data["ndim_y"] = y.ndim |
|
56
|
|
|
|
|
57
|
|
|
search_data_list.append(search_data) |
|
58
|
|
|
|
|
59
|
|
|
|
|
60
|
|
|
meta_data = pd.concat(search_data_list) |
|
61
|
|
|
|
|
62
|
|
|
X_meta = meta_data.drop(["score"], axis=1) |
|
63
|
|
|
y_meta = meta_data["score"] |
|
64
|
|
|
|
|
65
|
|
|
|
|
66
|
|
|
gbr = GradientBoostingRegressor() |
|
67
|
|
|
gbr.fit(X_meta, y_meta) |
|
68
|
|
|
|
|
69
|
|
|
data = load_iris() |
|
70
|
|
|
X_new, y_new = data.data, data.target |
|
71
|
|
|
|
|
72
|
|
|
X_meta_test = pd.DataFrame(range(1, 100), columns=["n_neighbors"]) |
|
73
|
|
|
|
|
74
|
|
|
X_meta_test["size_X"] = X_new.size |
|
75
|
|
|
X_meta_test["itemsize_X"] = X_new.itemsize |
|
76
|
|
|
X_meta_test["ndim_X"] = X_new.ndim |
|
77
|
|
|
|
|
78
|
|
|
X_meta_test["size_y"] = y_new.size |
|
79
|
|
|
X_meta_test["itemsize_y"] = y_new.itemsize |
|
80
|
|
|
X_meta_test["ndim_y"] = y_new.ndim |
|
81
|
|
|
|
|
82
|
|
|
|
|
83
|
|
|
y_meta_pred = gbr.predict(X_meta_test) |
|
84
|
|
|
|
|
85
|
|
|
y_meta_pred_max_idx = y_meta_pred.argmax() |
|
86
|
|
|
n_neighbors_best = search_space["n_neighbors"][y_meta_pred_max_idx] |
|
87
|
|
|
|
|
88
|
|
|
hyper = Hyperactive() |
|
89
|
|
|
hyper.add_search(model, search_space, n_iter=200) |
|
90
|
|
|
hyper.run() |
|
91
|
|
|
|