Total Complexity | 1 |
Total Lines | 29 |
Duplicated Lines | 0 % |
Changes | 0 |
1 | from sklearn.model_selection import cross_val_score |
||
2 | from sklearn.ensemble import GradientBoostingClassifier |
||
3 | from sklearn.datasets import load_iris |
||
4 | from hyperactive import Hyperactive |
||
5 | |||
# Load the iris dataset and pull out the feature matrix and target vector.
iris_data = load_iris()
X, y = iris_data.data, iris_data.target
def model(para, X, y):
    """Objective function: mean 3-fold cross-validation score of a
    gradient-boosted classifier built from the given hyperparameters.

    para -- dict providing "n_estimators" and "max_depth"
    X, y -- feature matrix and target vector
    Returns the mean CV score (higher is better).
    """
    classifier = GradientBoostingClassifier(
        n_estimators=para["n_estimators"],
        max_depth=para["max_depth"],
    )
    cv_scores = cross_val_score(classifier, X, y, cv=3)
    return cv_scores.mean()
||
18 | |||
19 | |||
# Map the objective function to the hyperparameter space to search over.
search_config = {
    model: {
        "n_estimators": range(10, 200, 10),
        "max_depth": range(2, 15),
    }
}

# The memory remembers evaluations made earlier during the optimization
# process: instead of retraining the model, Hyperactive looks up the saved
# score/loss. This shows up as a speed-up once (parts of) the search space
# have already been explored.
opt = Hyperactive(X, y)
opt.search(search_config, n_iter=1000, memory=True)
||
29 |