Total Complexity  | 1
Total Lines       | 34
Duplicated Lines  | 0 %
Changes           | 0
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.datasets import load_wine

from gradient_free_optimizers import HillClimbingOptimizer


# Load the wine classification dataset bundled with scikit-learn.
data = load_wine()
X, y = data.data, data.target


def model(para):
    # Objective function: build a GradientBoostingClassifier from the
    # candidate hyperparameters in `para` and return the mean 3-fold
    # cross-validation accuracy (the optimizer maximizes this value).
    gbc = GradientBoostingClassifier(
        n_estimators=para["n_estimators"],
        max_depth=para["max_depth"],
        min_samples_split=para["min_samples_split"],
        min_samples_leaf=para["min_samples_leaf"],
    )
    scores = cross_val_score(gbc, X, y, cv=3)

    return scores.mean()

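# Illustrative sanity check, not part of the original example: the
# objective takes a plain dict of hyperparameters and returns a single
# score. The values below are arbitrary points from the ranges used in
# the search space defined next.
example_para = {
    "n_estimators": 50,
    "max_depth": 3,
    "min_samples_split": 2,
    "min_samples_leaf": 1,
}
print("example CV accuracy:", model(example_para))
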

# Discrete search space: each dimension is a numpy array of candidate values.
search_space = {
    "n_estimators": np.arange(20, 120, 1),
    "max_depth": np.arange(2, 12, 1),
    "min_samples_split": np.arange(2, 12, 1),
    "min_samples_leaf": np.arange(1, 12, 1),
}

# Run hill climbing for 50 iterations, maximizing the objective.
opt = HillClimbingOptimizer(search_space)
opt.search(model, n_iter=50)
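
# Assumed follow-up, not part of the original example: in typical
# Gradient-Free-Optimizers usage the optimizer exposes the results after
# search() via `best_para`, `best_score` and `search_data`; verify these
# attribute names against the installed version.
print("best hyperparameters:", opt.best_para)
print("best CV accuracy:", opt.best_score)
print(opt.search_data)  # per-iteration parameters and scores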