| Metric | Value |
|---|---|
| Total Complexity | 1 |
| Total Lines | 35 |
| Duplicated Lines | 0 % |
| Changes | 0 |
```python
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import load_wine

from gradient_free_optimizers import HillClimbingOptimizer


# load the wine dataset used in this example
data = load_wine()
X, y = data.data, data.target


def model(para):
    # objective function: score one hyperparameter combination
    # with 5-fold cross-validation and return the mean accuracy
    dtc = DecisionTreeClassifier(
        min_samples_split=para["min_samples_split"],
        min_samples_leaf=para["min_samples_leaf"],
    )
    scores = cross_val_score(dtc, X, y, cv=5)

    return scores.mean()


# discrete search space for the two decision-tree hyperparameters
search_space = {
    "min_samples_split": np.arange(2, 25, 1),
    "min_samples_leaf": np.arange(1, 25, 1),
}

# memory deactivated: every visited position is evaluated again,
# even if the optimizer has already seen it
opt = HillClimbingOptimizer(search_space)
opt.search(model, n_iter=500, memory=False)


# memory activated: scores of previously visited positions are
# looked up instead of being re-evaluated
print("\n\nMemory activated:")
opt = HillClimbingOptimizer(search_space)
opt.search(model, n_iter=500, memory=True)
```
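Running the two searches back to back makes the effect of `memory` visible: with `memory=True`, positions that were already evaluated are looked up instead of triggering another cross-validation, so the second search typically finishes noticeably faster. The sketch below is one way to measure that difference; it reuses the `model`, `search_space`, and `HillClimbingOptimizer` defined above and only adds the standard-library `time` module (the helper name `timed_search` is purely illustrative).

```python
import time


def timed_search(memory_flag):
    # run a fresh hill-climbing search and return its wall-clock duration
    opt = HillClimbingOptimizer(search_space)
    start = time.perf_counter()
    opt.search(model, n_iter=500, memory=memory_flag)
    return time.perf_counter() - start


t_no_mem = timed_search(False)
t_mem = timed_search(True)
print(f"memory=False: {t_no_mem:.2f} s   memory=True: {t_mem:.2f} s")
```

The exact speed-up depends on how often the hill climber revisits positions, which in turn depends on the size of the search space and the number of iterations.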