| Metric | Value |
|---|---|
| Total Complexity | 1 |
| Total Lines | 41 |
| Duplicated Lines | 0 % |
| Changes | 0 |
```python
import numpy as np

from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import cross_val_score

from hyperactive import Hyperactive
from hyperactive.optimizers import BayesianOptimizer


data = load_iris()
X, y = data.data, data.target


# Objective function: 5-fold cross-validated accuracy of a k-nearest-neighbors
# classifier, with the number of neighbors taken from the current parameter set.
def model(opt):
    knr = KNeighborsClassifier(n_neighbors=opt["n_neighbors"])
    scores = cross_val_score(knr, X, y, cv=5)
    score = scores.mean()

    return score


search_space = {
    "n_neighbors": list(range(1, 100)),
}


# First run: explore the search space with the default optimizer.
hyper = Hyperactive()
hyper.add_search(model, search_space, n_iter=100)
hyper.run()

# Collect the evaluated parameters and scores from the first run.
search_data = hyper.search_data(model)


# Second run: warm-start a Bayesian optimizer with the collected search data.
optimizer = BayesianOptimizer(xi=0.03, warm_start_smbo=search_data, rand_rest_p=0.1)

hyper = Hyperactive()
hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
hyper.run()
```
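After the warm-started run finishes, the best parameters and score can be read back from the `Hyperactive` instance. A minimal sketch, assuming the `best_para` and `best_score` accessors of the current Hyperactive API:

```python
# Inspect the outcome of the warm-started Bayesian run
# (assumes Hyperactive exposes best_para/best_score accessors).
best_parameters = hyper.best_para(model)
best_score = hyper.best_score(model)

print("best n_neighbors:", best_parameters["n_neighbors"])
print("best cross-validation score:", best_score)
```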