# Author: Simon Blanke
# Email: [email protected]
# License: MIT License

import os
import json
import shutil
import hashlib
import inspect


current_path = os.path.realpath(__file__)
meta_learn_path, _ = current_path.rsplit("/", 1)
meta_path = meta_learn_path + "/meta_data/"
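
# Rough on-disk layout implied by the functions below (a sketch; the hash
# values are illustrative):
#
#     meta_data/
#         model_connections.json
#         <sha1 of model source>/
#             <sha1 of X>_<sha1 of y>_.csv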
# Draft functions, disabled by keeping them inside a string literal:
"""
def get_best_models(X, y):
    # TODO: model_dict key:model value:score

    return model_dict


def get_model_search_config(model):
    # TODO
    return search_config


def get_model_init_config(model):
    # TODO
    return init_config
"""

def delete_model(model):
    model_hash = _get_model_hash(model)
    path = meta_path + str(model_hash)

    if os.path.exists(path) and os.path.isdir(path):
        shutil.rmtree(meta_path + str(model_hash))
        print("Model data successfully removed")
    else:
        print("Model data not found in memory")


def delete_model_dataset(model, X, y):
    csv_file = _get_file_path(model, X, y)
    print("csv_file", csv_file)

    if os.path.exists(csv_file):
        os.remove(csv_file)
        print("Model data successfully removed")
    else:
        print("Model data not found in memory")


def merge_model_hashes(model1, model2):
    # do checks if search space has same dim

    with open(meta_path + "model_connections.json") as f:
        data = json.load(f)

    model1_hash = _get_model_hash(model1)
    model2_hash = _get_model_hash(model2)

    models_dict = {str(model1_hash): str(model2_hash)}
    data.update(models_dict)

    with open(meta_path + "model_connections.json", "w") as f:
        json.dump(data, f)
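
# Example usage (sketch): writes a one-way mapping
# hash(model1) -> hash(model2) into model_connections.json, presumably so that
# the rest of the package can treat the meta-data of both models as shared.
#
#     merge_model_hashes(model_a, model_b)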


def split_model_hashes(model1, model2):
    # TODO: do checks if search space has same dim

    with open(meta_path + "model_connections.json") as f:
        data = json.load(f)

    model1_hash = _get_model_hash(model1)
    model2_hash = _get_model_hash(model2)

    if model1_hash in data.keys():
        del data[model1_hash]
    if model2_hash in data.keys():
        del data[model2_hash]

    with open(meta_path + "model_connections.json", "w") as f:
        json.dump(data, f)
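
# Example usage (sketch): the counterpart of merge_model_hashes(); any
# connection entry keyed by either model's hash is removed again.
#
#     split_model_hashes(model_a, model_b)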


def _get_file_path(model, X, y):
    func_path_ = _get_model_hash(model) + "/"
    func_path = meta_path + func_path_

    feature_hash = _get_hash(X)
    label_hash = _get_hash(y)

    return func_path + (feature_hash + "_" + label_hash + "_.csv")


def _get_model_hash(model):
    return _get_hash(_get_func_str(model).encode("utf-8"))


def _get_func_str(func):
    return inspect.getsource(func)


def _get_hash(object):
    return hashlib.sha1(object).hexdigest()
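
# Note (sketch of the hashing scheme above): a model is identified by the sha1
# of its source code via inspect.getsource(), so byte-identical functions map
# to the same meta-data folder, while any edit to the function produces a new
# hash. X and y are hashed directly, which assumes bytes-like objects that
# support the buffer protocol (e.g. contiguous numpy arrays).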