# Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
.. module:: yaml_loader
   :platform: Unix
   :synopsis: A class to load data from a non-standard nexus/hdf5 file using \
       descriptions loaded from a yaml file.

.. moduleauthor:: Nicola Wadeson <[email protected]>

"""

import os
import h5py
import yaml
import copy
import collections.abc as collections
import numpy as np  # used in exec so do not delete
from ast import literal_eval

import savu.plugins.utils as pu
import savu.plugins.loaders.utils.yaml_utils as yu
from savu.plugins.loaders.base_loader import BaseLoader
from savu.data.experiment_collection import Experiment


class YamlConverter(BaseLoader):
    """
    A class to load data from a non-standard nexus/hdf5 file using \
    descriptions loaded from a yaml file.

    :u*param yaml_file: Path to the file containing the data \
        descriptions. Default: None.
    :*param template_param: A hidden parameter to hold parameters passed in \
        via a savu template file. Default: {}.
    """

    def __init__(self, name='YamlConverter'):
        super(YamlConverter, self).__init__(name)

    def setup(self, template=False, metadata=True):
        # Read YAML file
        yfile = self.parameters['yaml_file']
        data_dict = yu.read_yaml(self._get_yaml_file(yfile))
        data_dict = self._check_for_inheritance(data_dict, {})
        self._check_for_imports(data_dict)
        data_dict.pop('inherit', None)
        data_dict.pop('import', None)
        if template:
            return data_dict

        data_dict = self._add_template_updates(data_dict)
        self._set_entries(data_dict)

    def _get_yaml_file(self, yaml_file):
        if yaml_file is None:
            raise Exception('Please pass a yaml file to the yaml loader.')

        if not os.path.exists(yaml_file):
            path = os.path.dirname(
                __file__.split(os.path.join('savu', 'plugins'))[0])
            yaml_file = os.path.join(path, yaml_file)
            if not os.path.exists(yaml_file):
                raise Exception('The yaml file does not exist: %s' % yaml_file)
        return yaml_file

    def _add_template_updates(self, ddict):
        all_entries = ddict.pop('all', {})
        for key, value in all_entries.items():
            for entry in ddict.values():
                if key in entry:
                    entry[key] = value

        for entry in self.parameters['template_param']:
            updates = self.parameters['template_param'][entry]
            ddict[entry]['params'].update(updates)
        return ddict

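    # 'import' is an optional top-level yaml entry listing python modules to
    # make available to the $ expressions evaluated later, e.g. a hypothetical
    # "import: [scipy]" or "import: [scipy as sp]" (illustrative only).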
    def _check_for_imports(self, ddict):
        if 'import' in list(ddict.keys()):
            for imp in ddict['import']:
                name = False
                if len(imp.split()) > 1:
                    imp, name = imp.split(' as ')
                    name = name.strip()
                mod = __import__(imp.strip())
                globals()[mod.__name__ if not name else name] = mod

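    # 'inherit' allows a yaml file to build on other yaml files: each inherited
    # file is read and recursively merged, with the current file's entries
    # taking precedence. An inherited file may itself carry an 'override'
    # mapping ({old_name: new_name}) that renames entries in the files further
    # up its own inherit chain. Hypothetical use:
    #
    #   inherit: [/path/to/base_description.yaml]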
    def _check_for_inheritance(self, ddict, inherit, override=False):
        if 'inherit' in list(ddict.keys()):
            idict = ddict['inherit']
            idict = idict if isinstance(idict, list) else [idict]
            for i in idict:
                if i != 'None':
                    new_dict = yu.read_yaml(self._get_yaml_file(i))
                    new_dict, isoverride = \
                        self.__override(inherit, new_dict, override)
                    inherit.update(new_dict)
                    inherit = self._check_for_inheritance(
                        new_dict, inherit, override=isoverride)
        self._update(inherit, ddict)
        return inherit

    def __override(self, inherit, ddict, override):
        isoverride = False
        if 'override' in ddict:
            isoverride = ddict.pop('override')
        if override:
            for old, new in override.items():
                ddict[new] = ddict.pop(old)
                if new in list(inherit.keys()):
                    self._update(ddict[new], inherit[new])
        return ddict, isoverride

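    # _update is a recursive dict merge: nested mappings from u are merged into
    # d rather than replacing them wholesale, so inherited yaml entries can be
    # partially overridden.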
    def _update(self, d, u):
        for k, v in u.items():
            if isinstance(v, collections.Mapping):
                d[k] = self._update(d.get(k, {}), v)
            else:
                d[k] = v
        return d

    def _set_entries(self, ddict):
        entries = list(ddict.keys())
        for name in entries:
            self.get_description(ddict[name], name)

    def get_description(self, entry, name, metadata=True):
        # set params first as we may need them subsequently
        if 'params' in entry:
            self._set_params(entry['params'])
        # --------------- check for data entry -----------------------------
        if 'data' in list(entry.keys()):
            data_obj = self.exp.create_data_object("in_data", name)
            data_obj = self.set_data(data_obj, entry['data'])

        else:
            emsg = 'Please specify the data information in the yaml file.'
            raise Exception(emsg)

        if metadata:
            self._get_meta_data_descriptions(entry, data_obj)

    def _get_meta_data_descriptions(self, entry, data_obj):
        # --------------- check for axis label information -----------------
        if 'axis_labels' in list(entry.keys()):
            self._set_axis_labels(data_obj, entry['axis_labels'])
        else:
            raise Exception('Please specify the axis labels in the yaml file.')

        # --------------- check for data access patterns -------------------
        if 'patterns' in list(entry.keys()):
            self._set_patterns(data_obj, entry['patterns'])
        else:
            raise Exception('Please specify the patterns in the yaml file.')

        # add any additional metadata
        if 'metadata' in entry:
            self._set_metadata(data_obj, entry['metadata'])
        self.set_data_reduction_params(data_obj)

        if 'exp_metadata' in entry:
            self._set_metadata(data_obj, entry['exp_metadata'], exp=True)

    def set_data(self, name, entry):
        raise NotImplementedError('Please implement "set_data" function'
                                  ' in the loader')

    def _set_keywords(self, dObj):
        filepath = str(dObj.backing_file.filename)
        shape = str(dObj.get_shape())
        return {'dfile': filepath, 'dshape': shape}

    def __get_wildcard_values(self, dObj):
        if 'wildcard_values' in list(dObj.data_info.get_dictionary().keys()):
            return dObj.data_info.get('wildcard_values')
        return None

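    # update_value evaluates the part of a yaml value after a '$' as a python
    # expression, with 'dfile' (the open backing hdf5 file), 'dshape' and
    # 'wildcard' available as names. A hypothetical yaml value might look like
    # "$dfile['entry1/data'].shape[0]" (the dataset path is illustrative only).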
    def update_value(self, dObj, value, itr=0):
        # set the keywords available to the eval calls below
        if dObj is not None:
            dshape = dObj.get_shape()
            dfile = dObj.backing_file
            globals()['dfile'] = dfile
            wildcard = self.__get_wildcard_values(dObj)

        if isinstance(value, str):
            split = value.split('$')
            if len(split) > 1:
                value = self._convert_string(dObj, split[1])
                try:
                    value = eval(value, globals(), locals())
                    value = self._convert_bytes(value)
                except Exception as e:
                    msg = "Error evaluating value: '%s'\n%s" % (value, e)
                    try:
                        # retry with byte-string keys for back compatibility
                        value = value.replace("index(", "index(b")
                        value = eval(value, globals(), locals())
                        value = self._convert_bytes(value)
                    except Exception:
                        raise Exception(msg)
        return value

    def _convert_string(self, dObj, string):
        for old, new in self.parameters.items():
            if old in string:
                if isinstance(new, str):
                    split = new.split('$')
                    if len(split) > 1:
                        new = split[1]
                    elif isinstance(new, str):  # nothing left to split
                        new = "'%s'" % new
                string = self._convert_string(
                    dObj, string.replace(old, str(new)))
        return string

    def _convert_bytes(self, value):
        # convert bytes to str - for back compatibility
        if isinstance(value, bytes):
            return value.decode("ascii")
        if isinstance(value, np.ndarray) and isinstance(value[0], bytes):
            return value.astype(str)
        return value

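    # Any entry in 'params' whose key ends in 'file' is evaluated once and the
    # result (typically an open file handle) is published to globals() so later
    # $ expressions can refer to it by name; the raw parameter entry is then
    # removed from the params dict.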
    def _set_params(self, params):
        # Update variable parameters that are revealed in the template
        params = self._update_template_params(params)
        self.parameters.update(params)
        # find files, open and add to the namespace then delete file params
        files = [k for k in list(params.keys()) if k.endswith('file')]
        for f in files:
            param = params[f]
            try:
                globals()[str(f)] = self.update_value(None, param)
            except IOError:
                self._check_for_test_data(f, param)
            del params[f]

    def _check_for_test_data(self, f, param):
        # check if this is Savu test data
        substrs = param.split("'")[1:2]
        filename = None
        for s in substrs:
            try:
                filename = self._get_yaml_file(s)
                break
            except:
                pass
        param = param.replace(s, filename)
        globals()[str(f)] = self.update_value(None, param)
        del self.parameters[f]

    def _update_template_params(self, params):
        for k, v in params.items():
            v = pu.is_template_param(v)
            if v is not False:
                params[k] = \
                    self.parameters[k] if k in list(self.parameters.keys()) else v[1]
        return params

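    # Each axis_labels entry supplies 'dim', 'name', 'units' and 'value'; the
    # label is stored as "name.units" and, when a value is given, it is also
    # added to the data object's metadata under 'name'.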
    def _set_axis_labels(self, dObj, labels):
        dims = list(range(len(list(labels.keys()))))
        axis_labels = [None]*len(list(labels.keys()))
        for d in dims:
            self._check_label_entry(labels[d])
            l = labels[d]
            for key in list(l.keys()):
                l[key] = self.update_value(dObj, l[key])
            axis_labels[l['dim']] = (l['name'] + '.' + l['units'])
            if l['value'] is not None:
                dObj.meta_data.set(l['name'], l['value'])
        dObj.set_axis_labels(*axis_labels)

    def _check_label_entry(self, label):
        required = ['dim', 'name', 'value', 'units']
        try:
            [label[i] for i in required]
        except KeyError:
            raise Exception("dim, name, value and units are required fields"
                            " for axis labels")

    def _set_patterns(self, dObj, patterns):
        for key, dims in patterns.items():
            core_dims = self.__get_tuple(
                self.update_value(dObj, dims['core_dims']))
            slice_dims = self.__get_tuple(
                self.update_value(dObj, dims['slice_dims']))
            dObj.add_pattern(key, core_dims=core_dims, slice_dims=slice_dims)

    def __get_tuple(self, val):
        return literal_eval(val) if not isinstance(val, tuple) else val

    def _set_metadata(self, dObj, mdata, exp=False):
        populate = dObj.exp if exp else dObj
        for key, value in mdata.items():
            value = self.update_value(dObj, value['value'])
            populate.meta_data.set(key, value)