# -*- coding: utf-8 -*-
# Apache Software License 2.0
#
# Copyright (c) 2018, Christophe Duong
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Handles configuration files for the application
"""
import os
import uuid
from datetime import datetime
from logging import DEBUG
from logging import Formatter
from logging import StreamHandler
from logging import getLogger
from logging.config import dictConfig
from platform import uname
from tempfile import mkstemp
from urllib.error import HTTPError
from urllib.error import URLError

import pyhocon
from pytz import timezone

from aiscalator import __version__
from aiscalator.core.utils import copy_replace
from aiscalator.core.utils import data_file


def _generate_global_config() -> str:
    """Generate a standard configuration file for the application in the
    user's home folder ~/.aiscalator/config/aiscalator.conf from the
    template file in aiscalator/config/template/aiscalator.conf
    """
    logger = getLogger(__name__)
    dst = os.path.join(os.path.expanduser("~"),
                       ".aiscalator/config/aiscalator.conf")
    logger.info("Generating a new configuration file for aiscalator:\n\t%s",
                dst)
    pattern = [
        "testUserID",
        "generation_date",
    ]
    replace_value = [
        generate_user_id(),
        '"' + str(datetime
                  .utcnow()
                  .replace(tzinfo=timezone("UTC"))) +
        '" // in UTC timezone',
    ]
    dst_dir = os.path.dirname(dst)
    if dst_dir:
        os.makedirs(dst_dir, exist_ok=True)
    copy_replace(data_file("../config/template/aiscalator.conf"),
                 dst, pattern=pattern, replace_value=replace_value)
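    # Touch empty customization files (extra apt packages, python
    # requirements and jupyter lab extensions) next to the new config file.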
    open(os.path.join(dst_dir, "apt_packages.txt"), 'a').close()
    open(os.path.join(dst_dir, "requirements.txt"), 'a').close()
    open(os.path.join(dst_dir, "lab_extensions.txt"), 'a').close()
    return dst


def generate_user_id() -> str:
    """
    Returns
    -------
    str
        Returns a string identifying this user when the
        setup was first run
    """
    return 'u' + str(uuid.getnode())


def _app_config_file() -> str:
    """Return the path to the app configuration file."""
    if 'AISCALATOR_HOME' in os.environ:
        home = os.environ['AISCALATOR_HOME']
        file = os.path.join(home, "config", "aiscalator.conf")
        if os.path.exists(file):
            return file
    return os.path.join(os.path.expanduser("~"), '.aiscalator',
                        'config', 'aiscalator.conf')


# TODO refactor, splitting up the Global App Config part from
# Jupyter Config (step) and Airflow config (DAG) into 3 classes
# with separate APIs.
class AiscalatorConfig:
    """
    A configuration object for the Aiscalator application.

    This object stores:
        - global configuration for the whole application
        - configuration for a particular context specified in a step
          configuration file.
        - In this case, we might even focus on a particular step.

    ...

    Attributes
    ----------
    _app_conf
        global configuration object for the application
    _config_path : str
        path to the configuration file (or plain configuration as string)
    _step_name : str
        name of the currently processed step
    _step
        configuration object for the currently processed step
    _dag_name : str
        name of the currently processed dag
    _dag
        configuration object for the currently processed dag
    """
    def __init__(self,
                 config=None,
                 step_selection=None,
                 dag_selection=None):
        """
        Parameters
        ----------
        config : str
            path to the step configuration file (or plain configuration
            string)
        step_selection : str
            Name of step from the configuration file to focus on
        dag_selection : str
            Name of dag from the configuration file to focus on
        """
        self._config_path = config
        self._app_conf = _setup_app_config()
        self._setup_logging()
        parsed_config = _parse_config(config)
        self._step_name = None
        self._step = None
        self._dag_name = None
        self._dag = None
        if parsed_config:
            step_sel = step_selection
            if "steps" in parsed_config:
                self._step_name, self._step = (
                    _select_config(parsed_config,
                                   root_node='steps',
                                   child_node='task',
                                   selection=step_sel)
                )
            if "dags" in parsed_config:
                self._dag_name, self._dag = (
                    _select_config(parsed_config,
                                   root_node='dags',
                                   child_node='definition',
                                   selection=dag_selection)
                )

    ###################################################
    #           Global App Config methods             #
    ###################################################

    def _setup_logging(self):
        """ Setup the logging configuration of the application """
        if self.app_config_has("logging"):
            log_config = self.app_config()["logging"]
            filename_list = [
                v['filename'] for k, v in
                _find_config_tree(log_config, "filename")
            ]
            # pre-create directory in advance for all loggers
            for file in filename_list:
                file_dir = os.path.dirname(file)
                if file_dir and not os.path.isdir(file_dir):
                    os.makedirs(file_dir, exist_ok=True)
            dictConfig(log_config)
        else:
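            # No "logging" section in the application config: fall back to a
            # DEBUG StreamHandler on the root logger.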
            log = getLogger()
            handler = StreamHandler()
            formatter = Formatter(
                "%(asctime)s-%(threadName)s-%(name)s-%(levelname)s-%(message)s"
            )
            handler.setFormatter(formatter)
            log.addHandler(handler)
            log.setLevel(DEBUG)
        msg = ("Starting " + os.path.basename(__name__) +
               " version " + __version__ + " on " +
               "_".join(uname()).replace(" ", "_"))
        logger = getLogger(__name__)
        logger.debug(msg)

    def app_config_home(self) -> str:
        """Return the path to the app configuration folder."""
        if self.app_config_has("app_config_home_directory"):
            return self.app_config()["app_config_home_directory"]
        return os.path.join(os.path.expanduser("~"), '.aiscalator')

    def redefine_app_config_home(self, config_home):
        """
        Modify the configuration file to change the value of the
        application configuration home directory.

        Parameters
        ----------
        config_home : str
            path to the new configuration home

        Returns
        -------
        AiscalatorConfig
            the new configuration object
        """
        dst = _app_config_file()
        new_config = (
            pyhocon.ConfigFactory.parse_string(
                "aiscalator.app_config_home_directory = " + config_home
            )
        ).with_fallback(_app_config_file(), resolve=False)
        with open(dst, "w") as output:
            output.write(
                pyhocon.converter.HOCONConverter.to_hocon(new_config)
            )
        self._app_conf = new_config
        return new_config

    def redefine_airflow_workspaces(self, workspaces):
        """
        Modify the configuration file to change the value of the
        airflow workspaces

        Parameters
        ----------
        workspaces : list
            list of workspaces to bind to airflow

        Returns
        -------
        AiscalatorConfig
            the new configuration object
        """
        dst = _app_config_file()
        new_config = (
            pyhocon.ConfigFactory.parse_string(
                "aiscalator.airflow.setup.workspace_paths = [\n" +
                "\n".join(workspaces) +
                "]"
            )
        ).with_fallback(_app_config_file(), resolve=False)
        with open(dst, "w") as output:
            output.write(
                pyhocon.converter.HOCONConverter.to_hocon(new_config)
            )
        self._app_conf = new_config
        return new_config

    def user_env_file(self, job=None) -> list:
        """
        Find a list of env files to pass to docker containers

        Parameters
        ----------
        job
            Optional step or dag config

        Returns
        -------
        List
            env files
        """
        logger = getLogger(__name__)
        result = []
        # Look if any env file or variables were defined in the step/dag
        if job:
            (_, env_filename) = mkstemp(prefix="aiscalator_", text=True)
            with open(env_filename, mode="w") as env_file:
                # concatenate all the env files and variables into one
                for env in job:
                    if isinstance(env, pyhocon.config_tree.ConfigTree):
                        for k in env.keys():
                            env_file.write(k + '=' + env.get(k) + '\n')
                    elif os.path.isfile(os.path.join(self.root_dir(), env)):
                        with open(os.path.join(self.root_dir(), env),
                                  mode="r") as file:
                            for line in file:
                                env_file.write(line)
                    else:
                        msg = ("Undefined env " + env +
                               ": expecting a dict of environment variables" +
                               " or path to environment configuration file.")
                        logger.warning("Warning %s", msg)
            result.append(env_filename)
        # TODO look in user config if env file has been redefined
        result.append(
            os.path.join(self.app_config_home(), "config", ".env")
        )
        return result

    def _timestamp_now(self) -> str:
        """
        Depending on how the timezone is configured, returns the
        timestamp for this instant.
        """
        date_now = datetime.utcnow().replace(tzinfo=timezone("UTC"))
        if self._app_conf["aiscalator"]:
            pst = timezone(self.app_config().timezone)
        else:
            pst = timezone('Europe/Paris')
        return date_now.astimezone(pst).strftime("%Y%m%d%H%M%S")

    def app_config(self):
        """
        Returns
        -------
        pyhocon.ConfigTree
            the configuration object for the aiscalator application
        """
        return self._app_conf["aiscalator"]
    def config_path(self):
        """
        Returns
        -------
        str
            Returns the path to the step configuration file.
            If it was a URL, it will return the path to the temporary
            downloaded version of it.
            If it was a plain string, then returns None
        """
        if self._config_path and os.path.exists(self._config_path):
            if pyhocon.ConfigFactory.parse_file(self._config_path):
                return os.path.realpath(self._config_path)
        # TODO if string is url/git repo, download file locally first
        return None

    def root_dir(self):
        """
        Returns
        -------
        str
            Returns the path to the folder containing the
            configuration file
        """
        path = self.config_path()
        if path:
            root_dir = os.path.dirname(path)
            if not root_dir.endswith("/"):
                root_dir += "/"
            return root_dir
        return None

    def user_id(self) -> str:
        """
        Returns
        -------
        str
            the user id stored when the application was first setup
        """
        return self.app_config()["metadata.user.id"]
    def app_config_has(self, field) -> bool:
        """
        Tests if the application config has a configuration
        value for the field.
        """
        if not self.app_config():
            return False
        return field in self.app_config()
    def airflow_docker_compose_file(self):
        """Return the configuration file to bring airflow services up."""
        if self.app_config_has("airflow.docker_compose_file"):
            return self.app_config()["airflow.docker_compose_file"]
        return None

    def validate_config(self):
        """
        Check if all the fields in the reference config are
        defined in focused steps too. Otherwise
        raise an Exception (either pyhocon.ConfigMissingException
        or pyhocon.ConfigWrongTypeException)
        """
        reference = data_file("../config/template/minimum_aiscalator.conf")
        ref = pyhocon.ConfigFactory.parse_file(reference)
        msg = "In Global Application Configuration file "
        _validate_configs(self._app_conf, ref, msg,
                          missing_exception=True,
                          type_mismatch_exception=True)
        reference = data_file("../config/template/aiscalator.conf")
        ref = pyhocon.ConfigFactory.parse_file(reference)
        msg = "In Global Application Configuration file "
        _validate_configs(self._app_conf, ref, msg,
                          missing_exception=False,
                          type_mismatch_exception=True)
        if self._step_name:
            reference = data_file("../config/template/minimum_step.conf")
            ref = pyhocon.ConfigFactory.parse_file(reference)
            msg = "in step named " + self._step_name
            _validate_configs(self._step,
                              ref["steps"]["Untitled"],
                              msg,
                              missing_exception=True,
                              type_mismatch_exception=True)
            reference = data_file("../config/template/step.conf")
            ref = pyhocon.ConfigFactory.parse_file(reference)
            msg = "in step named " + self._step_name
            _validate_configs(self._step,
                              ref["steps"]["Untitled"],
                              msg,
                              missing_exception=False,
                              type_mismatch_exception=True)
        if self._dag_name:
            reference = data_file("../config/template/minimum_dag.conf")
            ref = pyhocon.ConfigFactory.parse_file(reference)
            msg = "in dag named " + self._dag_name
            _validate_configs(self._dag,
                              ref["dags"]["Untitled"],
                              msg,
                              missing_exception=True,
                              type_mismatch_exception=True)
            reference = data_file("../config/template/step.conf")
            ref = pyhocon.ConfigFactory.parse_file(reference)
            msg = "in dag named " + self._dag_name
            _validate_configs(self._dag,
                              ref["dags"]["Untitled"],
                              msg,
                              missing_exception=False,
                              type_mismatch_exception=True)

    ###################################################
    #                  Step methods                   #
    ###################################################

    def step_notebook_output_path(self, notebook) -> str:
        """Generates the name of the output notebook"""
        return ("/home/jovyan/work/notebook_run/" +
                os.path.basename(notebook).replace(".ipynb", "") + "_" +
                self._timestamp_now() +
                self.user_id() +
                ".ipynb")

    def step_field(self, field):
        """
        Returns the value associated with the field for the currently
        focused step.
        """
        if self.has_step_field(field):
            return self._step[field]
        return None

    def has_step_field(self, field) -> bool:
        """
        Tests if the currently focused step has a configuration
        value for the field.
        """
        if not self._step:
            return False
        return field in self._step

    def step_name(self):
        """
        Returns the name of the currently focused step
        """
        return self._step_name

    def step_file_path(self, string):
        """
        Returns absolute path of a file from a field of the currently
        focused step.
        """
        if not self.has_step_field(string):
            return None
        # TODO handle url
        root_dir = self.root_dir()
        if root_dir:
            path = os.path.join(root_dir, self.step_field(string))
            return os.path.realpath(path)
        return os.path.realpath(self.step_field(string))

    def step_container_name(self) -> str:
        """Return the docker container name to execute this step"""
        return (
            self.step_field("task.type") +
            "_" +
            self.step_name().replace(".", "_")
        )

    def step_extract_parameters(self) -> list:
        """Returns a list of docker parameters"""
        result = []
        if self.has_step_field("task.parameters"):
            for param in self.step_field("task.parameters"):
                for key in param:
                    result += ["-p", key, param[key]]
        return result

    ###################################################
    #                   DAG methods                   #
    ###################################################

    def dag_field(self, field):
        """
        Returns the value associated with the field for the currently
        focused dag.
        """
        if self.has_dag_field(field):
            return self._dag[field]
        return None

    def has_dag_field(self, field) -> bool:
        """
        Tests if the currently focused dag has a configuration
        value for the field.
        """
        if not self._dag:
            return False
        return field in self._dag

    def dag_name(self):
        """
        Returns the name of the currently focused dag
        """
        return self._dag_name

    def dag_file_path(self, string):
        """
        Returns absolute path of a file from a field of the currently
        focused dag.
        """
        if not self.has_dag_field(string):
            return None
        # TODO handle url
        root_dir = self.root_dir()
        if root_dir:
            path = os.path.join(root_dir, self.dag_field(string))
            return os.path.realpath(path)
        return os.path.realpath(self.dag_field(string))
    def dag_container_name(self) -> str:
        """Return the docker container name to execute this dag"""
        return (
            "airflow_" +
            self.dag_name().replace(".", "_")
        )


def _setup_app_config():
    """
    Setup global application configuration.
    If not found in the default location, this method will generate
    a brand new one.
    """
    try:
        file = _app_config_file()
        conf = pyhocon.ConfigFactory.parse_file(file)
    except FileNotFoundError:
        conf = pyhocon.ConfigFactory.parse_file(_generate_global_config())
    # test if since_version is deprecated and regenerate a newer config
    return conf


def _validate_configs(test, reference, path,
                      missing_exception=True,
                      type_mismatch_exception=True):
    """
    Recursively check that two configs match

    Parameters
    ----------
    test
        configuration object to test
    reference
        reference configuration object
    path : str
        this accumulates the recursive path for details in Exceptions
    missing_exception : bool
        when a missing field is found, raise exception?
    type_mismatch_exception : bool
        when a field has type mismatch, raise exception?
    """
    logger = getLogger(__name__)
    if isinstance(reference, pyhocon.config_tree.ConfigTree):
        for key in reference.keys():
            if key not in test.keys():
                msg = (path + ": Missing definition of " + key)
                if missing_exception:
                    raise pyhocon.ConfigMissingException(
                        message="Exception " + msg
                    )
                else:
                    logger.warning("Warning %s", msg)
            elif not isinstance(test[key], type(reference[key])):
                msg = (path + ": Type mismatch of " + key + " found type " +
                       str(type(test[key])) + " instead of " +
                       str(type(reference[key])))
                if type_mismatch_exception:
                    raise pyhocon.ConfigWrongTypeException(
                        message="Exception " + msg
                    )
                else:
                    logger.warning("Warning %s", msg)
            elif (isinstance(test[key], pyhocon.config_tree.ConfigTree) and
                  isinstance(reference[key], pyhocon.config_tree.ConfigTree)):
                # test recursively
                _validate_configs(test[key], reference[key],
                                  ".".join([path, key]),
                                  missing_exception,
                                  type_mismatch_exception)
            elif (isinstance(test[key], list) and
                  isinstance(reference[key], list)):
                # iterate through both collections
                for i in test[key]:
                    for j in reference[key]:
                        _validate_configs(i, j, ".".join([path, key]),
                                          missing_exception,
                                          type_mismatch_exception)


def _parse_config(step_config):
    """
    Interpret the step_config to produce a step configuration
    object. It could be provided as:
        - a path to a local file
        - a url to a remote file
        - the plain configuration stored as string

    Returns
    -------
    Step configuration object
    """
    if not step_config:
        return None
    if os.path.exists(step_config):
        conf = pyhocon.ConfigFactory.parse_file(step_config)
    else:
        try:
            conf = pyhocon.ConfigFactory.parse_URL(step_config)
        except (HTTPError, URLError):
            conf = pyhocon.ConfigFactory.parse_string(step_config)
    return conf


def _select_config(conf,
                   root_node: str, child_node: str,
                   selection: str):
    """
    Extract the step object corresponding to
    the name provided.

    Parameters
    ----------
    conf
        step configuration object
    root_node : str
        node to start looking from
    child_node : str
        node that represents the leaves we are searching
        for. The path from root_node to child_node is compared
        with selection to check for a match.
    selection : str
        name of node to extract

    Returns
    -------
    tuple of (node_name, node) of selected
    configuration object
    """
    result = None
    candidates = []
    if conf and root_node in conf:
        candidates = _find_config_tree(conf[root_node], child_node)
        if selection:
            for name, candidate in candidates:
                if name == selection:
                    result = (name, candidate)
                    break
        else:
            result = candidates[0]
    if selection and not result:
        msg = (selection + "'s " + child_node +
               " was not found in " + root_node +
               " configurations.\n ")
        if candidates:
            msg += ("Available candidates are: " +
                    " ".join([name for name, _ in candidates]))
        raise pyhocon.ConfigMissingException(msg)
    return result


def _find_config_tree(tree: pyhocon.ConfigTree, target_node, path="") -> list:
    """
    Find all target_node objects in the Configuration object and report
    their paths.

    Parameters
    ----------
    tree : pyhocon.ConfigTree
        Configuration object
    target_node : str
        key of Config to find
    path : str
        path that was traversed to get to this tree

    Returns
    -------
    list
        list of (path, ConfigTree) tuples for each subtree containing a
        definition of the target_node
    """
    result = []
    if path:
        next_path = path + "."
    else:
        next_path = ""
    for key in tree.keys():
        if key == target_node:
            result += [(path, tree)]
        else:
            if isinstance(tree[key], pyhocon.config_tree.ConfigTree):
                value = _find_config_tree(tree[key], target_node,
                                          path=next_path + key)
                if value:
                    result += value
    return result


def convert_to_format(file: str, output: str, output_format: str):
    """
    Converts a HOCON file to another format

    Parameters
    ----------
    file : str
        hocon file to convert
    output : str
        output file to produce
    output_format : str
        format of the output file

    Returns
    -------
    str
        the output file
    """
    (pyhocon
     .converter
     .HOCONConverter
     .convert_from_file(file, output_file=output,
                        output_format=output_format))
    os.remove(file)
    return output