Total Complexity | 4 |
Total Lines | 49 |
Duplicated Lines | 0 % |
Changes | 0 |
1 | import fileinput |
||
2 | import re |
||
3 | import os |
||
4 | from glob import glob |
||
5 | from shutil import rmtree |
||
6 | |||
7 | """ |
||
8 | cleandir is function which removes generated reports. |
||
9 | """ |
||
10 | |||
11 | |||
def cleandir():
    """Delete every generated 'graph-of-*' report directory in the cwd.

    Non-directory entries that happen to match the pattern are left alone.
    """
    report_glob = os.path.join(os.getcwd(), "graph-of-*")
    for candidate in glob(report_glob):
        if os.path.isdir(candidate):
            rmtree(candidate)
||
20 | |||
21 | |||
22 | """ |
||
23 | Data_cleaner is script which removes randomly generated items from test files. |
||
24 | """ |
||
25 | |||
26 | |||
# UUID-shaped token: five dash-separated groups of 8-4-4-4-12 alphanumeric
# characters.  Compiled once at import time because replace_hash is called
# once per line of every rewritten file; re.sub(pattern_string, ...) would
# pay the pattern-cache lookup on each call.
_UUID_PATTERN = re.compile(
    r'([a-zA-Z]|\d){8}-([a-zA-Z]|\d){4}-([a-zA-Z]|\d){4}-([a-zA-Z]|\d){4}-([a-zA-Z]|\d){12}')


def replace_hash(line):
    """Return *line* with every UUID-shaped token replaced by 'UUID-HASH'.

    The argument is coerced with str(), so non-string values (e.g. ints)
    are accepted and returned as their unmodified string form when they
    contain no UUID.
    """
    return _UUID_PATTERN.sub('UUID-HASH', str(line))
||
32 | |||
33 | |||
# ---------------------------------------------------------------------------
# Script body: walk the tree rooted at the current directory and rewrite
# every .json / .js file in place, replacing UUID-shaped tokens with the
# literal 'UUID-HASH' (see replace_hash above).
# ---------------------------------------------------------------------------
path = './'

files = []

# r=root, _dirs=subdirectories (unused), f=filenames in r
for r, _dirs, f in os.walk(path):
    for file_ in f:
        # endswith() with a tuple matches only real .json/.js extensions.
        # The previous substring test ('.json' in file_ or '.js' in file_)
        # also caught names such as 'data.json.bak' or 'app.jsx'.
        if file_.endswith(('.json', '.js')):
            files.append(os.path.join(r, file_))

for file_src in files:
    print(file_src)
    # With inplace=True, fileinput redirects stdout into file_src, so the
    # print() below writes each transformed line back; end="" avoids
    # doubling the newline already present in `line`.
    with fileinput.FileInput(file_src, inplace=True) as f:
        for line in f:
            print(replace_hash(line), end="")
||
49 |