Total Complexity | 1 |
Total Lines | 31 |
Duplicated Lines | 0 % |
Changes | 0 |
1 | """ |
||
2 | Data_cleaner is a script that removes randomly generated items from test files. |
||
3 | """ |
||
4 | import fileinput |
||
5 | import re |
||
6 | import os |
||
7 | |||
8 | |||
def replace_hash(line):
    """Return *line* with every UUID replaced by the literal 'UUID-HASH'.

    UUIDs are five dash-separated groups (8-4-4-4-12) of *hexadecimal*
    digits.  The character class is restricted to hex — the original
    `([a-zA-Z]|\\d)` class accepted any ASCII letter and any Unicode
    digit, so dash-separated words that happened to have the 8-4-4-4-12
    shape were wrongly rewritten.

    The argument is coerced with str() so non-string inputs are handled
    gracefully rather than raising from re.sub.
    """
    return re.sub(
        r'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
        r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12}',
        'UUID-HASH',
        str(line))
||
14 | |||
15 | |||
# Root of the directory tree to scan for test fixtures.
path = './'

# Paths of every .json / .js file found under `path`.
files = []

# r=root, d=directories, f = files
for r, d, f in os.walk(path):
    for file_ in f:
        # Match on the real extension.  The original substring check
        # ('.json' in name or '.js' in name) also caught files such as
        # 'app.jsx' or 'data.json.bak', which must not be rewritten.
        if file_.endswith(('.json', '.js')):
            files.append(os.path.join(r, file_))
||
25 | |||
# Rewrite every collected file in place, masking UUIDs line by line.
for target in files:
    # Echo the file being processed to the console (printed before the
    # inplace redirection below takes effect).
    print(target)
    # With inplace=True, fileinput redirects stdout into the file being
    # edited, so each print() writes the cleaned line back to the file.
    # end="" because the line already carries its own newline.
    with fileinput.FileInput(target, inplace=True) as stream:
        for raw_line in stream:
            print(replace_hash(raw_line), end="")
||
31 |