Completed
Pull Request — master (#2432)
by Zatreanu
01:47
created

autoapply_actions()   D

Complexity

Conditions 10

Size

Total Lines 68

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 10
dl 0
loc 68
rs 4.3373
c 0
b 0
f 0

How to fix   Long Method    Complexity   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include: Extract Method, Replace Temp with Query, and Introduce Parameter Object.

Complexity

Complex functions like autoapply_actions() often do a lot of different things. To break such a function down, we need to identify a cohesive component within it. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.

import multiprocessing
import os
import platform
import queue
import subprocess
from itertools import chain

from coala_utils.string_processing.StringConverter import StringConverter

from coalib.collecting import Dependencies
from coalib.collecting.Collectors import collect_files
from coalib.output.printers.LOG_LEVEL import LOG_LEVEL
from coalib.parsing.Globbing import fnmatch
from coalib.processes.BearRunning import run
from coalib.processes.CONTROL_ELEMENT import CONTROL_ELEMENT
from coalib.processes.LogPrinterThread import LogPrinterThread
from coalib.results.Result import Result
from coalib.results.result_actions.ApplyPatchAction import ApplyPatchAction
from coalib.results.result_actions.PrintDebugMessageAction import (
    PrintDebugMessageAction)
from coalib.results.result_actions.ShowPatchAction import ShowPatchAction
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import glob_list
26
# Result actions that may be auto-applied when selected through a section's
# ``default_actions`` setting (resolved in ``get_default_actions`` below).
ACTIONS = [ApplyPatchAction,
           PrintDebugMessageAction,
           ShowPatchAction]
29
30
31
def get_cpu_count():
    """
    Return the number of CPUs available to this machine.

    Falls back to 2 on platforms where the CPU count cannot be determined.
    """
    # cpu_count is not implemented for some CPU architectures/OSes.
    try:
        count = multiprocessing.cpu_count()
    except NotImplementedError:  # pragma: no cover
        count = 2
    return count
37
38
39
def fill_queue(queue_fill, any_list):
    """
    Put every element of ``any_list`` into the queue ``queue_fill``.

    :param queue_fill: The queue to be filled.
    :param any_list:   An iterable containing the elements.
    """
    for element in any_list:
        queue_fill.put(element)
48
49
50
def get_running_processes(processes):
    """Count how many of the given processes are still alive."""
    return sum(1 for proc in processes if proc.is_alive())
52
53
54
def create_process_group(command_array, **kwargs):
    """
    Spawn ``command_array`` as the leader of a new process group.

    On Windows a new process group is requested via creation flags; on
    POSIX systems the child calls ``os.setsid`` before exec.

    :param command_array: The command (list of program + arguments) to run.
    :param kwargs:        Extra keyword arguments for ``subprocess.Popen``.
    :return:              The started ``Popen`` object.
    """
    if platform.system() == "Windows":  # pragma: no cover
        return subprocess.Popen(
            command_array,
            creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
            **kwargs)

    return subprocess.Popen(command_array,
                            preexec_fn=os.setsid,
                            **kwargs)
65
66
67
def get_default_actions(section):
    """
    Parses the key ``default_actions`` in the given section.

    :param section:    The section where to parse from.
    :return:           A dict with the bearname as keys and their default
                       actions as values and another dict that contains bears
                       and invalid action names.
    """
    try:
        default_actions = dict(section["default_actions"])
    except IndexError:
        # No ``default_actions`` setting present in the section.
        return {}, {}

    action_dict = {action.get_metadata().name: action for action in ACTIONS}
    # A ``dict.values()`` view supports no set arithmetic (unlike ``keys()``),
    # so materialize it as a real set before subtracting the known names.
    invalid_action_set = set(default_actions.values()) - action_dict.keys()
    invalid_actions = {}
    if invalid_action_set:
        invalid_actions = {
            bear: action
            for bear, action in default_actions.items()
            if action in invalid_action_set}
        # Drop the invalid entries so only resolvable actions remain.
        for invalid in invalid_actions:
            del default_actions[invalid]

    actions = {bearname: action_dict[action_name]
               for bearname, action_name in default_actions.items()}
    return actions, invalid_actions
95
96
97
def autoapply_actions(results,
                      file_dict,
                      file_diff_dict,
                      section,
                      log_printer):
    """
    Auto-applies actions like defined in the given section.

    :param results:        A list of results.
    :param file_dict:      A dictionary containing the name of files and its
                           contents.
    :param file_diff_dict: A dictionary that contains filenames as keys and
                           diff objects as values.
    :param section:        The section.
    :param log_printer:    A log printer instance to log messages on.
    :return:               A list of unprocessed results.
    """
    default_actions, invalid_actions = get_default_actions(section)

    # Tell the user about configured actions that could not be resolved.
    for bearname, actionname in invalid_actions.items():
        log_printer.warn("Selected default action {!r} for bear {!r} does "
                         "not exist. Ignoring action.".format(actionname,
                                                              bearname))

    if len(default_actions) == 0:
        # There's nothing to auto-apply.
        return results

    not_processed_results = []
    for result in results:
        # An exact bear-name match takes priority; otherwise fall back to
        # the first glob pattern matching the result's origin.
        action = default_actions.get(result.origin)
        if action is None:
            action = next((default_actions[bear_glob]
                           for bear_glob in default_actions
                           if fnmatch(result.origin, bear_glob)),
                          None)
        if action is None:
            not_processed_results.append(result)
            continue

        if not action.is_applicable(result, file_dict, file_diff_dict):
            log_printer.warn("Selected default action {!r} for bear {!r} is "
                             "not applicable. Action not applied.".format(
                                 action.get_metadata().name, result.origin))
            not_processed_results.append(result)
            continue

        try:
            action().apply_from_section(result,
                                        file_dict,
                                        file_diff_dict,
                                        section)
            log_printer.info("Applied {!r} on {} from {!r}.".format(
                action.get_metadata().name,
                result.location_repr(),
                result.origin))
        except Exception as ex:
            not_processed_results.append(result)
            log_printer.log_exception(
                "Failed to execute action {!r} with error: {}.".format(
                    action.get_metadata().name, ex),
                ex)
            log_printer.debug("-> for result " + repr(result) + ".")

    return not_processed_results
165
166
167
def check_result_ignore(result, ignore_ranges):
    """
    Determines if the result has to be ignored.

    :param result:        The result that needs to be checked.
    :param ignore_ranges: A list of tuples, each containing a list of lower
                          cased affected bearnames and a SourceRange to
                          ignore. If any of the bearname lists is empty, it
                          is considered an ignore range for all bears.
                          This may be a list of globbed bear wildcards.
    :return:              True if the result has to be ignored.
    """
    for bears, ignore_range in ignore_ranges:
        if not result.overlaps(ignore_range):
            continue

        origin = result.origin.lower()
        # An empty bear list means "ignore everything"; otherwise the origin
        # must appear literally or match one of the glob patterns.
        if not bears or origin in bears or fnmatch(origin, bears):
            return True

    return False
186
187
188
def print_result(results,
                 file_dict,
                 retval,
                 print_results,
                 section,
                 log_printer,
                 file_diff_dict,
                 ignore_ranges):
    """
    Takes the results produced by each bear and gives them to the print_results
    method to present to the user.

    :param results:        A list of results.
    :param file_dict:      A dictionary containing the name of files and its
                           contents.
    :param retval:         True if any results were yielded by an earlier
                           invocation. If True, the returned flag stays True
                           no matter what; otherwise it depends on whether
                           this invocation yields results.
    :param print_results:  A function that prints all given results appropriate
                           to the output medium.
    :param section:        The section.
    :param log_printer:    A log printer instance to log messages on.
    :param file_diff_dict: A dictionary that contains filenames as keys and
                           diff objects as values.
    :param ignore_ranges:  A list of SourceRanges. Results that affect code in
                           any of those ranges will be ignored.
    :return:               A tuple of the updated ``retval`` flag (True if
                           this or any earlier invocation yielded results)
                           and the list of results remaining after
                           auto-applying actions.
    """
    min_severity_str = str(section.get('min_severity', 'INFO')).upper()
    # Fall back to the numeric INFO severity for unknown names. Falling back
    # to the *string* 'INFO' would make the ``severity >=`` comparison below
    # raise a TypeError (int vs. str) for any unrecognized setting value.
    min_severity = RESULT_SEVERITY.str_dict.get(
        min_severity_str, RESULT_SEVERITY.str_dict['INFO'])
    # ``type(result) is Result`` is an exact-type check — presumably to keep
    # Result subclasses (e.g. hidden results) out of the output; TODO confirm.
    results = list(filter(lambda result:
                          type(result) is Result and
                          result.severity >= min_severity and
                          not check_result_ignore(result, ignore_ranges),
                          results))

    # NOTE(review): this gate relies on the truthiness of the object that
    # ``section.get`` returns performing real boolean parsing; with a plain
    # non-empty string it would always be True — confirm Setting.__bool__.
    if bool(section.get('autoapply', 'true')):
        patched_results = autoapply_actions(results,
                                            file_dict,
                                            file_diff_dict,
                                            section,
                                            log_printer)
    else:
        patched_results = results

    print_results(log_printer,
                  section,
                  patched_results,
                  file_dict,
                  file_diff_dict)
    return retval or len(results) > 0, patched_results
239
240
241
def get_file_dict(filename_list, log_printer):
    """
    Reads all files into a dictionary.

    Unreadable files (non-unicode content or OS errors) are skipped with a
    warning instead of aborting the run.

    :param filename_list: List of names of paths to files to get contents of.
    :param log_printer:   The logger which logs errors.
    :return:              Dictionary mapping each readable filename to a
                          tuple of its lines.
    """
    file_dict = {}
    for filename in filename_list:
        try:
            with open(filename, "r", encoding="utf-8") as handle:
                lines = handle.readlines()
        except UnicodeDecodeError:
            log_printer.warn("Failed to read file '{}'. It seems to contain "
                             "non-unicode characters. Leaving it "
                             "out.".format(filename))
            continue
        except OSError as exception:  # pragma: no cover
            log_printer.log_exception("Failed to read file '{}' because of "
                                      "an unknown error. Leaving it "
                                      "out.".format(filename),
                                      exception,
                                      log_level=LOG_LEVEL.WARNING)
            continue

        file_dict[filename] = tuple(lines)

    log_printer.debug("Files that will be checked:\n" +
                      "\n".join(file_dict.keys()))
    return file_dict
269
270
271
def filter_raising_callables(it, exception, *args, **kwargs):
    """
    Filters all callable items inside the given iterator that raise the
    given exceptions.

    :param it:        The iterator to filter.
    :param exception: The (tuple of) exception(s) to filter for.
    :param args:      Positional arguments to pass to the callable.
    :param kwargs:    Keyword arguments to pass to the callable.
    """
    for elem in it:
        # Keep only the call itself inside the ``try``: with the ``yield``
        # inside it, an exception thrown into this generator by its consumer
        # (via ``generator.throw``) would be silently swallowed as well.
        try:
            result = elem(*args, **kwargs)
        except exception:
            continue
        yield result
286
287
288
def instantiate_bears(section,
                      local_bear_list,
                      global_bear_list,
                      file_dict,
                      message_queue):
    """
    Instantiates each bear with the arguments it needs.

    Bears whose instantiation raises ``RuntimeError`` are silently dropped
    from the returned lists (see ``filter_raising_callables``).

    :param section:          The section the bears belong to.
    :param local_bear_list:  List of local bear classes to instantiate.
    :param global_bear_list: List of global bear classes to instantiate.
    :param file_dict:        Dictionary containing filenames and their
                             contents.
    :param message_queue:    Queue responsible to maintain the messages
                             delivered by the bears.
    :return:                 The local and global bear instance lists.
    """
    # ``list(...)`` instead of an identity list comprehension (PERF402).
    local_bear_list = list(filter_raising_callables(local_bear_list,
                                                    RuntimeError,
                                                    section,
                                                    message_queue,
                                                    timeout=0.1))

    global_bear_list = list(filter_raising_callables(global_bear_list,
                                                     RuntimeError,
                                                     file_dict,
                                                     section,
                                                     message_queue,
                                                     timeout=0.1))

    return local_bear_list, global_bear_list
323
324
325
def instantiate_processes(section,
                          local_bear_list,
                          global_bear_list,
                          job_count,
                          cache,
                          log_printer):
    """
    Instantiate the number of processes that will run bears which will be
    responsible for running bears in a multiprocessing environment.

    :param section:          The section the bears belong to.
    :param local_bear_list:  List of local bears belonging to the section.
    :param global_bear_list: List of global bears belonging to the section.
    :param job_count:        Max number of processes to create.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log printer to warn to.
    :return:                 A tuple containing a list of processes,
                             and the arguments passed to each process which are
                             the same for each object.
    """
    filename_list = collect_files(
        glob_list(section.get('files', "")),
        log_printer,
        ignored_file_paths=glob_list(section.get('ignore', "")),
        limit_file_paths=glob_list(section.get('limit_files', "")))

    # This stores all matched files irrespective of whether coala is run
    # only on changed files or not. Global bears require all the files
    complete_filename_list = filename_list

    # Start tracking all the files
    if cache and section.get('changed_files', False):
        cache.track_files(set(complete_filename_list))
        # ``cache`` is known to be truthy inside this branch, so no
        # fallback to the full ``filename_list`` is needed here.
        changed_files = cache.get_uncached_files(set(filename_list))

        # If caching is enabled then the local bears should process only the
        # changed files.
        # FIXME: Log this to the debug channel instead.
        log_printer.info("coala is run only on changed files, bears' log "
                         "messages from previous runs may not appear. You may "
                         "use the `--flush-cache` flag to see them.")
        filename_list = changed_files

    # Note: the complete file dict is given as the file dict to bears and
    # the whole project is accessible to every bear. However, local bears are
    # run only for the changed files if caching is enabled.
    file_dict = get_file_dict(filename_list, log_printer)
    complete_file_dict = get_file_dict(complete_filename_list, log_printer)

    manager = multiprocessing.Manager()
    global_bear_queue = multiprocessing.Queue()
    filename_queue = multiprocessing.Queue()
    local_result_dict = manager.dict()
    global_result_dict = manager.dict()
    message_queue = multiprocessing.Queue()
    control_queue = multiprocessing.Queue()

    bear_runner_args = {"file_name_queue": filename_queue,
                        "local_bear_list": local_bear_list,
                        "global_bear_list": global_bear_list,
                        "global_bear_queue": global_bear_queue,
                        "file_dict": file_dict,
                        "local_result_dict": local_result_dict,
                        "global_result_dict": global_result_dict,
                        "message_queue": message_queue,
                        "control_queue": control_queue,
                        "timeout": 0.1}

    # Instantiate bears against the complete file dict so every bear can see
    # the whole project; slice assignment updates the caller's lists in place.
    local_bear_list[:], global_bear_list[:] = instantiate_bears(
        section,
        local_bear_list,
        global_bear_list,
        complete_file_dict,
        message_queue)

    fill_queue(filename_queue, file_dict.keys())
    fill_queue(global_bear_queue, range(len(global_bear_list)))

    # Every worker process gets identical arguments; the loop index is unused.
    return ([multiprocessing.Process(target=run, kwargs=bear_runner_args)
             for _ in range(job_count)],
            bear_runner_args)
408
409
410
def get_ignore_scope(line, keyword):
    """
    Retrieves the bears that are to be ignored defined in the given line.

    :param line:    The line containing the ignore declaration.
    :param keyword: The keyword that was found. Everything after the rightmost
                    occurrence of it will be considered for the scope.
    :return:        A list of lower cased bearnames or an empty list (-> "all")
    """
    scope_text = line[line.rfind(keyword) + len(keyword):]
    if scope_text.startswith("all"):
        return []
    return list(StringConverter(scope_text, list_delimiters=', '))
424
425
426
def yield_ignore_ranges(file_dict):
    """
    Yields tuples of affected bears and a SourceRange that shall be ignored for
    those.

    :param file_dict: The file dictionary.
    """
    for filename, file in file_dict.items():
        start = None
        bears = []
        stop_ignoring = False
        for line_number, line in enumerate(file, start=1):
            # Before lowering all lines ever read, first look for the biggest
            # common substring, case sensitive: I*gnor*e, start i*gnor*ing.
            if 'gnor' in line:
                line = line.lower()
                if "start ignoring " in line:
                    start = line_number
                    bears = get_ignore_scope(line, "start ignoring ")
                elif "stop ignoring" in line:
                    stop_ignoring = True
                    if start:
                        yield (bears,
                               SourceRange.from_values(
                                   filename,
                                   start,
                                   1,
                                   line_number,
                                   len(file[line_number-1])))
                elif "ignore " in line:
                    # A single-line ignore covers this line and the next one.
                    # Clamp at EOF: indexing ``file[line_number]`` would raise
                    # IndexError when the ignore comment sits on the very
                    # last line of the file.
                    end_line = min(line_number + 1, len(file))
                    yield (get_ignore_scope(line, "ignore "),
                           SourceRange.from_values(filename,
                                                   line_number,
                                                   1,
                                                   end_line,
                                                   len(file[end_line - 1])))
        # An unterminated "start ignoring" block reaches until end of file.
        if stop_ignoring is False and start is not None:
            yield (bears,
                   SourceRange.from_values(filename,
                                           start,
                                           1,
                                           len(file),
                                           len(file[-1])))
469
470
471
def get_file_list(results):
    """
    Get the set of files that are affected in the given results.

    :param results: A list of results from which the list of files is to be
                    extracted.
    :return:        A set of file paths containing the mentioned list of
                    files.
    """
    affected_files = set()
    for result in results:
        for code in result.affected_code:
            affected_files.add(code.file)
    return affected_files
481
482
483
def _flush_result_group(result_dict, index, file_dict, retval, print_results,
                        section, log_printer, file_diff_dict, ignore_ranges,
                        result_files):
    """
    Print the results stored at ``result_dict[index]`` and write the
    unprocessed remainder back into the dict.

    :param result_dict:  Manager dict holding bear results, keyed by
                         ``index``.
    :param result_files: Set updated in place with the files affected by
                         these results.
    :return:             The updated ``retval`` flag (True once any results
                         have been yielded).
    """
    result_files.update(get_file_list(result_dict[index]))
    retval, res = print_result(result_dict[index],
                               file_dict,
                               retval,
                               print_results,
                               section,
                               log_printer,
                               file_diff_dict,
                               ignore_ranges)
    result_dict[index] = res
    return retval


def process_queues(processes,
                   control_queue,
                   local_result_dict,
                   global_result_dict,
                   file_dict,
                   print_results,
                   section,
                   cache,
                   log_printer):
    """
    Iterate the control queue and send the results received to the
    print_result method so that they can be presented to the user.

    :param processes:          List of processes which can be used to run
                               Bears.
    :param control_queue:      Containing control elements that indicate
                               whether there is a result available and which
                               bear it belongs to.
    :param local_result_dict:  Dictionary containing results respective to
                               local bears. It is modified by the processes
                               i.e. results are added to it by multiple
                               processes.
    :param global_result_dict: Dictionary containing results respective to
                               global bears. It is modified by the processes
                               i.e. results are added to it by multiple
                               processes.
    :param file_dict:          Dictionary containing file contents with
                               filename as keys.
    :param print_results:      Prints all given results appropriate to the
                               output medium.
    :param section:            The section.
    :param cache:              An instance of ``misc.Caching.FileCache`` to use
                               as a file cache buffer.
    :param log_printer:        A log printer instance to log messages on.
    :return:                   Return True if all bears execute successfully
                               and Results were delivered to the user. Else
                               False.
    """
    file_diff_dict = {}
    retval = False
    # Number of processes working on local/global bears. They are count down
    # when the last queue element of that process is processed which may be
    # *after* the process has ended!
    local_processes = len(processes)
    global_processes = len(processes)
    global_result_buffer = []
    result_files = set()
    ignore_ranges = list(yield_ignore_ranges(file_dict))

    # One process is the logger thread
    while local_processes > 1:
        try:
            control_elem, index = control_queue.get(timeout=0.1)

            if control_elem == CONTROL_ELEMENT.LOCAL_FINISHED:
                local_processes -= 1
            elif control_elem == CONTROL_ELEMENT.GLOBAL_FINISHED:
                global_processes -= 1
            elif control_elem == CONTROL_ELEMENT.LOCAL:
                assert local_processes != 0
                retval = _flush_result_group(
                    local_result_dict, index, file_dict, retval,
                    print_results, section, log_printer, file_diff_dict,
                    ignore_ranges, result_files)
            else:
                assert control_elem == CONTROL_ELEMENT.GLOBAL
                # Defer global results until all local results are handled.
                global_result_buffer.append(index)
        except queue.Empty:
            if get_running_processes(processes) < 2:  # pragma: no cover
                # Recover silently, those branches are only
                # nondeterministically covered.
                break

    # Flush global result buffer
    for elem in global_result_buffer:
        retval = _flush_result_group(
            global_result_dict, elem, file_dict, retval, print_results,
            section, log_printer, file_diff_dict, ignore_ranges,
            result_files)

    # One process is the logger thread
    while global_processes > 1:
        try:
            control_elem, index = control_queue.get(timeout=0.1)

            if control_elem == CONTROL_ELEMENT.GLOBAL:
                retval = _flush_result_group(
                    global_result_dict, index, file_dict, retval,
                    print_results, section, log_printer, file_diff_dict,
                    ignore_ranges, result_files)
            else:
                assert control_elem == CONTROL_ELEMENT.GLOBAL_FINISHED
                global_processes -= 1
        except queue.Empty:
            if get_running_processes(processes) < 2:  # pragma: no cover
                # Recover silently, those branches are only
                # nondeterministically covered.
                break

    if cache:
        cache.untrack_files(result_files)
    return retval
600
601
602
def simplify_section_result(section_result):
    """
    Takes in a section's result from ``execute_section`` and simplifies it
    for easy usage in other functions.

    :param section_result: The result of a section which was executed.
    :return:               Tuple containing:
                            - bool - True if results were yielded
                            - bool - True if unfixed results were yielded
                            - list - Results from all bears (local and global)
    """
    section_yielded_result = section_result[0]
    # Flatten all local and global bear results, skipping empty (None)
    # entries, in one comprehension instead of nested append loops.
    results_for_section = [result
                           for value in chain(section_result[1].values(),
                                              section_result[2].values())
                           if value is not None
                           for result in value]
    section_yielded_unfixed_results = len(results_for_section) > 0

    return (section_yielded_result,
            section_yielded_unfixed_results,
            results_for_section)
627
628
629
def execute_section(section,
                    global_bear_list,
                    local_bear_list,
                    print_results,
                    cache,
                    log_printer):
    """
    Executes the section with the given bears.

    The execute_section method does the following things:

    1. Prepare a Process
       -  Load files
       -  Create queues
    2. Spawn up one or more Processes
    3. Output results from the Processes
    4. Join all processes

    :param section:          The section to execute.
    :param global_bear_list: List of global bears belonging to the section.
    :param local_bear_list:  List of local bears belonging to the section.
    :param print_results:    Prints all given results appropriate to the
                             output medium.
    :param cache:            An instance of ``misc.Caching.FileCache`` to use as
                             a file cache buffer.
    :param log_printer:      The log_printer to warn to.
    :return:                 Tuple containing a bool (True if results were
                             yielded, False otherwise), a Manager.dict
                             containing all local results(filenames are key)
                             and a Manager.dict containing all global bear
                             results (bear names are key) as well as the
                             file dictionary.
    """
    # Resolve the bear lists via Dependencies.resolve (presumably pulls in
    # bears required by the listed ones — confirm against Dependencies docs).
    local_bear_list = Dependencies.resolve(local_bear_list)
    global_bear_list = Dependencies.resolve(global_bear_list)

    # Worker count: an explicit 'jobs' setting wins; an unparsable value
    # warns and falls back to the CPU count, a missing one falls back
    # silently.
    try:
        running_processes = int(section['jobs'])
    except ValueError:
        log_printer.warn("Unable to convert setting 'jobs' into a number. "
                         "Falling back to CPU count.")
        running_processes = get_cpu_count()
    except IndexError:
        running_processes = get_cpu_count()

    processes, arg_dict = instantiate_processes(section,
                                                local_bear_list,
                                                global_bear_list,
                                                running_processes,
                                                cache,
                                                log_printer)

    logger_thread = LogPrinterThread(arg_dict["message_queue"],
                                     log_printer)
    # Start and join the logger thread along with the processes to run bears
    processes.append(logger_thread)

    for runner in processes:
        runner.start()

    try:
        return (process_queues(processes,
                               arg_dict["control_queue"],
                               arg_dict["local_result_dict"],
                               arg_dict["global_result_dict"],
                               arg_dict["file_dict"],
                               print_results,
                               section,
                               cache,
                               log_printer),
                arg_dict["local_result_dict"],
                arg_dict["global_result_dict"],
                arg_dict["file_dict"])
    finally:
        # Always stop the logger thread and join every worker, even if
        # result processing raised.
        logger_thread.running = False

        for runner in processes:
            runner.join()
707