Completed
Push to master (1d68af...e525d3) by Simon, created 01:31

hyperactive.main_args.MainArgs.search_args()   Grade: A

Complexity

Conditions 3

Size

Total Lines 38
Code Lines 28

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0

Metric  Value
eloc    28      (effective/code lines)
dl      0       (duplicated lines)
loc     38      (total lines)
rs      9.208
c       0
b       0
f       0
cc      3       (cyclomatic complexity / conditions)
nop     8       (number of parameters, including self)

How to fix: Many Parameters

Methods with many parameters are not only hard to understand; their parameter lists also tend to grow inconsistent as you later need more, or different, data.

There are several approaches to avoid long parameter lists:
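- Introduce a parameter object: bundle the values that always travel together (here the seven search settings) into a single object and pass that instead.
- Preserve the whole object: pass an existing object and let the method read the attributes it needs, rather than extracting them at the call site.
- Replace a parameter with a query: drop a parameter the method can compute or look up on its own.

A hedged sketch of the first approach, applied to search_args(), follows the source listing below.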

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


import random
import numpy as np
import multiprocessing

from .checks import check_hyperactive_para, check_search_para


def stop_warnings():
    # because sklearn warnings are annoying when they appear 100 times
    def warn(*args, **kwargs):
        pass

    import warnings

    warnings.warn = warn


class MainArgs:
    def __init__(self, X, y, memory, random_state, verbosity, warnings, ext_warnings):
        check_hyperactive_para(X, y, memory, random_state, verbosity)

        if not ext_warnings:
            stop_warnings()

        self._verb_ = None
        self.hyperactive_para = {
            "memory": memory,
            "random_state": random_state,
            "verbosity": verbosity,
        }

        self.X = X
        self.y = y
        self.verbosity = verbosity
        self.random_state = random_state
        self.memory = memory

        self.opt_para = dict()

    def search_args(
        self, search_config, max_time, n_iter, optimizer, n_jobs, scheduler, init_config
    ):
        check_search_para(
            search_config, max_time, n_iter, optimizer, n_jobs, scheduler, init_config
        )

        self.search_para = {
            "search_config": search_config,
            "max_time": max_time,
            "n_iter": n_iter,
            "optimizer": optimizer,
            "n_jobs": n_jobs,
            "scheduler": scheduler,
            "init_config": init_config,
        }

        self.search_config = search_config
        self.max_time = max_time
        self.n_iter = n_iter
        self.optimizer = optimizer
        self.n_jobs = n_jobs
        self.scheduler = scheduler
        self.init_config = init_config

        self.model_list = list(self.search_config.keys())
        self.n_models = len(self.model_list)

        if self.max_time:
            self.max_time = self.max_time * 3600

        self.set_n_jobs()

        self._n_process_range = range(0, int(self.n_jobs))

        if isinstance(optimizer, dict):
            self.optimizer = list(optimizer.keys())[0]
            self.opt_para = optimizer[self.optimizer]

    def _set_random_seed(self, thread=0):
        """Sets the random seed separately for each thread (to avoid getting the same results in each thread)"""
        if self.n_jobs > 1 and not self.random_state:
            rand = np.random.randint(0, high=2 ** 32 - 2)
            random.seed(rand + thread)
            np.random.seed(rand + thread)

        elif self.random_state:
            rand = int(self.random_state)

            random.seed(rand + thread)
            np.random.seed(rand + thread)

    def set_n_jobs(self):
        """Sets the number of jobs to run in parallel"""
        num_cores = multiprocessing.cpu_count()
        if self.n_jobs == -1 or self.n_jobs > num_cores:
            self.n_jobs = num_cores
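The sketch below illustrates the parameter-object approach against this method. The SearchArgs dataclass, the MainArgsSketch class, and all default values are illustrative assumptions, not part of the hyperactive API; the sketch also omits the validation and job-count handling the real method performs.

from dataclasses import dataclass, asdict
from typing import Optional, Union


@dataclass
class SearchArgs:
    # Hypothetical parameter object bundling the seven values that always
    # travel together into search_args(); defaults are illustrative only.
    search_config: dict
    max_time: Optional[float] = None
    n_iter: int = 10
    optimizer: Union[str, dict] = "RandomSearch"
    n_jobs: int = 1
    scheduler: Optional[str] = None
    init_config: Optional[dict] = None


class MainArgsSketch:
    # Stand-in for MainArgs that shows only the reworked parameter handling.

    def search_args(self, args: SearchArgs):
        # One argument instead of seven; the dict the original builds by hand
        # now falls out of the dataclass for free.
        self.search_para = asdict(args)
        for name, value in self.search_para.items():
            setattr(self, name, value)

        self.model_list = list(args.search_config.keys())
        self.n_models = len(self.model_list)

        if args.max_time:
            # convert hours to seconds, as in the original method
            self.max_time = args.max_time * 3600


# Callers build the parameter object once and can reuse or extend it without
# touching the method signature.
args = SearchArgs(search_config={"model": {"param": [1, 2, 3]}}, n_iter=50, n_jobs=2)
MainArgsSketch().search_args(args)

With this layout, adding a new setting means adding one field to SearchArgs rather than threading another positional argument through search_args() and check_search_para().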