Code Duplication    Length = 42-45 lines in 2 locations

gradient_free_optimizers/optimizers/smb_opt/forest_optimizer.py 1 location

@@ 35-79 (lines=45) @@
32
        return ((num / den) + 0) / 1
33
34
35
class ForestOptimizer(SMBO):
    """Sequential model-based optimizer with a tree-ensemble surrogate model.

    Based on the forest-optimizer in the scikit-optimize package.
    """

    name = "Forest Optimization"
    _name_ = "forest_optimization"

    def __init__(
        self,
        *args,
        tree_regressor="extra_tree",
        tree_para=None,
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling=None,
        warnings=100000000,
        **kwargs
    ):
        """
        Parameters
        ----------
        tree_regressor : str
            Key into ``tree_regressor_dict`` selecting the surrogate model class.
        tree_para : dict or None
            Keyword arguments forwarded to the tree regressor constructor.
            ``None`` (the default) means ``{"n_estimators": 100}``.
        xi : float
            Exploration parameter of the expected-improvement acquisition.
        warm_start_smbo
            Previous search data used by ``init_warm_start_smbo``.
        max_sample_size : int
            Upper bound on the number of candidate positions considered.
        sampling : dict or None
            Candidate-sampling strategy. ``None`` (the default) means
            ``{"random": 1000000}``.
        warnings : int
            Threshold value stored for downstream warning logic.
        """
        super().__init__(*args, **kwargs)

        # Resolve None sentinels to fresh dicts here: writing the dict
        # literals as default argument values would share one mutable dict
        # across every call and instance.
        if tree_para is None:
            tree_para = {"n_estimators": 100}
        if sampling is None:
            sampling = {"random": 1000000}

        self.tree_regressor = tree_regressor
        self.tree_para = tree_para
        self.regr = tree_regressor_dict[tree_regressor](**self.tree_para)
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def _expected_improvement(self):
        """Score the sampled candidate positions with expected improvement."""
        all_pos_comb = self._all_possible_pos()
        self.pos_comb = self._sampling(all_pos_comb)

        acqu_func = ExpectedImprovement(self.regr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        """Fit the surrogate model on the normalized samples collected so far."""
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        # No observations yet: fall back to a random move instead of fitting.
        if len(Y_sample) == 0:
            return self.move_random()

        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
80

gradient_free_optimizers/optimizers/smb_opt/bayesian_optimization.py 1 location

@@ 30-71 (lines=42) @@
27
        return ((num / den) + 0) / 1
28
29
30
class BayesianOptimizer(SMBO):
    """Sequential model-based optimizer with a Gaussian-process surrogate."""

    name = "Bayesian Optimization"
    _name_ = "bayesian_optimization"

    def __init__(
        self,
        *args,
        gpr=gaussian_process["gp_nonlinear"],
        xi=0.03,
        warm_start_smbo=None,
        max_sample_size=10000000,
        sampling=None,
        warnings=100000000,
        **kwargs
    ):
        """
        Parameters
        ----------
        gpr
            Gaussian-process regressor used as the surrogate model.
            NOTE(review): the default is a single shared object from
            ``gaussian_process`` that ``_training`` mutates via ``fit`` —
            instances created without an explicit ``gpr`` share state.
            Confirm whether a fresh regressor should be constructed per
            instance.
        xi : float
            Exploration parameter of the expected-improvement acquisition.
        warm_start_smbo
            Previous search data used by ``init_warm_start_smbo``.
        max_sample_size : int
            Upper bound on the number of candidate positions considered.
        sampling : dict or None
            Candidate-sampling strategy. ``None`` (the default) means
            ``{"random": 1000000}``.
        warnings : int
            Threshold value stored for downstream warning logic.
        """
        super().__init__(*args, **kwargs)

        # Resolve the None sentinel to a fresh dict here: a dict literal as
        # a default argument value would be shared across every call.
        if sampling is None:
            sampling = {"random": 1000000}

        self.gpr = gpr
        self.regr = gpr
        self.xi = xi
        self.warm_start_smbo = warm_start_smbo
        self.max_sample_size = max_sample_size
        self.sampling = sampling
        self.warnings = warnings

        self.init_warm_start_smbo()

    def _expected_improvement(self):
        """Score the sampled candidate positions with expected improvement."""
        all_pos_comb = self._all_possible_pos()
        self.pos_comb = self._sampling(all_pos_comb)

        acqu_func = ExpectedImprovement(self.gpr, self.pos_comb, self.xi)
        return acqu_func.calculate(self.X_sample, self.Y_sample)

    def _training(self):
        """Fit the surrogate model on the normalized samples collected so far."""
        X_sample = np.array(self.X_sample)
        Y_sample = np.array(self.Y_sample)

        # No observations yet: fall back to a random move instead of fitting.
        if len(Y_sample) == 0:
            return self.move_random()

        Y_sample = normalize(Y_sample).reshape(-1, 1)
        self.regr.fit(X_sample, Y_sample)
72