Passed: push to master (300275...433e71) by Simon, created 01:13

GPR_linear.predict() (grade A)

Complexity
    Conditions: 1

Size
    Total Lines: 2
    Code Lines: 2

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0

Metric  Value
cc      1
eloc    2
nop     3
dl      0
loc     2
rs      10
c       0
b       0
f       0

# Author: Simon Blanke
# Email: [email protected]
# License: MIT License


import numpy as np

from sklearn.linear_model import BayesianRidge
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern, WhiteKernel
from sklearn.ensemble import ExtraTreesRegressor as _ExtraTreesRegressor_
from sklearn.ensemble import RandomForestRegressor as _RandomForestRegressor_


class EnsembleRegressor:
    # Averages the predictions of several fitted estimators; the spread
    # across the estimators is used as the uncertainty (std) estimate.
    def __init__(self, estimators):
        self.estimators = estimators

    def fit(self, X, y):
        for estimator in self.estimators:
            estimator.fit(X, y)

    def predict(self, X, return_std=False):
        predictions = []
        for estimator in self.estimators:
            predictions.append(estimator.predict(X))

        # shape: (n_samples, n_estimators)
        predictions = np.array(predictions).T
        mean = predictions.mean(axis=1)
        std = predictions.std(axis=1)

        if return_std:
            return mean, std
        return mean


def _return_std(X, trees, predictions, min_variance):
    # Law-of-total-variance estimate over the trees of a forest:
    # mean(per-tree variance) + mean(per-tree mean squared) - (ensemble mean)^2,
    # clipped at zero and returned as a standard deviation.
    std = np.zeros(len(X))

    for tree in trees:
        var_tree = tree.tree_.impurity[tree.apply(X)]
        var_tree[var_tree < min_variance] = min_variance
        mean_tree = tree.predict(X)
        std += var_tree + mean_tree ** 2

    std /= len(trees)
    std -= predictions ** 2.0
    std[std < 0.0] = 0.0
    std = std ** 0.5
    return std


class TreeEnsembleBase:
    # Mixin that adds a return_std option to scikit-learn forest regressors.
    def __init__(self, min_variance=0.0, **kwargs):
        self.min_variance = min_variance
        super().__init__(**kwargs)

    def fit(self, X, y):
        super().fit(X, np.ravel(y))

    def predict(self, X, return_std=False):
        mean = super().predict(X)

        if return_std:
            std = _return_std(X, self.estimators_, mean, self.min_variance)
            return mean, std
        return mean


class RandomForestRegressor(TreeEnsembleBase, _RandomForestRegressor_):
    def __init__(self, min_variance=0.0, **kwargs):
        # Forward min_variance so the caller's value is not silently dropped.
        super().__init__(min_variance=min_variance, **kwargs)


class ExtraTreesRegressor(TreeEnsembleBase, _ExtraTreesRegressor_):
    def __init__(self, min_variance=0.0, **kwargs):
        super().__init__(min_variance=min_variance, **kwargs)


class GPR:
    # Gaussian process surrogate with a Matern(nu=2.5) + white-noise kernel.
    def __init__(self):
        self.gpr = GaussianProcessRegressor(
            kernel=Matern(nu=2.5) + WhiteKernel(), normalize_y=True
        )

    def fit(self, X, y):
        self.gpr.fit(X, y)

    def predict(self, X, return_std=False):
        return self.gpr.predict(X, return_std=return_std)


class GPR_linear:
    # Bayesian linear surrogate. Note: the normalize and n_iter arguments of
    # BayesianRidge were removed/renamed in newer scikit-learn releases
    # (normalize dropped, n_iter became max_iter), so this constructor call
    # targets older scikit-learn versions.
    def __init__(self):
        self.gpr = BayesianRidge(n_iter=10, normalize=True)

    def fit(self, X, y):
        self.gpr.fit(X, y)

    def predict(self, X, return_std=False):
        return self.gpr.predict(X, return_std=return_std)
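

# For reference, a minimal usage sketch (not part of the reviewed file): it
# assumes the classes defined above are available in the same module and uses
# small synthetic data chosen purely for illustration. GPR_linear could be
# swapped in where an older scikit-learn release is installed.
if __name__ == "__main__":
    rng = np.random.default_rng(0)

    X = rng.uniform(-3, 3, size=(50, 1))
    y = np.sin(X).ravel()

    # Combine two surrogates; the ensemble's std is the spread of their means.
    surrogate = EnsembleRegressor([GPR(), RandomForestRegressor()])
    surrogate.fit(X, y)

    X_new = np.linspace(-3, 3, 20).reshape(-1, 1)
    mean, std = surrogate.predict(X_new, return_std=True)
    print(mean.shape, std.shape)  # (20,) (20,)

    # The forest regressor also exposes return_std on its own, via _return_std.
    forest = RandomForestRegressor(min_variance=1e-6, n_estimators=20)
    forest.fit(X, y)
    mean_f, std_f = forest.predict(X_new, return_std=True)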