import sklearn.linear_model as skl_linear_model
import sklearn.pipeline as skl_pipeline
import sklearn.preprocessing as skl_preprocessing

from Orange.regression import Learner, Model, SklLearner

__all__ = ["LinearRegressionLearner", "RidgeRegressionLearner",
           "LassoRegressionLearner", "SGDRegressionLearner",
           "ElasticNetLearner", "ElasticNetCVLearner",
           "PolynomialLearner"]


class LinearRegressionLearner(SklLearner):
    __wraps__ = skl_linear_model.LinearRegression
    name = 'linreg'

    def __init__(self, preprocessors=None):
        super().__init__(preprocessors=preprocessors)

    def fit(self, X, Y, W):
        # W (instance weights) is accepted for interface compatibility
        # but is not used by this wrapper.
        sk = skl_linear_model.LinearRegression()
        sk.fit(X, Y)
        return LinearModel(sk)


class RidgeRegressionLearner(SklLearner):
    __wraps__ = skl_linear_model.Ridge
    name = 'ridge'

    def __init__(self, alpha=1.0, fit_intercept=True,
                 normalize=False, copy_X=True, max_iter=None,
                 tol=0.001, solver='auto', preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class LassoRegressionLearner(SklLearner):
    __wraps__ = skl_linear_model.Lasso
    name = 'lasso'

    def __init__(self, alpha=1.0, fit_intercept=True, normalize=False,
                 precompute=False, copy_X=True, max_iter=1000,
                 tol=0.0001, warm_start=False, positive=False,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class ElasticNetLearner(SklLearner):
    __wraps__ = skl_linear_model.ElasticNet
    name = 'elastic'

    def __init__(self, alpha=1.0, l1_ratio=0.5, fit_intercept=True,
                 normalize=False, precompute=False, max_iter=1000,
                 copy_X=True, tol=0.0001, warm_start=False, positive=False,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class ElasticNetCVLearner(SklLearner):
    __wraps__ = skl_linear_model.ElasticNetCV
    name = 'elasticCV'

    def __init__(self, l1_ratio=0.5, eps=0.001, n_alphas=100, alphas=None,
                 fit_intercept=True, normalize=False, precompute='auto',
                 max_iter=1000, tol=0.0001, cv=None, copy_X=True,
                 verbose=0, n_jobs=1, positive=False, preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()


class SGDRegressionLearner(SklLearner):
    __wraps__ = skl_linear_model.SGDRegressor
    name = 'sgd'

    def __init__(self, loss='squared_loss', alpha=0.0001, epsilon=0.1,
                 eta0=0.01, l1_ratio=0.15, penalty='l2', power_t=0.25,
                 learning_rate='invscaling', n_iter=5, fit_intercept=True,
                 preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.params = vars()

    def fit(self, X, Y, W):
        sk = self.__wraps__(**self.params)
        # SGD is sensitive to feature scales, so standardize before fitting.
        clf = skl_pipeline.Pipeline(
            [('scaler', skl_preprocessing.StandardScaler()), ('sgd', sk)])
        clf.fit(X, Y.ravel())
        return LinearModel(clf)


class PolynomialLearner(Learner):
    name = 'poly learner'

    def __init__(self, learner, degree=1, preprocessors=None):
        super().__init__(preprocessors=preprocessors)
        self.degree = degree
        self.learner = learner

    def fit(self, X, Y, W):
        # Expand features to the requested polynomial degree, then delegate
        # the actual fitting to the wrapped learner.
        polyfeatures = skl_preprocessing.PolynomialFeatures(self.degree)
        X = polyfeatures.fit_transform(X)
        clf = self.learner
        if W is None or not self.supports_weights:
            model = clf.fit(X, Y, None)
        else:
            model = clf.fit(X, Y, sample_weight=W.reshape(-1))
        return PolynomialModel(model, polyfeatures)


class LinearModel(Model):
    supports_multiclass = True

    def __init__(self, model):
        self.model = model

    def predict(self, X):
        vals = self.model.predict(X)
        if len(vals.shape) == 1:
            # Prevent IndexError for 1D array
            return vals
        elif vals.shape[1] == 1:
            return vals.ravel()
        else:
            return vals

    def __str__(self):
        return 'LinearModel {}'.format(self.model)


class PolynomialModel(Model):
    supports_multiclass = True

    def __init__(self, model, polyfeatures):
        self.model = model
        self.polyfeatures = polyfeatures

    def predict(self, X):
        X = self.polyfeatures.fit_transform(X)
        return self.model.predict(X)

    def __str__(self):
        return 'PolynomialModel {}'.format(self.model)
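For context, the classes above follow Orange's usual learner/model convention: a learner is called on a data table to fit a model, and the model is called on data to produce predictions. The snippet below is only a minimal usage sketch, assuming Orange3 and scikit-learn are installed and that the bundled "housing" dataset is available; it is not part of the module itself.

# Minimal usage sketch (assumes this module is importable and Orange3 is installed).
from Orange.data import Table

data = Table("housing")                       # regression dataset bundled with Orange
learner = RidgeRegressionLearner(alpha=1.0)   # wraps sklearn.linear_model.Ridge
model = learner(data)                         # fitting happens here
predictions = model(data[:5])                 # predicted target values for five rows
print(predictions)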
This can be caused by one of the following:
1. Missing Dependencies
The error can indicate a configuration issue with Pylint rather than a problem in the code: the libraries the module imports (here scikit-learn and Orange) are not installed in the environment Pylint runs in. Make sure these packages are installed before Pylint is invoked by adding the necessary install commands to your setup; a quick way to check is sketched below.
2. Missing __init__.py files
The error can also result from missing __init__.py files in your module folders. Make sure that you place one such file (it may be empty) in each sub-folder so that the package can be resolved.
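To tell the two causes apart, it can help to run a short import check with the same Python interpreter that Pylint uses. This is only a minimal sketch; the mypackage/... layout shown in the trailing comment is a hypothetical example, not the actual project structure.

# Minimal sanity check: run with the same interpreter that Pylint uses.
# If an import fails here, the problem is a missing dependency, not the code.
import importlib

for module_name in ("sklearn.linear_model", "sklearn.pipeline",
                    "sklearn.preprocessing", "Orange.regression"):
    try:
        importlib.import_module(module_name)
        print("OK     ", module_name)
    except ImportError as exc:
        print("MISSING", module_name, "-", exc)

# For the second cause, every package directory needs an __init__.py
# (an empty file is enough), e.g. in a hypothetical layout:
#
#   mypackage/
#       __init__.py
#       regression/
#           __init__.py
#           linear.py        <- the module shown above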