@@ 90-134 (lines=45) @@
87 |         # Return transformation matrix and principal component coefficients
88 |         return V, CX, CZ
89 |
90 |     def fit(self, X, y, Z):
91 |         """
92 |         Fit/train a classifier on source data aligned to the target subspace.
93 |
94 |         INPUT   (1) array 'X': source data (N samples by D features)
95 |                 (2) array 'y': source labels (N samples by 1)
96 |                 (3) array 'Z': target data (M samples by D features)
97 |         OUTPUT  None
98 |         """
99 |         # Data shapes
100 |         N, DX = X.shape
101 |         M, DZ = Z.shape
102 |
103 |         # Assert equivalent dimensionalities
104 |         assert DX == DZ
105 |
106 |         # Subspace alignment (store target subspace)
107 |         V, CX, self.CZ = self.subspace_alignment(
108 |             X, Z, num_components=self.num_components)
109 |
110 |         # Map source data onto source principal components
111 |         X = np.dot(X, CX)
112 |
113 |         # Align source data to target subspace
114 |         X = np.dot(X, V)
115 |
116 |         # Train a classifier on the aligned source data
117 |         if self.loss == 'logistic':
118 |             # Logistic regression model
119 |             self.clf.fit(X, y)
120 |         elif self.loss == 'quadratic':
121 |             # Least-squares model
122 |             self.clf.fit(X, y)
123 |         elif self.loss == 'hinge':
124 |             # Linear support vector machine
125 |             self.clf.fit(X, y)
126 |         else:
127 |             # Other loss functions are not implemented
128 |             raise NotImplementedError
129 |
130 |         # Mark classifier as trained
131 |         self.is_trained = True
132 |
133 |         # Store training data dimensionality
134 |         self.train_data_dim = DX
135 |
136 |     def predict(self, Z_, whiten=False):
137 |         """
@@ 172-209 (lines=38) @@
169 |         return np.sum(2*np.clip(1-Xyt[ix], 0, None).T * -Xy[ix, :].T,
170 |                       axis=1).T + np.sum(-4*Xy[~ix, :], axis=0) + 2*l2*theta
171 |
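
The return statement at lines 169-170 is consistent with the gradient of an L2-regularized modified Huber loss (Zhang, 2004), assuming Xy holds label-weighted samples y_i * x_i, Xyt = np.dot(Xy, theta) are the margins, and ix masks margins of at least -1. A hedged sketch of that objective, with assumed names:

import numpy as np

def modified_huber_loss(theta, X, y, l2=0.0):
    # Margins a_i = y_i * <x_i, theta>, with labels y in {-1, +1}
    a = y * np.dot(X, theta)
    # Quadratic branch max(0, 1 - a)^2 for a >= -1; linear branch -4a below
    loss = np.where(a >= -1, np.clip(1 - a, 0, None) ** 2, -4.0 * a)
    # L2 penalty whose gradient is the 2*l2*theta term in the snippet above
    return np.sum(loss) + l2 * np.dot(theta, theta)
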
172 |     def fit(self, X, y, Z):
173 |         """
174 |         Fit/train a structural correspondence classifier.
175 |
176 |         INPUT   (1) array 'X': source data (N samples by D features)
177 |                 (2) array 'y': source labels (N samples by 1)
178 |                 (3) array 'Z': target data (M samples by D features)
179 |         OUTPUT  None
180 |         """
181 |         # Data shapes
182 |         N, DX = X.shape
183 |         M, DZ = Z.shape
184 |
185 |         # Assert equivalent dimensionalities
186 |         assert DX == DZ
187 |
188 |         # Augment features
189 |         X, _, self.C = self.augment_features(X, Z)
190 |
191 |         # Train a classifier
192 |         if self.loss == 'logistic':
193 |             # Logistic regression model
194 |             self.clf.fit(X, y)
195 |         elif self.loss == 'quadratic':
196 |             # Least-squares model
197 |             self.clf.fit(X, y)
198 |         elif self.loss == 'hinge':
199 |             # Linear support vector machine
200 |             self.clf.fit(X, y)
201 |         else:
202 |             # Other loss functions are not implemented
203 |             raise NotImplementedError
204 |
205 |         # Mark classifier as trained
206 |         self.is_trained = True
207 |
208 |         # Store training data dimensionality
209 |         self.train_data_dim = DX + self.num_components
210 |
211 |     def predict(self, Z_):
212 |         """