| @@ 91-135 (lines=45) @@ | ||
| 88 | # Return transformation matrix and principal component coefficients |
|
| 89 | return V, CX, CZ |
|
| 90 | ||
| 91 | def fit(self, X, y, Z): |
|
| 92 | """ |
|
| 93 | Fit/train a classifier on data mapped onto transfer components. |
|
| 94 | ||
| 95 | INPUT (1) array 'X': source data (N samples by D features) |
|
| 96 | (2) array 'y': source labels (N samples by 1) |
|
| 97 | (3) array 'Z': target data (M samples by D features) |
|
| 98 | OUTPUT None |
|
| 99 | """ |
|
| 100 | # Data shapes |
|
| 101 | N, DX = X.shape |
|
| 102 | M, DZ = Z.shape |
|
| 103 | ||
| 104 | # Assert equivalent dimensionalities |
|
| 105 | if DX != DZ: raise ValueError('Dimensionalities of X and Z should be equal.') |

| 106 | ||
| 107 | # Transfer component analysis (store target subspace) |
|
| 108 | V, CX, self.CZ = self.subspace_alignment( |

| 109 | X, Z, num_components=self.num_components) |
|
| 110 | ||
| 111 | # Map source data onto source principal components |
|
| 112 | X = np.dot(X, CX) |
|
| 113 | ||
| 114 | # Align source data to target subspace |
|
| 115 | X = np.dot(X, V) |
|
| 116 | ||
| 117 | # Train a classifier on the aligned source data |
|
| 118 | if self.loss == 'logistic': |
|
| 119 | # Logistic regression model |
|
| 120 | self.clf.fit(X, y) |
|
| 121 | elif self.loss == 'quadratic': |
|
| 122 | # Least-squares model |
|
| 123 | self.clf.fit(X, y) |
|
| 124 | elif self.loss == 'hinge': |
|
| 125 | # Linear support vector machine |
|
| 126 | self.clf.fit(X, y) |
|
| 127 | else: |
|
| 128 | # Other loss functions are not implemented |
|
| 129 | raise NotImplementedError |
|
| 130 | ||
| 131 | # Mark classifier as trained |
|
| 132 | self.is_trained = True |
|
| 133 | ||
| 134 | # Store training data dimensionality |
|
| 135 | self.train_data_dim = DX |
|
| 136 | ||
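
For context, the `subspace_alignment` call above is expected to return per-domain principal component bases `CX`, `CZ` and an alignment matrix `V` mapping the source basis onto the target basis. A minimal sketch of that step, assuming a PCA-based construction in the style of Fernando et al. (2013); the use of scikit-learn and the exact return convention are assumptions, not shown in this report:

```python
import numpy as np
from sklearn.decomposition import PCA

def subspace_alignment_sketch(X, Z, num_components=2):
    # Principal component bases of source and target data (D x num_components)
    CX = PCA(n_components=num_components).fit(X).components_.T
    CZ = PCA(n_components=num_components).fit(Z).components_.T
    # Alignment matrix that rotates the source basis onto the target basis,
    # so that np.dot(np.dot(X, CX), V) lies in the target subspace
    V = np.dot(CX.T, CZ)
    return V, CX, CZ
```
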
| 137 | def predict(self, Z_, whiten=False): |
|
| 138 | """ |
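
End to end, the method above would be used roughly as follows; the class name `SubspaceAlignedClassifier`, its import path, and its constructor signature are assumptions based on the attributes referenced in `fit`:

```python
import numpy as np
# Import path and class name assumed for illustration
from libtlda.suba import SubspaceAlignedClassifier

X = np.random.randn(100, 10)        # labeled source data (N x D)
y = np.random.choice([0, 1], 100)   # source labels
Z = np.random.randn(80, 10)         # unlabeled target data (M x D)

clf = SubspaceAlignedClassifier(loss='logistic', num_components=5)
clf.fit(X, y, Z)                    # align source to target subspace, then train
preds = clf.predict(Z)              # classify target samples in the aligned space
```
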
|
| @@ 178-215 (lines=38) @@ | ||
| 175 | return np.sum(2*np.clip(1-Xyt[ix], 0, None).T * -Xy[ix, :].T, |
|
| 176 | axis=1).T + np.sum(-4*Xy[~ix, :], axis=0) + 2*l2*theta |
|
| 177 | ||
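
The return statement at lines 175-176 reads as the gradient of an L2-regularized modified Huber loss, where `Xy` stacks the products y_i x_i and `Xyt` their inner products with the parameters. A reconstruction, assuming the boolean mask `ix` selects the samples with margin y_i x_i^T θ ≥ -1:

```latex
\nabla_{\theta} L(\theta)
  = \sum_{i \,:\, y_i x_i^{\top}\theta \ge -1} 2\max\!\big(0,\; 1 - y_i x_i^{\top}\theta\big)\,(-y_i x_i)
  \;+\; \sum_{i \,:\, y_i x_i^{\top}\theta < -1} \big(-4\, y_i x_i\big)
  \;+\; 2\,\lambda\,\theta
```

Here the ridge term 2λθ corresponds to `2*l2*theta` in the code.
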
| 178 | def fit(self, X, y, Z): |
|
| 179 | """ |
|
| 180 | Fit/train a structural correspondence classifier. |
|
| 181 | ||
| 182 | INPUT (1) array 'X': source data (N samples by D features) |
|
| 183 | (2) array 'y': source labels (N samples by 1) |
|
| 184 | (3) array 'Z': target data (M samples by D features) |
|
| 185 | OUTPUT None |
|
| 186 | """ |
|
| 187 | # Data shapes |
|
| 188 | N, DX = X.shape |
|
| 189 | M, DZ = Z.shape |
|
| 190 | ||
| 191 | # Assert equivalent dimensionalities |
|
| 192 | if DX != DZ: raise ValueError('Dimensionalities of X and Z should be equal.') |
|
| 193 | ||
| 194 | # Augment features |
|
| 195 | X, _, self.C = self.augment_features(X, Z, l2=self.l2) |
|
| 196 | ||
| 197 | # Train a classifier |
|
| 198 | if self.loss == 'logistic': |
|
| 199 | # Logistic regression model |
|
| 200 | self.clf.fit(X, y) |
|
| 201 | elif self.loss == 'quadratic': |
|
| 202 | # Least-squares model |
|
| 203 | self.clf.fit(X, y) |
|
| 204 | elif self.loss == 'hinge': |
|
| 205 | # Linear support vector machine |
|
| 206 | self.clf.fit(X, y) |
|
| 207 | else: |
|
| 208 | # Other loss functions are not implemented |
|
| 209 | raise NotImplementedError |
|
| 210 | ||
| 211 | # Mark classifier as trained |
|
| 212 | self.is_trained = True |
|
| 213 | ||
| 214 | # Store training data dimensionality |
|
| 215 | self.train_data_dim = DX + self.num_components |
|
| 216 | ||
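
Since `fit` stores `train_data_dim = DX + self.num_components`, `augment_features` evidently appends `num_components` pivot-based features to the original D, and `self.C` is kept so `predict` can apply the same mapping to new target data. A sketch of that mapping, assuming `C` holds the learned pivot-predictor weights and that the new columns are appended after the originals (the concatenation order is an assumption):

```python
import numpy as np

def augment_sketch(X, C):
    # X: (N, D) raw features; C: (D, num_components) pivot-predictor weights.
    # Result has D + num_components columns, matching train_data_dim above.
    return np.concatenate((X, np.dot(X, C)), axis=1)
```
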
| 217 | def predict(self, Z_): |
|
| 218 | """ |
|