| @@ 177-218 (lines=42) @@ | ||
| 174 | # Store training data dimensionality |
|
| 175 | self.train_data_dim = DX |
|
| 176 | ||
| 177 | def predict(self, Z, whiten=False): |
|
| 178 | """ |
|
| 179 | Make predictions on a new dataset. |
|
| 180 | ||
| 181 | Parameters |
|
| 182 | ---------- |
|
| 183 | Z : array |
|
| 184 | new data set (M samples by D features) |
|
| 185 | whiten : boolean |
|
| 186 | whether to whiten the new data (default: False) |
|
| 187 | ||
| 188 | Returns |
|
| 189 | ------- |
|
| 190 | preds : array |
|
| 191 | label predictions (M samples by 1) |
|
| 192 | ||
| 193 | """ |
|
| 194 | # Data shape |
|
| 195 | M, D = Z.shape |
|
| 196 | ||
| 197 | # If classifier is trained, check for same dimensionality |
|
| 198 | if self.is_trained: |
|
| 199 | if not self.train_data_dim == D: |
|
| 200 | raise ValueError('''Test data is of different dimensionality |
|
| 201 | than training data.''') |
|
| 202 | ||
| 203 | # Check for need to whiten data beforehand |
|
| 204 | if whiten: |
|
| 205 | Z = st.zscore(Z) |
|
| 206 | ||
| 207 | # Map new target data onto target subspace |
|
| 208 | Z = np.dot(Z, self.target_subspace) |
|
| 209 | ||
| 210 | # Call scikit's predict function |
|
| 211 | preds = self.clf.predict(Z) |
|
| 212 | ||
| 213 | # For quadratic loss function, correct predictions |
|
| 214 | if self.loss == 'quadratic': |
|
| 215 | preds = (np.sign(preds)+1)/2. |
|
| 216 | ||
| 217 | # Return predictions array |
|
| 218 | return preds |
|
| 219 | ||
| 220 | def get_params(self): |
|
| 221 | """Get classifier parameters.""" |
|
| @@ 269-305 (lines=37) @@ | ||
| 266 | # Store training data dimensionality |
|
| 267 | self.train_data_dim = DX + self.num_components |
|
| 268 | ||
| 269 | def predict(self, Z): |
|
| 270 | """ |
|
| 271 | Make predictions on a new dataset. |
|
| 272 | ||
| 273 | Parameters |
|
| 274 | ---------- |
|
| 275 | Z : array |
|
| 276 | new data set (M samples by D features) |
|
| 277 | ||
| 278 | Returns |
|
| 279 | ------- |
|
| 280 | preds : array |
|
| 281 | label predictions (M samples by 1) |
|
| 282 | ||
| 283 | """ |
|
| 284 | # Data shape |
|
| 285 | M, D = Z.shape |
|
| 286 | ||
| 287 | # If classifier is trained, check for same dimensionality |
|
| 288 | if self.is_trained: |
|
| 289 | if not self.train_data_dim == D: |
|
| 290 | raise ValueError('''Test data is of different dimensionality |
|
| 291 | than training data.''') |
|
| 292 | ||
| 293 | # Check for augmentation |
|
| 294 | if not self.train_data_dim == D: |
|
| 295 | Z = np.concatenate((np.dot(Z, self.C), Z), axis=1) |
|
| 296 | ||
| 297 | # Call scikit's predict function |
|
| 298 | preds = self.clf.predict(Z) |
|
| 299 | ||
| 300 | # For quadratic loss function, correct predictions |
|
| 301 | if self.loss == 'quadratic': |
|
| 302 | preds = (np.sign(preds)+1)/2. |
|
| 303 | ||
| 304 | # Return predictions array |
|
| 305 | return preds |
|
| 306 | ||
| 307 | def get_params(self): |
|
| 308 | """Get classifier parameters.""" |
|