#!/usr/bin/env python
# -*- coding: utf-8 -*-

from network import NeuralNetwork
from deepy.core import env
import deepy.tensor as DT
import theano.tensor as T


class NeuralClassifier(NeuralNetwork):
    """
    A class of defining stacked neural network for classifiers.

    Targets are integer class indices supplied through the ``k`` vector;
    the cost is cross-entropy and the error is the misclassification rate
    as a percentage.
    """

    def __init__(self, input_dim, input_tensor=None):
        super(NeuralClassifier, self).__init__(input_dim, input_tensor=input_tensor)

    def setup_variables(self):
        # Register the integer label vector as an extra target variable
        # on top of whatever the base network sets up.
        super(NeuralClassifier, self).setup_variables()

        self.k = T.ivector('k')
        self.target_variables.append(self.k)

    def _cost_func(self, y):
        # Clip predictions away from exact 0/1 so the log inside
        # cross-entropy stays finite.
        clipped = T.clip(y, env.EPSILON, 1.0 - env.EPSILON)
        return DT.costs.cross_entropy(clipped, self.k)

    def _error_func(self, y):
        # Percentage of rows whose argmax disagrees with the true label.
        predictions = T.argmax(y, axis=1)
        return 100 * T.mean(T.neq(predictions, self.k))

    @property
    def cost(self):
        """Cross-entropy cost on the training-time output."""
        return self._cost_func(self.output)

    @property
    def test_cost(self):
        """Cross-entropy cost on the test-time output."""
        return self._cost_func(self.test_output)

    def prepare_training(self):
        # Monitor the misclassification rate for both the training and the
        # testing outputs, then let the base class finish its setup.
        for monitor_list, out in ((self.training_monitors, self.output),
                                  (self.testing_monitors, self.test_output)):
            monitor_list.append(("err", self._error_func(out)))
        super(NeuralClassifier, self).prepare_training()

    def predict(self, x):
        """Return the most probable class index for each row of ``x``."""
        scores = self.compute(x)
        return scores.argmax(axis=1)

class MultiTargetNeuralClassifier(NeuralClassifier):
    """
    Classifier for multiple targets.

    Each sample carries ``class_num`` independent integer labels, so the
    target ``k`` is an int matrix (batch, class_num) and the network output
    is indexed as a 3-d tensor (batch, class_num, n_classes) — assumed from
    the ``y[:, i, :]`` slicing below; TODO confirm against callers.
    """

    def __init__(self, class_num, input_dim=None):
        """
        :param class_num: number of target positions predicted per sample
        :param input_dim: input dimension, forwarded to NeuralClassifier
        """
        super(MultiTargetNeuralClassifier, self).__init__(input_dim)
        self.class_num = class_num

    def setup_variables(self):
        # FIX: this override was named `setup_vars`, so it never replaced
        # NeuralClassifier.setup_variables and the ivector target was
        # registered instead of the intended imatrix.
        # Deliberately skip NeuralClassifier's version and call the base
        # network's setup directly, then register the matrix target.
        super(NeuralClassifier, self).setup_variables()

        self.k = T.imatrix('k')
        self.target_variables.append(self.k)

    def _cost_func(self, y):
        """Mean categorical cross-entropy over all samples and target positions."""
        entropy_sum = T.constant(0, dtype=env.FLOATX)
        for i in range(self.class_num):
            # FIX: compare the i-th output distribution against the i-th
            # label column. The original passed slices of self._output as
            # both the predictions and the targets and ignored `y` entirely.
            entropy_sum += T.sum(T.nnet.categorical_crossentropy(y[:, i, :], self.k[:, i]))
        # Normalize by total number of labels (batch * class_num).
        return entropy_sum / (self.k.shape[0] * self.k.shape[1])

    def _error_func(self, y):
        # FIX: use the `y` argument rather than self._output, so training and
        # testing monitors measure their respective outputs.
        return 100 * T.mean(T.neq(T.argmax(y, axis=2), self.k))