Completed
Push — master (5bbe2a...9d73f5) by Raphael, created 01:33

deepy.layers.OneHotEmbedding.output()   Grade: B

Complexity
    Conditions: 7
Size
    Total lines: 20
Duplication
    Lines: 0
    Ratio: 0%

Metric                          Value
cc  (cyclomatic complexity)     7
dl  (duplicated lines)          0
loc (lines of code)             20
rs                              7.3333
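The grade and the cc figure line up: output() below contains six if-branches, and cyclomatic complexity counts one per decision point plus one for the function itself, giving 7. As a rough illustration, figures like these could be reproduced with the radon library, assuming radon (or a radon-based service) generated this report; the file path below is a placeholder, not the module's real location:

from radon.complexity import cc_visit, cc_rank

# Hypothetical path; substitute wherever OneHotEmbedding actually lives.
with open("deepy/layers/onehot_embed.py") as f:
    source = f.read()

for block in cc_visit(source):
    print(block.name, block.complexity, cc_rank(block.complexity))
    # Class blocks carry their methods; per-method scores
    # (e.g. output with complexity 7, rank B) live there.
    for method in getattr(block, "methods", []):
        print(" ", method.name, method.complexity, cc_rank(method.complexity))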
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
import theano.tensor as T
from deepy.layers import NeuralLayer
from deepy.layers.var import NeuralVar
from deepy.utils import onehot_tensor, onehot
from deepy.utils import FLOATX


class OneHotEmbedding(NeuralLayer):
    """
    One-hot embedding layer.
    Computation: [0,1,2]  ---> [[1,0,0],[0,1,0],[0,0,1]]
    """
    def __init__(self, vocab_size, cached=True, zero_index=None, mask=None):
        super(OneHotEmbedding, self).__init__("onehot")
        self.vocab_size = vocab_size
        self.output_dim = vocab_size
        self.cached = cached
        self.zero_index = zero_index
        # Accept either a NeuralVar wrapper or a raw theano tensor (or None).
        self.mask = mask.tensor if isinstance(mask, NeuralVar) else mask

    def prepare(self):
        if not self.cached:
            return
        # Precompute the lookup table: row i is the one-hot code of index i,
        # so the full table is a vocab_size x vocab_size identity matrix.
        onehot_matrix = []
        for i in xrange(self.vocab_size):
            onehot_matrix.append(onehot(self.vocab_size, i))
        onehot_matrix = np.array(onehot_matrix, dtype=FLOATX)
        self.onehot_list = self.create_matrix(self.vocab_size, self.vocab_size, "onehot_list")
        self.onehot_list.set_value(onehot_matrix)

    def output(self, x):
        if self.cached:
            if x.ndim == 1:
                # Vector of indices: one row lookup per index.
                ret_tensor = self.onehot_list[x]
            else:
                # (batch, time) index matrix: flatten, look up, then restore
                # a 3-d (batch, time, vocab_size) shape.
                ret_tensor = self.onehot_list[x.flatten()].reshape((x.shape[0], x.shape[1], self.vocab_size))
        else:
            # Build the one-hot tensor symbolically instead of via table lookup.
            ret_tensor = onehot_tensor(x, self.vocab_size)
        if self.zero_index is not None:
            # Zero out the rows belonging to a designated index (e.g. padding).
            mask = T.neq(x, self.zero_index)
            if x.ndim == 1:
                ret_tensor *= mask[:, None]
            else:
                ret_tensor *= mask[:, :, None]
        if self.mask is not None:
            # Apply an externally supplied mask with the same broadcast pattern.
            if x.ndim == 1:
                ret_tensor *= self.mask[:, None]
            else:
                ret_tensor *= self.mask[:, :, None]
        return ret_tensor
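For reference, here is a plain-numpy sketch of what the cached path of output() computes. This is an illustration only: the real layer builds a theano graph, and the variable names simply mirror the table that prepare() fills in.

import numpy as np

vocab_size = 3
# prepare() effectively builds an identity matrix as the lookup table.
onehot_list = np.eye(vocab_size, dtype="float32")

# 1-d input, as in the class docstring: [0,1,2] -> identity rows.
x = np.array([0, 1, 2])
print(onehot_list[x])
# [[1. 0. 0.]
#  [0. 1. 0.]
#  [0. 0. 1.]]

# 2-d (batch, time) input: flatten, look up, restore a 3-d shape.
x2 = np.array([[0, 2],
               [1, 0]])
out = onehot_list[x2.flatten()].reshape((x2.shape[0], x2.shape[1], vocab_size))

# With zero_index=0, rows at the padding index are blanked out:
out *= (x2 != 0)[:, :, None]

The cached path spends O(vocab_size^2) memory on the table in exchange for a cheap row lookup; with cached=False the same result is presumably assembled symbolically by onehot_tensor without materializing the table.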