Completed
Push — master (5bbe2a...9d73f5) by Raphael, created 01:33

deepy.layers.NeuralLayer.create_bias() (Grade: A)

Complexity
  Conditions: 2

Size
  Total Lines: 8

Duplication
  Lines: 0
  Ratio: 0%

Metric   Value
cc       2
dl       0
loc      8
rs       9.4286
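
For context, a minimal usage sketch of the method this report grades. The layer instance, dimension, and suffix below are hypothetical; as the listing further down shows, create_bias() builds a shared Theano vector filled with a constant value:

    # Hypothetical example: a 128-unit bias initialized to 0.1, registered
    # as a trainable parameter (names and dimensions are illustrative).
    bias = layer.create_bias(output_n=128, suffix="hidden", value=0.1)
    layer.register_parameters(bias)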
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import logging as loggers

import numpy as np
import theano

from deepy.utils import FLOATX, global_rand, UniformInitializer

logging = loggers.getLogger(__name__)

class NeuralLayer(object):

    def __init__(self, name="unknown"):
        """
        Create a neural layer.
        """
        self.name = name
        self.input_dim = 0
        self.input_dims = [0]
        self.output_dim = 0

        self._linked_block = None
        self._linked = False

        self.connected = False
        self.updates = []
        self.training_updates = []
        self.free_parameters = []
        self.parameters = []
        self.training_monitors = []
        self.testing_monitors = []
        self._registered_monitors = set()
        self._registered_updates = set()
        self._registered_training_updates = set()
        self.external_inputs = []
        self.external_targets = []
        self.parameter_count = 0
        self.epoch_callbacks = []
        self.training_callbacks = []
        self.testing_callbacks = []

    def connect(self, input_dim=0, input_dims=None, previous_layer=None, network_config=None, no_prepare=False):
        """
        Connect to a previous layer.
        :param no_prepare: if True, skip calling prepare()
        """
        # configure input dimensions
        if input_dims:
            self.input_dims = input_dims
            self.input_dim = input_dims[0]
        else:
            self.input_dim = input_dim
            self.input_dims = [input_dim]
        # set default output dimension
        if self.output_dim == 0:
            self.output_dim = self.input_dim
        self.previous_layer = previous_layer
        self.network_config = network_config
        self.connected = True
        # call prepare
        if not no_prepare:
            self.prepare()
        if self._linked_block and not self._linked:
            self._linked = True
            self._linked_block.register_layer(self)
        return self
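
    # Illustrative usage (hypothetical dimension): connecting a layer
    # manually fixes its input dimension and, by default, its output
    # dimension too, and triggers prepare():
    #
    #     layer.connect(input_dim=100)   # output_dim defaults to 100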

    def compute_raw(self, inputs, dims, **kwargs):
        """
        Compute on raw Theano tensors.
        :type inputs: list of Theano tensors
        :type dims: list of int
        """
        from var import NeuralVar
        tensors = [NeuralVar(d, t) for d, t in zip(dims, inputs)]
        return self.compute(*tensors, **kwargs)

    def compute(self, *inputs, **kwargs):
        """
        Take NeuralVar instances as input and compute the result.
        For raw tensors, use compute_raw, which takes dimensions explicitly.
        :type inputs: list of NeuralVar
        :return: NeuralVar
        """
        from var import NeuralVar
        if type(inputs[0]) != NeuralVar:
            raise SystemError("The input of `compute` must be NeuralVar")

        dims = [t.dim() for t in inputs]
        if len(inputs) == 1:
            self.connect(input_dim=dims[0])
        else:
            self.connect(input_dims=dims)
        # convert kwargs
        train_kwargs = {}
        test_kwargs = {}
        for key, val in kwargs.items():
            if type(val) == NeuralVar:
                train_kwargs[key] = val.tensor
                test_kwargs[key] = val.test_tensor
            else:
                train_kwargs[key] = val
                test_kwargs[key] = val

        return NeuralVar(self.output_dim,
                         self.output(*[t.tensor for t in inputs], **train_kwargs),
                         self.test_output(*[t.test_tensor for t in inputs], **test_kwargs))
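
    # Illustrative usage sketch (assumes a concrete subclass that implements
    # output(), e.g. a dense layer; names and dimensions are hypothetical):
    #
    #     x = NeuralVar(100, T.matrix(), T.matrix())
    #     y = dense_layer.compute(x)   # NeuralVar with dim == output_dim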

    def prepare(self):
        """
        The prepare function is called after the layer is connected.
        """
        return self.setup()

    def setup(self):
        """
        !!! DEPRECATED !!!
        The setup function is called after the layer is connected.
        """
        pass

    def output(self, *args, **kwargs):
        """
        Output function.
        """
        raise NotImplementedError("output function of '%s' is not implemented" % self.name)

    def test_output(self, *args, **kwargs):
        """
        Output function at test time.
        """
        return self.output(*args, **kwargs)

    def call(self, x, test=False):
        """
        Call this layer, with a flag to switch between training and test mode.
        """
        if test:
            return self.test_output(x)
        else:
            return self.output(x)

    def link(self, block):
        """
        Let the given block or network manage the parameters of this layer.
        :param block: Block or NeuralNetwork
        :return: NeuralLayer
        """
        if self._linked_block:
            raise SystemError("One layer cannot be linked twice")
        self._linked_block = block
        if self.connected:
            self._linked = True
            block.register_layer(self)
        return self
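
    # Illustrative: hand parameter management to a container (the `block`
    # object is assumed to provide register_layer(), as used above):
    #
    #     layer.link(block)   # block now owns this layer's parameters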

    def register(self, *layers):
        """
        Register inner layers.
        """
        self.register_inner_layers(*layers)

    def register_inner_layers(self, *layers):
        """
        Register the parameters of the given inner layers.
        """
        for layer in layers:
            self.register_parameters(*layer.parameters)

    def register_parameters(self, *parameters):
        """
        Register parameters.
        """
        for param in parameters:
            self.parameter_count += np.prod(param.get_value().shape)
        self.parameters.extend(parameters)

    def register_free_parameters(self, *free_parameters):
        """
        Register free parameters, whose values will not be updated by the trainer.
        """
        self.free_parameters.extend(free_parameters)

    def register_updates(self, *updates):
        """
        Register updates that will be executed in each iteration.
        """
        for key, node in updates:
            if key not in self._registered_updates:
                self.updates.append((key, node))
                self._registered_updates.add(key)
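
    # Illustrative: each update is a (shared_variable, new_value) pair,
    # deduplicated by the shared variable (the counter is hypothetical):
    #
    #     step = theano.shared(0)
    #     self.register_updates((step, step + 1))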

    def register_training_updates(self, *updates):
        """
        Register updates that will only be executed in the training phase.
        """
        for key, node in updates:
            if key not in self._registered_training_updates:
                self.training_updates.append((key, node))
                self._registered_training_updates.add(key)

    def register_monitors(self, *monitors):
        """
        Register monitors; each should be a tuple of a name and a Theano variable.
        """
        for key, node in monitors:
            if key not in self._registered_monitors:
                self.training_monitors.append((key, node))
                self.testing_monitors.append((key, node))
                self._registered_monitors.add(key)

    def register_external_inputs(self, *variables):
        """
        Register external input variables.
        """
        self.external_inputs.extend(variables)

    def register_external_targets(self, *variables):
        """
        Register external target variables.
        """
        self.external_targets.extend(variables)

    def register_training_callbacks(self, *callbacks):
        """
        Register callbacks invoked on each iteration during training.
        """
        self.training_callbacks.extend(callbacks)

    def register_testing_callbacks(self, *callbacks):
        """
        Register callbacks invoked on each iteration during testing.
        """
        self.testing_callbacks.extend(callbacks)

    def register_epoch_callbacks(self, *callbacks):
        """
        Register callbacks invoked after each epoch finishes.
        """
        self.epoch_callbacks.extend(callbacks)

    def create_weight(self, input_n=1, output_n=1, suffix="", initializer=None, shape=None):
        if not shape:
            shape = (input_n, output_n)

        if not initializer:
            initializer = UniformInitializer()

        weight = theano.shared(initializer.sample(shape).astype(FLOATX), name='W_{}'.format(suffix))

        logging.info('create weight W_%s: %s', suffix, str(shape))
        return weight
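
    # Illustrative: a 100x50 dense weight with the default uniform
    # initializer (dimensions and suffix are hypothetical):
    #
    #     W = self.create_weight(input_n=100, output_n=50, suffix="dense")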

    def create_bias(self, output_n=1, suffix="", value=0., shape=None):
        if not shape:
            shape = (output_n, )
        bs = np.ones(shape)
        bs *= value
        bias = theano.shared(bs.astype(FLOATX), name='B_{}'.format(suffix))
        logging.info('create bias B_%s: %s', suffix, str(shape))
        return bias

    def create_vector(self, n, name, dtype=FLOATX):
        v = theano.shared(np.zeros(n).astype(dtype), name=name)

        logging.info('create vector %s: %d', name, n)
        return v

    def create_matrix(self, m, n, name):

        matrix = theano.shared(np.zeros((m, n)).astype(FLOATX), name=name)

        logging.info('create matrix %s: %d x %d', name, m, n)
        return matrix

    def callback_forward_propagation(self):
        pass
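
To show how the pieces above fit together, here is a minimal hypothetical subclass. It is a sketch, not code from the repository: it assumes a dense-style layer and uses only the base-class API shown above (prepare(), create_weight(), create_bias(), register_parameters(), output()).

    import theano.tensor as T

    class FullyConnected(NeuralLayer):
        """Hypothetical dense layer built on the NeuralLayer API above."""

        def __init__(self, output_dim):
            super(FullyConnected, self).__init__(name="fully_connected")
            self.output_dim = output_dim

        def prepare(self):
            # Runs once connect() has fixed input_dim; create and register
            # the trainable parameters.
            self.W = self.create_weight(self.input_dim, self.output_dim, suffix="fc")
            self.b = self.create_bias(self.output_dim, suffix="fc")
            self.register_parameters(self.W, self.b)

        def output(self, x):
            # Plain affine transform; a real layer would add a nonlinearity.
            return T.dot(x, self.W) + self.b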