Completed
Push — master ( f73e69...91b7c0 )
by Raphael
01:35
created

compose()   A

Complexity

Conditions 2

Size

Total Lines 4

Duplication

Lines 0
Ratio 0 %
Metric Value
cc 2
dl 0
loc 4
rs 10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Activation/noise utilities for deepy networks (Theano-based)."""

import functools

import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

from deepy.utils.functions import FLOATX

# Module-wide symbolic RNG; the fixed seed makes noise/dropout draws
# reproducible across runs.
theano_rng = RandomStreams(seed=3)
def add_noise(x, sigma, rho):
    """Corrupt a symbolic tensor with Gaussian noise and/or dropout.

    :param x: symbolic tensor to corrupt.
    :param sigma: std-dev of additive Gaussian noise; skipped unless > 0.
    :param rho: dropout probability; each element is zeroed with
        probability ``rho``; skipped unless > 0.
    :return: the corrupted symbolic expression, or ``x`` unchanged when
        both corruptions are disabled.
    """
    gaussian_on = sigma > 0
    dropout_on = rho > 0
    if gaussian_on and dropout_on:
        # Draw order matters for RNG-stream reproducibility: noise first,
        # then the keep-mask, matching the historical behaviour.
        noisy = x + theano_rng.normal(size=x.shape, std=sigma, dtype=FLOATX)
        keep = theano_rng.binomial(size=x.shape, n=1, p=1 - rho, dtype=FLOATX)
        return keep * noisy
    if gaussian_on:
        return x + theano_rng.normal(size=x.shape, std=sigma, dtype=FLOATX)
    if dropout_on:
        keep = theano_rng.binomial(size=x.shape, n=1, p=1 - rho, dtype=FLOATX)
        return keep * x
    return x
def softmax(x):
    """Numerically stable softmax along the last axis of a 2-D tensor.

    Hand-rolled because T.nnet.softmax doesn't work with the HF trainer.
    """
    # Subtract the per-row max (via the transposed view) so exp() cannot
    # overflow; the shift cancels out after normalization.
    shifted = x.T - x.T.max(axis=0)
    exps = T.exp(shifted)
    return (exps / exps.sum(axis=0)).T
def build_activation(act=None):
    """Look up (or compose) an activation function by name.

    :param act: activation name such as ``'tanh'`` or ``'relu'``, or a
        ``'+'``-joined composition like ``'relu+norm:std'`` (applied left
        to right).  ``None`` is treated as the identity ('linear')
        activation; previously the declared default crashed with
        ``TypeError`` at the ``'+' in act`` membership test.
    :return: a callable mapping a tensor to a tensor, tagged with a
        ``__theanets_name__`` attribute for display purposes.
    :raises KeyError: if *act* names no known activation.
    """
    if act is None:
        # Fix: make the declared default usable instead of raising
        # TypeError on the ``'+' in None`` check below.
        identity = lambda z: z
        identity.__theanets_name__ = 'linear'
        return identity

    def compose(a, b):
        # Function composition b(a(z)); carries a readable combined name.
        c = lambda z: b(a(z))
        c.__theanets_name__ = '%s(%s)' % (b.__theanets_name__, a.__theanets_name__)
        return c

    if '+' in act:
        # 'f+g' applies f first, then g.
        return functools.reduce(
            compose, (build_activation(a) for a in act.split('+')))
    options = {
        'tanh': T.tanh,
        'linear': lambda z: z,
        'logistic': T.nnet.sigmoid,
        'sigmoid': T.nnet.sigmoid,
        'hard_sigmoid': T.nnet.hard_sigmoid,
        'softplus': T.nnet.softplus,
        'softmax': softmax,
        'theano_softmax': T.nnet.softmax,

        # shorthands
        'relu': lambda z: z * (z > 0),
        'trel': lambda z: z * (z > 0) * (z < 1),
        'trec': lambda z: z * (z > 1),
        'tlin': lambda z: z * (abs(z) > 1),

        # modifiers
        'rect:max': lambda z: T.minimum(1, z),
        'rect:min': lambda z: T.maximum(0, z),

        # normalization
        'norm:dc': lambda z: (z.T - z.mean(axis=1)).T,
        'norm:max': lambda z: (z.T / T.maximum(1e-10, abs(z).max(axis=1))).T,
        'norm:std': lambda z: (z.T / T.maximum(1e-10, T.std(z, axis=1))).T,
        }
    # Tag every callable so composed names render meaningfully.
    for k, v in options.items():
        v.__theanets_name__ = k
    try:
        return options[act]
    except KeyError:
        raise KeyError('unknown activation %r' % act)