| Total Complexity | 5 |
| Total Lines | 21 |
| Duplicated Lines | 0 % |
| 1 | #!/usr/bin/env python |
||
class PRelu(NeuralLayer):
    """
    Parametric ReLU (PReLU) activation layer.

    Computes f(x) = x for x >= 0 and f(x) = alpha * x for x < 0, where
    alpha is a learned per-unit coefficient vector.

    Reference: He et al., "Delving Deep into Rectifiers: Surpassing
    Human-Level Performance on ImageNet Classification".
    - http://arxiv.org/pdf/1502.01852v1.pdf

    NOTE(review): the original docstring said "Probabilistic ReLU"; the
    cited paper defines the *Parametric* ReLU, so the name was corrected.
    """

    def __init__(self, input_tensor=2):
        """
        :param input_tensor: rank of the expected input tensor (2, 3, or 4);
            controls how the alpha vector is reshaped for broadcasting
            in :meth:`prepare`.
        """
        super(PRelu, self).__init__("prelu")
        self.input_tensor = input_tensor

    def prepare(self):
        # One learnable alpha per output unit, registered with the layer's
        # parameter list (presumably so the trainer updates it — provided
        # by the NeuralLayer base class, not visible here).
        self.alphas = self.create_bias(self.output_dim, "alphas")
        self.register_parameters(self.alphas)
        # For rank-3/rank-4 inputs, insert broadcastable axes so alphas
        # aligns with the feature dimension (axis 1) of the input.
        if self.input_tensor == 3:
            self.alphas = self.alphas.dimshuffle('x', 0, 'x')
        elif self.input_tensor == 4:
            self.alphas = self.alphas.dimshuffle('x', 0, 'x', 'x')

    def compute_tensor(self, x):
        """Apply PReLU elementwise: positive part passes through,
        negative part is scaled by the learned alphas."""
        positive_vector = x * (x >= 0)
        negative_vector = self.alphas * (x * (x < 0))
        return positive_vector + negative_vector