Passed
Push to master (9938cf...e83f7b) by Arkadiusz
created 05:59

ThresholdedReLU::differentiate() (rating: A)

Complexity

Conditions 2
Paths 2

Size

Total Lines 4
Code Lines 2

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
dl 0
loc 4
rs 10
c 0
b 0
f 0
cc 2
eloc 2
nc 2
nop 2
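For context on the complexity figures above: differentiate() contains a single ternary condition, so under the usual McCabe counting (one base path plus one per decision point) the cyclomatic complexity works out to cc = 1 + 1 = 2, which matches the 2 conditions and 2 execution paths reported.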
<?php

declare(strict_types=1);

namespace Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction;

class ThresholdedReLU implements ActivationFunction
{
    /**
     * @var float
     */
    private $theta;

    public function __construct(float $theta = 0.0)
    {
        $this->theta = $theta;
    }

    /**
     * @param float|int $value
     */
    public function compute($value): float
    {
        return $value > $this->theta ? $value : 0.0;
    }

    /**
     * @param float|int $value
     * @param float|int $calculatedvalue
     */
    public function differentiate($value, $calculatedvalue): float
    {
        return $calculatedvalue >= $this->theta ? 1.0 : 0.0;
    }
}
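As a quick illustration of the method under analysis, the sketch below (not part of the analysed file) exercises compute() and differentiate() directly. It assumes the php-ml package is installed and autoloadable via Composer; the threshold value of 1.0 and the sample inputs are arbitrary.

<?php
declare(strict_types=1);

use Phpml\NeuralNetwork\ActivationFunction\ThresholdedReLU;

// Usage sketch only; assumes Composer autoloading is available so the
// class shown above can be loaded.
require_once __DIR__ . '/vendor/autoload.php';

$activation = new ThresholdedReLU(1.0); // theta = 1.0

var_dump($activation->compute(0.5));            // float(0): 0.5 does not exceed theta
var_dump($activation->compute(2.0));            // float(2): values above theta pass through
var_dump($activation->differentiate(2.0, 2.0)); // float(1): calculated value >= theta
var_dump($activation->differentiate(0.5, 0.0)); // float(0): calculated value below theta

Note that compute() passes a value through only when it is strictly greater than theta, while differentiate() compares the already-computed value against theta with >=, which is why the last two calls return different derivatives.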