@@ -1,6 +1,6 @@ |
||
| 1 | 1 | <?php |
| 2 | 2 | |
| 3 |  | -declare(strict_types=1); |
|  | 3 | +declare(strict_types = 1); |
| 4 | 4 | |
| 5 | 5 | namespace Phpml\Classification\Linear; |
| 6 | 6 | |
@@ -64,7 +64,7 @@ |
||
| 64 | 64 | protected function runTraining(array $samples, array $targets) |
| 65 | 65 | { |
| 66 | 66 | // The cost function is the sum of squares |
| 67 |  | - $callback = function ($weights, $sample, $target) { |
|  | 67 | + $callback = function($weights, $sample, $target) { |
| 68 | 68 | $this->weights = $weights; |
| 69 | 69 | |
| 70 | 70 | $output = $this->output($sample); |
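
Note: the callback above presumably feeds a gradient-descent optimizer. As a hedged, standalone illustration (not the library's code), a sum-of-squares callback could compute the per-sample gradient of 1/2 (y - h(x))^2 as (h(x) - y) scaled by the inputs; the helper $output, the bias-at-index-0 convention and the return shape below are assumptions:

<?php

// Standalone sketch only; the helper name, the bias convention and the
// return shape are assumptions for illustration, not taken from the library.
$output = function (array $weights, array $sample): float {
    $sum = $weights[0]; // assumed bias weight at index 0
    foreach ($sample as $i => $value) {
        $sum += $weights[$i + 1] * $value;
    }

    return $sum;
};

$callback = function (array $weights, array $sample, float $target) use ($output): array {
    // Per-sample gradient of 1/2 (y - h(x))^2: (h(x) - y) times each input,
    // with the bias component first.
    $error = $output($weights, $sample) - $target;

    $gradient = [$error];
    foreach ($sample as $value) {
        $gradient[] = $error * $value;
    }

    return $gradient;
};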
@@ -1,6 +1,6 @@ |
||
| 1 | 1 | <?php |
| 2 | 2 | |
| 3 |  | -declare(strict_types=1); |
|  | 3 | +declare(strict_types = 1); |
| 4 | 4 | |
| 5 | 5 | namespace Phpml\Classification\Linear; |
| 6 | 6 | |
@@ -193,7 +193,7 @@ |
||
| 193 | 193 | * The gradient of the cost function to be used with gradient descent: |
| 194 | 194 | * ∇J(x) = -(y - h(x)) = (h(x) - y) |
| 195 | 195 | */ |
| 196 |  | - $callback = function ($weights, $sample, $y) use ($penalty) { |
|  | 196 | + $callback = function($weights, $sample, $y) use ($penalty) { |
| 197 | 197 | $this->weights = $weights; |
| 198 | 198 | $hX = $this->output($sample); |
| 199 | 199 | |
@@ -224,7 +224,7 @@ |
||
| 224 | 224 | * The gradient of the cost function: |
| 225 | 225 | * ∇J(x) = -(h(x) - y) . h(x) . (1 - h(x)) |
| 226 | 226 | */ |
| 227 |  | - $callback = function ($weights, $sample, $y) use ($penalty) { |
|  | 227 | + $callback = function($weights, $sample, $y) use ($penalty) { |
| 228 | 228 | $this->weights = $weights; |
| 229 | 229 | $hX = $this->output($sample); |
| 230 | 230 | |
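
The two hunks above quote the gradient formulas for the two cost functions. As a hedged, standalone illustration (regularization via $penalty omitted; the sigmoid hypothesis and bias convention are assumptions), closures mirroring those formulas could look like this outside the class:

<?php

// Illustrative sketch only, not the library's implementation.
$sigmoid = function (array $weights, array $sample): float {
    $sum = $weights[0]; // assumed bias weight at index 0
    foreach ($sample as $i => $value) {
        $sum += $weights[$i + 1] * $value;
    }

    return 1.0 / (1.0 + exp(-$sum));
};

// Log-likelihood cost: ∇J(x) = -(y - h(x)) = (h(x) - y)
$gradLogLikelihood = function (array $weights, array $sample, float $y) use ($sigmoid): float {
    $hX = $sigmoid($weights, $sample);

    return $hX - $y;
};

// Squared cost, mirroring the quoted formula: ∇J(x) = -(h(x) - y) . h(x) . (1 - h(x))
$gradSquaredCost = function (array $weights, array $sample, float $y) use ($sigmoid): float {
    $hX = $sigmoid($weights, $sample);

    return -($hX - $y) * $hX * (1 - $hX);
};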
@@ -1,6 +1,6 @@ |
||
| 1 | 1 | <?php |
| 2 | 2 | |
| 3 |  | -declare(strict_types=1); |
|  | 3 | +declare(strict_types = 1); |
| 4 | 4 | |
| 5 | 5 | namespace Phpml\DimensionReduction; |
| 6 | 6 | |
@@ -162,20 +162,20 @@ |
||
| 162 | 162 | switch ($this->kernel) { |
| 163 | 163 | case self::KERNEL_LINEAR: |
| 164 | 164 | // k(x,y) = xT.y |
| 165 |  | - return function ($x, $y) { |
|  | 165 | + return function($x, $y) { |
| 166 | 166 | return Matrix::dot($x, $y)[0]; |
| 167 | 167 | }; |
| 168 | 168 | case self::KERNEL_RBF: |
| 169 | 169 | // k(x,y)=exp(-γ.|x-y|) where |..| is Euclidean distance |
| 170 | 170 | $dist = new Euclidean(); |
| 171 | 171 | |
| 172 |  | - return function ($x, $y) use ($dist) { |
|  | 172 | + return function($x, $y) use ($dist) { |
| 173 | 173 | return exp(-$this->gamma * $dist->sqDistance($x, $y)); |
| 174 | 174 | }; |
| 175 | 175 | |
| 176 | 176 | case self::KERNEL_SIGMOID: |
| 177 | 177 | // k(x,y)=tanh(γ.xT.y+c0) where c0=1 |
| 178 |  | - return function ($x, $y) { |
|  | 178 | + return function($x, $y) { |
| 179 | 179 | $res = Matrix::dot($x, $y)[0] + 1.0; |
| 180 | 180 | |
| 181 | 181 | return tanh($this->gamma * $res); |
@@ -185,7 +185,7 @@ |
||
| 185 | 185 | // k(x,y)=exp(-γ.|x-y|) where |..| is Manhattan distance |
| 186 | 186 | $dist = new Manhattan(); |
| 187 | 187 | |
| 188 |  | - return function ($x, $y) use ($dist) { |
|  | 188 | + return function($x, $y) use ($dist) { |
| 189 | 189 | return exp(-$this->gamma * $dist->distance($x, $y)); |
| 190 | 190 | }; |
| 191 | 191 | |
@@ -219,7 +219,7 @@ |
||
| 219 | 219 | protected function projectSample(array $pairs) |
| 220 | 220 | { |
| 221 | 221 | // Normalize eigenvectors by eig = eigVectors / eigValues |
| 222 |  | - $func = function ($eigVal, $eigVect) { |
|  | 222 | + $func = function($eigVal, $eigVect) { |
| 223 | 223 | $m = new Matrix($eigVect, false); |
| 224 | 224 | $a = $m->divideByScalar($eigVal)->toArray(); |
| 225 | 225 | |
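
The normalization step above divides each eigenvector by its eigenvalue. A small standalone sketch of the same idea with plain arrays (the eigenvalues and vectors below are made-up illustrative values, and the library's Matrix class is not used):

<?php

$eigValues  = [4.0, 2.0];                // assumed eigenvalues
$eigVectors = [[2.0, 4.0], [1.0, 3.0]];  // assumed eigenvectors (row form)

$func = function (float $eigVal, array $eigVect): array {
    // Divide every component of the eigenvector by its eigenvalue.
    return array_map(function (float $v) use ($eigVal): float {
        return $v / $eigVal;
    }, $eigVect);
};

$normalized = array_map($func, $eigValues, $eigVectors);
// [[0.5, 1.0], [0.5, 1.5]]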
@@ -1,6 +1,6 @@ |
||
| 1 | 1 | <?php |
| 2 | 2 | |
| 3 |  | -declare(strict_types=1); |
|  | 3 | +declare(strict_types = 1); |
| 4 | 4 | |
| 5 | 5 | namespace Phpml\DimensionReduction; |
| 6 | 6 | |
@@ -146,7 +146,7 @@ |
||
| 146 | 146 | |
| 147 | 147 | // Calculate overall mean of the dataset for each column |
| 148 | 148 | $numElements = array_sum($counts); |
| 149 |  | - $map = function ($el) use ($numElements) { |
|  | 149 | + $map = function($el) use ($numElements) { |
| 150 | 150 | return $el / $numElements; |
| 151 | 151 | }; |
| 152 | 152 | $this->overallMean = array_map($map, $overallMean); |
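
The hunk above computes the overall mean by dividing each per-column sum by the total number of samples. A tiny standalone sketch of the same array_map pattern (the counts and sums below are made-up values):

<?php

$counts      = [30, 20];                  // assumed per-class sample counts
$overallMean = [150.0, 100.0, 50.0];      // assumed per-column sums

$numElements = array_sum($counts);        // 50
$map = function (float $el) use ($numElements): float {
    return $el / $numElements;
};

$overallMean = array_map($map, $overallMean); // [3.0, 2.0, 1.0]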
@@ -65,24 +65,24 @@ |
||
| 65 | 65 | private $V = []; |
| 66 | 66 | |
| 67 | 67 | /** |
| 68 |  | - * Array for internal storage of nonsymmetric Hessenberg form. |
| 69 |  | - * |
| 70 |  | - * @var array |
| 71 |  | - */ |
|  | 68 | + * Array for internal storage of nonsymmetric Hessenberg form. |
|  | 69 | + * |
|  | 70 | + * @var array |
|  | 71 | + */ |
| 72 | 72 | private $H = []; |
| 73 | 73 | |
| 74 | 74 | /** |
| 75 |  | - * Working storage for nonsymmetric algorithm. |
| 76 |  | - * |
| 77 |  | - * @var array |
| 78 |  | - */ |
|  | 75 | + * Working storage for nonsymmetric algorithm. |
|  | 76 | + * |
|  | 77 | + * @var array |
|  | 78 | + */ |
| 79 | 79 | private $ort; |
| 80 | 80 | |
| 81 | 81 | /** |
| 82 |  | - * Used for complex scalar division. |
| 83 |  | - * |
| 84 |  | - * @var float |
| 85 |  | - */ |
|  | 82 | + * Used for complex scalar division. |
|  | 83 | + * |
|  | 84 | + * @var float |
|  | 85 | + */ |
| 86 | 86 | private $cdivr; |
| 87 | 87 | private $cdivi; |
| 88 | 88 | |