Test Failed
Pull Request — master (#81)
by unknown, created 04:16
src/Phpml/Classification/Linear/LogisticRegression.php 2 patches
Unused Use Statements   -1 removed lines
@@ -4,7 +4,6 @@
 
 namespace Phpml\Classification\Linear;
 
-use Phpml\Classification\Classifier;
 use Phpml\Helper\Optimizer\ConjugateGradient;
 
 class LogisticRegression extends Adaline
Spacing   +10 added lines, -10 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Classification\Linear;
 
@@ -13,12 +13,12 @@
     /**
      * Batch training: Gradient descent algorithm (default)
      */
-    const BATCH_TRAINING    = 1;
+    const BATCH_TRAINING = 1;
 
     /**
      * Online training: Stochastic gradient descent learning
      */
-    const ONLINE_TRAINING    = 2;
+    const ONLINE_TRAINING = 2;
 
     /**
      * Conjugate Batch: Conjugate Gradient algorithm
@@ -74,14 +74,14 @@
         string $penalty = 'L2')
     {
         $trainingTypes = range(self::BATCH_TRAINING, self::CONJUGATE_GRAD_TRAINING);
-        if (! in_array($trainingType, $trainingTypes)) {
-            throw new \Exception("Logistic regression can only be trained with " .
-                "batch (gradient descent), online (stochastic gradient descent) " .
+        if (!in_array($trainingType, $trainingTypes)) {
+            throw new \Exception("Logistic regression can only be trained with ".
+                "batch (gradient descent), online (stochastic gradient descent) ".
                 "or conjugate batch (conjugate gradients) algorithms");
         }
 
-        if (! in_array($cost, ['log', 'sse'])) {
-            throw new \Exception("Logistic regression cost function can be one of the following: \n" .
+        if (!in_array($cost, ['log', 'sse'])) {
+            throw new \Exception("Logistic regression cost function can be one of the following: \n".
                 "'log' for log-likelihood and 'sse' for sum of squared errors");
         }
 
@@ -177,7 +177,7 @@
                  * The gradient of the cost function to be used with gradient descent:
                  *		∇J(x) = -(y - h(x)) = (h(x) - y)
                  */
-                $callback = function ($weights, $sample, $y) use ($penalty) {
+                $callback = function($weights, $sample, $y) use ($penalty) {
                     $this->weights = $weights;
                     $hX = $this->output($sample);
 
@@ -208,7 +208,7 @@
                  * The gradient of the cost function:
                  *		∇J(x) = -(h(x) - y) . h(x) . (1 - h(x))
                  */
-                $callback = function ($weights, $sample, $y) use ($penalty) {
+                $callback = function($weights, $sample, $y) use ($penalty) {
                     $this->weights = $weights;
                     $hX = $this->output($sample);
 
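For reference, a minimal standalone sketch (hypothetical names and values, not the library's exact code) of the per-sample gradient callback whose formatting the Spacing patch changes: it computes h(x) via the sigmoid and returns [error, gradient], with ∇J(x) = h(x) - y as stated in the diff comment.

<?php

// Hedged sketch: per-sample gradient callback for logistic regression,
// with h(x) = sigmoid(w0 + w·x) and gradient h(x) - y.
$sigmoid = function (float $z): float {
    return 1.0 / (1.0 + exp(-$z));
};

$gradientCallback = function (array $weights, array $sample, float $y) use ($sigmoid): array {
    $z = $weights[0]; // bias term
    foreach ($sample as $i => $feature) {
        $z += $weights[$i + 1] * $feature;
    }
    $hX = $sigmoid($z);

    return [$y - $hX, $hX - $y]; // [error, gradient]
};

print_r($gradientCallback([0.0, 0.5, -0.25], [1.0, 2.0], 1.0));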
src/Phpml/Classification/Linear/Perceptron.php 2 patches
Indentation   +2 added lines, -2 removed lines
@@ -15,7 +15,7 @@
 {
     use Predictable, OneVsRest;
 
-   /**
+    /**
      * @var array
      */
     protected $samples = [];
@@ -83,7 +83,7 @@
         $this->maxIterations = $maxIterations;
     }
 
-   /**
+    /**
      * @param array $samples
      * @param array $targets
      */
Spacing   +4 added lines, -4 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Classification\Linear;
 
@@ -118,7 +118,7 @@
     protected function runTraining()
     {
         // The cost function is the sum of squares
-        $callback = function ($weights, $sample, $target) {
+        $callback = function($weights, $sample, $target) {
             $this->weights = $weights;
 
             $prediction = $this->outputClass($sample);
@@ -137,7 +137,7 @@
      */
     protected function runGradientDescent(\Closure $gradientFunc, bool $isBatch = false)
     {
-        $class = $isBatch ? GD::class :  StochasticGD::class;
+        $class = $isBatch ? GD::class : StochasticGD::class;
 
         $optimizer = (new $class($this->featureCount))
             ->setLearningRate($this->learningRate)
@@ -227,6 +227,6 @@
 
         $predictedClass = $this->outputClass($sample);
 
-        return $this->labels[ $predictedClass ];
+        return $this->labels[$predictedClass];
     }
 }
src/Phpml/Helper/Optimizer/GD.php 1 patch
Spacing   +5 added lines, -5 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Helper\Optimizer;
 
@@ -42,7 +42,7 @@
 
             $this->updateWeightsWithUpdates($updates, $totalPenalty);
 
-            $this->costValues[] = array_sum($errors)/$this->sampleCount;
+            $this->costValues[] = array_sum($errors) / $this->sampleCount;
 
             if ($this->earlyStop($theta)) {
                 break;
@@ -63,7 +63,7 @@
     protected function gradient(array $theta)
     {
         $costs = [];
-        $gradient= [];
+        $gradient = [];
         $totalPenalty = 0;
 
         foreach ($this->samples as $index => $sample) {
@@ -73,7 +73,7 @@
             list($cost, $grad, $penalty) = array_pad($result, 3, 0);
 
             $costs[] = $cost;
-            $gradient[]= $grad;
+            $gradient[] = $grad;
             $totalPenalty += $penalty;
         }
 
@@ -89,7 +89,7 @@
     protected function updateWeightsWithUpdates(array $updates, float $penalty)
    {
         // Updates all weights at once
-        for ($i=0; $i <= $this->dimensions; $i++) {
+        for ($i = 0; $i <= $this->dimensions; $i++) {
             if ($i == 0) {
                 $this->theta[0] -= $this->learningRate * array_sum($updates);
             } else {
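As a point of reference only, a generic standalone illustration (hypothetical values; the loop body past the bias update is not shown in the diff and is guessed here) of the batch weight update that the last hunk reformats: the bias takes the summed per-sample gradient values, each weight takes the gradient weighted by its feature plus an L2-style penalty term.

<?php

// Hedged sketch of a batch gradient-descent update for a linear model.
$learningRate = 0.1;
$penalty = 0.01;
$theta = [0.0, 0.5, -0.2];            // [bias, w1, w2]
$gradients = [0.3, -0.1];             // per-sample gradient values
$samples = [[1.0, 2.0], [0.5, 1.5]];  // matching samples

// Bias update: driven by the sum of the per-sample gradients.
$theta[0] -= $learningRate * array_sum($gradients);

// Weight updates: feature-weighted gradients plus penalty.
for ($i = 1; $i < count($theta); $i++) {
    $sum = 0.0;
    foreach ($gradients as $j => $g) {
        $sum += $g * $samples[$j][$i - 1];
    }
    $theta[$i] -= $learningRate * ($sum + $penalty * $theta[$i]);
}

print_r($theta);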
src/Phpml/Helper/Optimizer/StochasticGD.php 1 patch
Spacing   +4 added lines, -4 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Helper\Optimizer;
 
@@ -72,7 +72,7 @@
      *
      * @var array
      */
-    protected $costValues= [];
+    protected $costValues = [];
 
     /**
      * Initializes the SGD optimizer for the given number of dimensions
@@ -216,7 +216,7 @@
             $this->theta[0] -= $this->learningRate * $gradient;
 
             // Update other values
-            for ($i=1; $i <= $this->dimensions; $i++) {
+            for ($i = 1; $i <= $this->dimensions; $i++) {
                 $this->theta[$i] -= $this->learningRate *
                     ($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]);
             }
@@ -240,7 +240,7 @@
     {
         // Check for early stop: No change larger than threshold (default 1e-5)
         $diff = array_map(
-            function ($w1, $w2) {
+            function($w1, $w2) {
                 return abs($w1 - $w2) > $this->threshold ? 1 : 0;
             },
             $oldTheta, $this->theta);
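A minimal standalone sketch (hypothetical names and values, not the library's exact code) of the early-stop test whose closure the last hunk reformats: training stops when no weight changed by more than the threshold.

<?php

// Hedged sketch: flag each weight that moved more than the threshold,
// then stop when none did.
$threshold = 1e-5;
$oldTheta  = [0.100000, 0.200000, 0.300000];
$theta     = [0.100001, 0.200000, 0.300000];

$diff = array_map(
    function ($w1, $w2) use ($threshold) {
        return abs($w1 - $w2) > $threshold ? 1 : 0;
    },
    $oldTheta,
    $theta
);

$shouldStop = array_sum($diff) === 0;
var_dump($shouldStop); // bool(true): every change is below the threshold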
src/Phpml/Helper/Optimizer/Optimizer.php 1 patch
Spacing   +2 added lines, -2 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Helper\Optimizer;
 
@@ -31,7 +31,7 @@
 
         // Inits the weights randomly
         $this->theta = [];
-        for ($i=0; $i < $this->dimensions; $i++) {
+        for ($i = 0; $i < $this->dimensions; $i++) {
             $this->theta[] = rand() / (float) getrandmax();
         }
     }
src/Phpml/Helper/Optimizer/ConjugateGradient.php 1 patch
Spacing   +3 added lines, -3 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Helper\Optimizer;
 
@@ -34,7 +34,7 @@
 
         $d = mp::muls($this->gradient($this->theta), -1);
 
-        for ($i=0; $i < $this->maxIterations; $i++) {
+        for ($i = 0; $i < $this->maxIterations; $i++) {
             // Obtain α that minimizes f(θ + α.d)
             $alpha = $this->getAlpha(array_sum($d));
 
@@ -161,7 +161,7 @@
     {
         $theta = $this->theta;
 
-        for ($i=0; $i < $this->dimensions + 1; $i++) {
+        for ($i = 0; $i < $this->dimensions + 1; $i++) {
             if ($i == 0) {
                 $theta[$i] += $alpha * array_sum($d);
             } else {
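A generic illustration (hypothetical values; deliberately simplified rather than the library's exact code) of the step the last hunk reformats: after the line search finds α, θ moves along the search direction d, i.e. θ ← θ + α·d.

<?php

// Hedged sketch: one conjugate-gradient line-search step, applied element-wise.
$alpha = 0.05;
$theta = [0.0, 0.4, -0.1];
$d     = [1.0, -0.2, 0.3]; // search direction, one entry per theta component

for ($i = 0; $i < count($theta); $i++) {
    $theta[$i] += $alpha * $d[$i];
}

print_r($theta); // approximately [0.05, 0.39, -0.085]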
src/Phpml/Math/Matrix.php 2 patches
Doc Comments   +2 added lines, -2 removed lines
@@ -106,7 +106,7 @@
     }
 
     /**
-     * @param $column
+     * @param integer $column
      *
      * @return array
      *
@@ -278,7 +278,7 @@
      * Element-wise addition or substraction depending on the given sign parameter
      *
      * @param Matrix $other
-     * @param type $sign
+     * @param integer $sign
      */
     protected function _add(Matrix $other, $sign = 1)
     {
Spacing   +4 added lines, -4 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Math;
 
@@ -286,8 +286,8 @@
         $a2 = $other->toArray();
 
         $newMatrix = [];
-        for ($i=0; $i < $this->rows; $i++) {
-            for ($k=0; $k < $this->columns; $k++) {
+        for ($i = 0; $i < $this->rows; $i++) {
+            for ($k = 0; $k < $this->columns; $k++) {
                 $newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k];
             }
         }
@@ -428,7 +428,7 @@
      */
     public static function map(array $array, $callable)
     {
-        $func = function ($row) use ($callable) {
+        $func = function($row) use ($callable) {
             return array_map($callable, $row);
         };
 
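A standalone illustration (hypothetical values, not the library's exact code) of the element-wise loop reformatted in the Spacing patch above: with $sign = 1 the two matrices are added, with $sign = -1 the second is subtracted from the first.

<?php

// Hedged sketch: element-wise add/subtract controlled by a sign parameter.
$a1 = [[1, 2], [3, 4]];
$a2 = [[5, 6], [7, 8]];
$sign = -1;

$rows = count($a1);
$columns = count($a1[0]);

$newMatrix = [];
for ($i = 0; $i < $rows; $i++) {
    for ($k = 0; $k < $columns; $k++) {
        $newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k];
    }
}

print_r($newMatrix); // [[-4, -4], [-4, -4]]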
src/Phpml/Math/Statistic/Covariance.php 2 patches
Doc Comments   -1 removed lines
@@ -60,7 +60,6 @@
      * @param int $i
      * @param int $k
      * @param type $sample
-     * @param int $n
      * @param float $meanX
      * @param float $meanY
      */
Spacing   +4 added lines, -4 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\Math\Statistic;
 
@@ -133,14 +133,14 @@
 
         if ($means === null) {
             $means = [];
-            for ($i=0; $i < $n; $i++) {
+            for ($i = 0; $i < $n; $i++) {
                 $means[] = Mean::arithmetic(array_column($data, $i));
             }
         }
 
         $cov = [];
-        for ($i=0; $i < $n; $i++) {
-            for ($k=0; $k < $n; $k++) {
+        for ($i = 0; $i < $n; $i++) {
+            for ($k = 0; $k < $n; $k++) {
                 if ($i > $k) {
                     $cov[$i][$k] = $cov[$k][$i];
                 } else {
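A standalone illustration (hypothetical data; the cell computation is not shown in the diff and is a plain sample-covariance here) of the loops the Spacing patch reformats: column means first, then a symmetric covariance matrix that reuses cov[k][i] for the lower triangle exactly as the diff does.

<?php

// Hedged sketch: per-column means, then the symmetric covariance matrix.
$data = [
    [2.0, 8.0],
    [4.0, 6.0],
    [6.0, 4.0],
];
$n = count($data[0]);    // number of columns / features
$rows = count($data);

$means = [];
for ($i = 0; $i < $n; $i++) {
    $means[] = array_sum(array_column($data, $i)) / $rows;
}

$cov = [];
for ($i = 0; $i < $n; $i++) {
    for ($k = 0; $k < $n; $k++) {
        if ($i > $k) {
            $cov[$i][$k] = $cov[$k][$i]; // reuse the symmetric entry
        } else {
            $sum = 0.0;
            foreach ($data as $row) {
                $sum += ($row[$i] - $means[$i]) * ($row[$k] - $means[$k]);
            }
            $cov[$i][$k] = $sum / ($rows - 1); // sample covariance
        }
    }
}

print_r($cov); // [[4, -4], [-4, 4]]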
src/Phpml/DimensionReduction/KernelPCA.php 2 patches
Indentation   +3 added lines, -3 removed lines
@@ -136,9 +136,9 @@
         $N_K_N = $N->multiply($K_N);
 
         return $K->subtract($N_K)
-                 ->subtract($K_N)
-                 ->add($N_K_N)
-                 ->toArray();
+                    ->subtract($K_N)
+                    ->add($N_K_N)
+                    ->toArray();
     }
 
     /**
Spacing   +10 added lines, -10 removed lines
@@ -1,6 +1,6 @@
 <?php
 
-declare(strict_types=1);
+declare(strict_types = 1);
 
 namespace Phpml\DimensionReduction;
 
@@ -46,7 +46,7 @@
      */
     public function __construct(int $kernel = self::KERNEL_RBF, $totalVariance = null, $numFeatures = null, $gamma = null)
     {
-        if (! in_array($kernel, [self::KERNEL_RBF, self::KERNEL_SIGMOID, self::KERNEL_LAPLACIAN])) {
+        if (!in_array($kernel, [self::KERNEL_RBF, self::KERNEL_SIGMOID, self::KERNEL_LAPLACIAN])) {
             throw new \Exception("KernelPCA can be initialized with the following kernels only: RBF, Sigmoid and Laplacian");
         }
 
@@ -100,8 +100,8 @@
         $kernelFunc = $this->getKernel();
 
         $matrix = [];
-        for ($i=0; $i < $numRows; $i++) {
-            for ($k=0; $k < $numRows; $k++) {
+        for ($i = 0; $i < $numRows; $i++) {
+            for ($k = 0; $k < $numRows; $k++) {
                 if ($i <= $k) {
                     $matrix[$i][$k] = $kernelFunc($data[$i], $data[$k]);
                 } else {
@@ -124,7 +124,7 @@
      */
     protected function centerMatrix(array $matrix, int $n)
     {
-        $N = array_fill(0, $n, array_fill(0, $n, 1.0/$n));
+        $N = array_fill(0, $n, array_fill(0, $n, 1.0 / $n));
         $N = new Matrix($N, false);
         $K = new Matrix($matrix, false);
 
@@ -157,7 +157,7 @@
             case self::KERNEL_RBF:
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Euclidean distance
                 $dist = new Euclidean();
-                return function ($x, $y = null) use ($dist) {
+                return function($x, $y = null) use ($dist) {
                     if ($y === null) {
                         foreach ($x as $i => $element) {
                             $x[$i] = exp(-$this->gamma * $element);
@@ -166,12 +166,12 @@
                         return $x;
                     }
 
-                    return exp(-$this->gamma *    $dist->distance($x, $y));
+                    return exp(-$this->gamma * $dist->distance($x, $y));
                 };
 
             case self::KERNEL_SIGMOID:
                 // k(x,y)=tanh(γ.xT.y+c0) where c0=0
-                return function ($x, $y = null) {
+                return function($x, $y = null) {
                     if ($y === null) {
                         foreach ($x as $i => $element) {
                             $x[$i] = tanh($this->gamma * $element);
@@ -190,7 +190,7 @@
             case self::KERNEL_LAPLACIAN:
                 // k(x,y)=exp(-γ.|x-y|) where |..| is Manhattan distance
                 $dist = new Manhattan();
-                return function ($x, $y = null) use ($dist) {
+                return function($x, $y = null) use ($dist) {
                     if ($y === null) {
                         foreach ($x as $i => $element) {
                             $x[$i] = exp(-$this->gamma * $element);
@@ -228,7 +228,7 @@
     protected function projectSample(array $pairs)
     {
         // Normalize eigenvectors by eig = eigVectors / eigValues
-        $func = function ($eigVal, $eigVect) {
+        $func = function($eigVal, $eigVect) {
            $m = new Matrix($eigVect, false);
             $a = $m->divideByScalar(sqrt($eigVal))->toArray();
 
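A standalone illustration (hypothetical names and values, not the library's exact code) of the RBF kernel closure whose formatting the Spacing patch changes: k(x, y) = exp(-γ·|x - y|), with |..| the Euclidean distance as noted in the diff comment.

<?php

// Hedged sketch: RBF kernel value between two points.
$gamma = 0.5;

$euclidean = function (array $x, array $y): float {
    $sum = 0.0;
    foreach ($x as $i => $xi) {
        $sum += ($xi - $y[$i]) ** 2;
    }
    return sqrt($sum);
};

$rbf = function (array $x, array $y) use ($gamma, $euclidean): float {
    return exp(-$gamma * $euclidean($x, $y));
};

// Kernel value between two 2-D points; identical points give 1.0.
echo $rbf([0.0, 0.0], [3.0, 4.0]), PHP_EOL; // exp(-0.5 * 5) ≈ 0.0821
echo $rbf([1.0, 1.0], [1.0, 1.0]), PHP_EOL; // 1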