@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Classification; |
6 | 6 | |
@@ -114,7 +114,7 @@ |
114 | 114 | { |
115 | 115 | $types = []; |
116 | 116 | $featureCount = count($samples[0]); |
117 | - for ($i=0; $i < $featureCount; $i++) { |
117 | + for ($i = 0; $i < $featureCount; $i++) { |
118 | 118 | $values = array_column($samples, $i); |
119 | 119 | $isCategorical = self::isCategoricalColumn($values); |
120 | 120 | $types[] = $isCategorical ? self::NOMINAL : self::CONTINUOUS; |
@@ -140,7 +140,7 @@ |
140 | 140 | // otherwise group the records so that we can classify the leaf |
141 | 141 | // in case maximum depth is reached |
142 | 142 | $leftRecords = []; |
143 | - $rightRecords= []; |
143 | + $rightRecords = []; |
144 | 144 | $remainingTargets = []; |
145 | 145 | $prevRecord = null; |
146 | 146 | $allSame = true; |
@@ -158,12 +158,12 @@ |
158 | 158 | if ($split->evaluate($record)) { |
159 | 159 | $leftRecords[] = $recordNo; |
160 | 160 | } else { |
161 | - $rightRecords[]= $recordNo; |
161 | + $rightRecords[] = $recordNo; |
162 | 162 | } |
163 | 163 | |
164 | 164 | // Group remaining targets |
165 | 165 | $target = $this->targets[$recordNo]; |
166 | - if (! array_key_exists($target, $remainingTargets)) { |
166 | + if (!array_key_exists($target, $remainingTargets)) { |
167 | 167 | $remainingTargets[$target] = 1; |
168 | 168 | } else { |
169 | 169 | $remainingTargets[$target]++; |
@@ -179,7 +179,7 @@ |
179 | 179 | $split->leftLeaf = $this->getSplitLeaf($leftRecords, $depth + 1); |
180 | 180 | } |
181 | 181 | if ($rightRecords) { |
182 | - $split->rightLeaf= $this->getSplitLeaf($rightRecords, $depth + 1); |
182 | + $split->rightLeaf = $this->getSplitLeaf($rightRecords, $depth + 1); |
183 | 183 | } |
184 | 184 | } |
185 | 185 | |
@@ -251,7 +251,7 @@ |
251 | 251 | protected function getSelectedFeatures() : array |
252 | 252 | { |
253 | 253 | $allFeatures = range(0, $this->featureCount - 1); |
254 | - if ($this->numUsableFeatures === 0 && ! $this->selectedFeatures) { |
254 | + if ($this->numUsableFeatures === 0 && !$this->selectedFeatures) { |
255 | 255 | return $allFeatures; |
256 | 256 | } |
257 | 257 | |
@@ -288,7 +288,7 @@ |
288 | 288 | $countMatrix[$label][$rowIndex]++; |
289 | 289 | } |
290 | 290 | $giniParts = [0, 0]; |
291 | - for ($i=0; $i<=1; $i++) { |
291 | + for ($i = 0; $i <= 1; $i++) { |
292 | 292 | $part = 0; |
293 | 293 | $sum = array_sum(array_column($countMatrix, $i)); |
294 | 294 | if ($sum > 0) { |
@@ -311,7 +311,7 @@ |
311 | 311 | // Detect and convert continuous data column values into |
312 | 312 | // discrete values by using the median as a threshold value |
313 | 313 | $columns = []; |
314 | - for ($i=0; $i<$this->featureCount; $i++) { |
314 | + for ($i = 0; $i < $this->featureCount; $i++) { |
315 | 315 | $values = array_column($samples, $i); |
316 | 316 | if ($this->columnTypes[$i] == self::CONTINUOUS) { |
317 | 317 | $median = Mean::median($values); |
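For context: the hunks above are whitespace-only, and the last one touches DecisionTree's preprocessing, where (per the comment at lines 311-312) continuous columns are converted to discrete values by thresholding at the median. A minimal standalone sketch of that idea, assuming php-ml's Mean::median; the helper name is illustrative and not part of this diff:

    use Phpml\Math\Statistic\Mean;

    // Split a continuous column into two bins around its median threshold.
    function discretizeByMedian(array $values): array
    {
        $median = Mean::median($values);

        return array_map(function ($v) use ($median) {
            return $v <= $median ? 0 : 1;
        }, $values);
    }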
@@ -4,7 +4,6 @@ |
4 | 4 | |
5 | 5 | namespace Phpml\Classification\Linear; |
6 | 6 | |
7 | -use Phpml\Classification\Classifier; |
8 | 7 | use Phpml\Helper\Optimizer\ConjugateGradient; |
9 | 8 | |
10 | 9 | class LogisticRegression extends Adaline |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Classification\Linear; |
6 | 6 | |
@@ -13,12 +13,12 @@ |
13 | 13 | /** |
14 | 14 | * Batch training: Gradient descent algorithm (default) |
15 | 15 | */ |
16 | - const BATCH_TRAINING = 1; |
16 | + const BATCH_TRAINING = 1; |
17 | 17 | |
18 | 18 | /** |
19 | 19 | * Online training: Stochastic gradient descent learning |
20 | 20 | */ |
21 | - const ONLINE_TRAINING = 2; |
21 | + const ONLINE_TRAINING = 2; |
22 | 22 | |
23 | 23 | /** |
24 | 24 | * Conjugate Batch: Conjugate Gradient algorithm |
@@ -74,14 +74,14 @@ |
74 | 74 | string $penalty = 'L2') |
75 | 75 | { |
76 | 76 | $trainingTypes = range(self::BATCH_TRAINING, self::CONJUGATE_GRAD_TRAINING); |
77 | - if (! in_array($trainingType, $trainingTypes)) { |
78 | - throw new \Exception("Logistic regression can only be trained with " . |
79 | - "batch (gradient descent), online (stochastic gradient descent) " . |
77 | + if (!in_array($trainingType, $trainingTypes)) { |
78 | + throw new \Exception("Logistic regression can only be trained with ". |
79 | + "batch (gradient descent), online (stochastic gradient descent) ". |
80 | 80 | "or conjugate batch (conjugate gradients) algorithms"); |
81 | 81 | } |
82 | 82 | |
83 | - if (! in_array($cost, ['log', 'sse'])) { |
84 | - throw new \Exception("Logistic regression cost function can be one of the following: \n" . |
83 | + if (!in_array($cost, ['log', 'sse'])) { |
84 | + throw new \Exception("Logistic regression cost function can be one of the following: \n". |
85 | 85 | "'log' for log-likelihood and 'sse' for sum of squared errors"); |
86 | 86 | } |
87 | 87 | |
@@ -177,7 +177,7 @@ |
177 | 177 | * The gradient of the cost function to be used with gradient descent: |
178 | 178 | * ∇J(x) = -(y - h(x)) = (h(x) - y) |
179 | 179 | */ |
180 | - $callback = function ($weights, $sample, $y) use ($penalty) { |
180 | + $callback = function($weights, $sample, $y) use ($penalty) { |
181 | 181 | $this->weights = $weights; |
182 | 182 | $hX = $this->output($sample); |
183 | 183 | |
@@ -208,7 +208,7 @@ |
208 | 208 | * The gradient of the cost function: |
209 | 209 | * ∇J(x) = -(h(x) - y) . h(x) . (1 - h(x)) |
210 | 210 | */ |
211 | - $callback = function ($weights, $sample, $y) use ($penalty) { |
211 | + $callback = function($weights, $sample, $y) use ($penalty) { |
212 | 212 | $this->weights = $weights; |
213 | 213 | $hX = $this->output($sample); |
214 | 214 |
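For context: the two closures reformatted above implement the gradients stated in the docblocks (log-likelihood: ∇J = h(x) − y; sum of squared errors: ∇J = (h(x) − y)·h(x)·(1 − h(x))). A minimal standalone sketch of the log-likelihood case, assuming labels in {0, 1} and $hX already being the sigmoid output h(x); names and the epsilon guard are illustrative, not part of this diff:

    // Per-sample cost and gradient for the log-likelihood (cross-entropy) cost.
    $logGradient = function (float $hX, float $y): array {
        $error = $hX - $y;                                                   // ∇J(x) = h(x) - y
        $cost  = -$y * log($hX + 1e-15) - (1 - $y) * log(1 - $hX + 1e-15);  // cross-entropy
        return [$cost, $error];
    };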
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Helper\Optimizer; |
6 | 6 | |
@@ -42,7 +42,7 @@ |
42 | 42 | |
43 | 43 | $this->updateWeightsWithUpdates($updates, $totalPenalty); |
44 | 44 | |
45 | - $this->costValues[] = array_sum($errors)/$this->sampleCount; |
45 | + $this->costValues[] = array_sum($errors) / $this->sampleCount; |
46 | 46 | |
47 | 47 | if ($this->earlyStop($theta)) { |
48 | 48 | break; |
@@ -63,7 +63,7 @@ |
63 | 63 | protected function gradient(array $theta) |
64 | 64 | { |
65 | 65 | $costs = []; |
66 | - $gradient= []; |
66 | + $gradient = []; |
67 | 67 | $totalPenalty = 0; |
68 | 68 | |
69 | 69 | foreach ($this->samples as $index => $sample) { |
@@ -73,7 +73,7 @@ |
73 | 73 | list($cost, $grad, $penalty) = array_pad($result, 3, 0); |
74 | 74 | |
75 | 75 | $costs[] = $cost; |
76 | - $gradient[]= $grad; |
76 | + $gradient[] = $grad; |
77 | 77 | $totalPenalty += $penalty; |
78 | 78 | } |
79 | 79 | |
@@ -89,7 +89,7 @@ |
89 | 89 | protected function updateWeightsWithUpdates(array $updates, float $penalty) |
90 | 90 | { |
91 | 91 | // Updates all weights at once |
92 | - for ($i=0; $i <= $this->dimensions; $i++) { |
92 | + for ($i = 0; $i <= $this->dimensions; $i++) { |
93 | 93 | if ($i == 0) { |
94 | 94 | $this->theta[0] -= $this->learningRate * array_sum($updates); |
95 | 95 | } else { |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Helper\Optimizer; |
6 | 6 | |
@@ -72,7 +72,7 @@ |
72 | 72 | * |
73 | 73 | * @var array |
74 | 74 | */ |
75 | - protected $costValues= []; |
75 | + protected $costValues = []; |
76 | 76 | |
77 | 77 | /** |
78 | 78 | * Initializes the SGD optimizer for the given number of dimensions |
@@ -216,7 +216,7 @@ |
216 | 216 | $this->theta[0] -= $this->learningRate * $gradient; |
217 | 217 | |
218 | 218 | // Update other values |
219 | - for ($i=1; $i <= $this->dimensions; $i++) { |
219 | + for ($i = 1; $i <= $this->dimensions; $i++) { |
220 | 220 | $this->theta[$i] -= $this->learningRate * |
221 | 221 | ($gradient * $sample[$i - 1] + $penalty * $this->theta[$i]); |
222 | 222 | } |
@@ -240,7 +240,7 @@ |
240 | 240 | { |
241 | 241 | // Check for early stop: No change larger than threshold (default 1e-5) |
242 | 242 | $diff = array_map( |
243 | - function ($w1, $w2) { |
243 | + function($w1, $w2) { |
244 | 244 | return abs($w1 - $w2) > $this->threshold ? 1 : 0; |
245 | 245 | }, |
246 | 246 | $oldTheta, $this->theta); |
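For context: the early-stop test reformatted above simply compares consecutive weight vectors element-wise against a threshold (default 1e-5 per the comment). A minimal standalone sketch of that check; the function name is illustrative, not part of this diff:

    // Returns true when no weight moved by more than $threshold between two iterations.
    function hasConverged(array $oldTheta, array $newTheta, float $threshold = 1e-5): bool
    {
        foreach ($oldTheta as $i => $weight) {
            if (abs($weight - $newTheta[$i]) > $threshold) {
                return false;
            }
        }

        return true;
    }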
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Helper\Optimizer; |
6 | 6 | |
@@ -31,7 +31,7 @@ |
31 | 31 | |
32 | 32 | // Inits the weights randomly |
33 | 33 | $this->theta = []; |
34 | - for ($i=0; $i < $this->dimensions; $i++) { |
34 | + for ($i = 0; $i < $this->dimensions; $i++) { |
35 | 35 | $this->theta[] = rand() / (float) getrandmax(); |
36 | 36 | } |
37 | 37 | } |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Helper\Optimizer; |
6 | 6 | |
@@ -34,7 +34,7 @@ |
34 | 34 | |
35 | 35 | $d = mp::muls($this->gradient($this->theta), -1); |
36 | 36 | |
37 | - for ($i=0; $i < $this->maxIterations; $i++) { |
37 | + for ($i = 0; $i < $this->maxIterations; $i++) { |
38 | 38 | // Obtain α that minimizes f(θ + α.d) |
39 | 39 | $alpha = $this->getAlpha(array_sum($d)); |
40 | 40 | |
@@ -161,7 +161,7 @@ |
161 | 161 | { |
162 | 162 | $theta = $this->theta; |
163 | 163 | |
164 | - for ($i=0; $i < $this->dimensions + 1; $i++) { |
164 | + for ($i = 0; $i < $this->dimensions + 1; $i++) { |
165 | 165 | if ($i == 0) { |
166 | 166 | $theta[$i] += $alpha * array_sum($d); |
167 | 167 | } else { |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml; |
6 | 6 |
@@ -106,7 +106,7 @@ |
106 | 106 | } |
107 | 107 | |
108 | 108 | /** |
109 | - * @param $column |
109 | + * @param integer $column |
110 | 110 | * |
111 | 111 | * @return array |
112 | 112 | * |
@@ -278,7 +278,7 @@ |
278 | 278 | * Element-wise addition or substraction depending on the given sign parameter |
279 | 279 | * |
280 | 280 | * @param Matrix $other |
281 | - * @param type $sign |
281 | + * @param integer $sign |
282 | 282 | */ |
283 | 283 | protected function _add(Matrix $other, $sign = 1) |
284 | 284 | { |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Math; |
6 | 6 | |
@@ -179,7 +179,7 @@ |
179 | 179 | public function transpose() |
180 | 180 | { |
181 | 181 | if ($this->rows == 1) { |
182 | - $matrix = array_map(function ($el) { |
182 | + $matrix = array_map(function($el) { |
183 | 183 | return [$el]; |
184 | 184 | }, $this->matrix[0]); |
185 | 185 | } else { |
@@ -283,8 +283,8 @@ |
283 | 283 | $a2 = $other->toArray(); |
284 | 284 | |
285 | 285 | $newMatrix = []; |
286 | - for ($i=0; $i < $this->rows; $i++) { |
287 | - for ($k=0; $k < $this->columns; $k++) { |
286 | + for ($i = 0; $i < $this->rows; $i++) { |
287 | + for ($k = 0; $k < $this->columns; $k++) { |
288 | 288 | $newMatrix[$i][$k] = $a1[$i][$k] + $sign * $a2[$i][$k]; |
289 | 289 | } |
290 | 290 | } |
@@ -60,7 +60,6 @@ |
60 | 60 | * @param int $i |
61 | 61 | * @param int $k |
62 | 62 | * @param type $sample |
63 | - * @param int $n |
64 | 63 | * @param float $meanX |
65 | 64 | * @param float $meanY |
66 | 65 | */ |
@@ -1,6 +1,6 @@ |
1 | 1 | <?php |
2 | 2 | |
3 | -declare(strict_types=1); |
3 | +declare(strict_types = 1); |
4 | 4 | |
5 | 5 | namespace Phpml\Math\Statistic; |
6 | 6 | |
@@ -133,14 +133,14 @@ |
133 | 133 | |
134 | 134 | if ($means === null) { |
135 | 135 | $means = []; |
136 | - for ($i=0; $i < $n; $i++) { |
136 | + for ($i = 0; $i < $n; $i++) { |
137 | 137 | $means[] = Mean::arithmetic(array_column($data, $i)); |
138 | 138 | } |
139 | 139 | } |
140 | 140 | |
141 | 141 | $cov = []; |
142 | - for ($i=0; $i < $n; $i++) { |
143 | - for ($k=0; $k < $n; $k++) { |
142 | + for ($i = 0; $i < $n; $i++) { |
143 | + for ($k = 0; $k < $n; $k++) { |
144 | 144 | if ($i > $k) { |
145 | 145 | $cov[$i][$k] = $cov[$k][$i]; |
146 | 146 | } else { |
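For context: the covariance loop reformatted above fills a symmetric matrix, so entries with $i > $k are copied from the already-computed $cov[$k][$i]. Each entry is a covariance of two columns around their means; a minimal standalone sketch for one pair of columns, using the n − 1 (sample) divisor, which is illustrative and not necessarily the library's exact convention:

    // Sample covariance of two equal-length columns: sum((x - meanX)(y - meanY)) / (n - 1).
    function covarianceSketch(array $x, array $y): float
    {
        $n = count($x);
        $meanX = array_sum($x) / $n;
        $meanY = array_sum($y) / $n;

        $sum = 0.0;
        foreach ($x as $i => $xi) {
            $sum += ($xi - $meanX) * ($y[$i] - $meanY);
        }

        return $sum / ($n - 1);
    }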