@@ -114,6 +114,7 @@
| 114 | 114 | } |
| 115 | 115 | |
| 116 | 116 | /** |
| | 117 | + * @param integer $index |
| 117 | 118 | * @return array |
| 118 | 119 | */ |
| 119 | 120 | protected function getRandomSubset($index) |
@@ -6,10 +6,8 @@
| 6 | 6 | |
| 7 | 7 | use Phpml\Helper\Predictable; |
| 8 | 8 | use Phpml\Helper\Trainable; |
| 9 | | -use Phpml\Math\Statistic\Mean; |
| 10 | 9 | use Phpml\Classification\Classifier; |
| 11 | 10 | use Phpml\Classification\DecisionTree; |
| 12 | | -use Phpml\Classification\NaiveBayes; |
| 13 | 11 | |
| 14 | 12 | class Bagging implements Classifier |
| 15 | 13 | { |
@@ -1,6 +1,6 @@
| 1 | 1 | <?php |
| 2 | 2 | |
| 3 | | -declare(strict_types=1); |
| | 3 | +declare(strict_types = 1); |
| 4 | 4 | |
| 5 | 5 | namespace Phpml\Classification\Ensemble; |
| 6 | 6 | |
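The only change in this hunk is the spacing inside the `declare` statement; the behaviour of `strict_types` itself is untouched. For context, the directive controls whether scalar type hints coerce or reject mismatched arguments. A minimal sketch (illustrative only, not part of the patch):

```php
<?php
declare(strict_types = 1);

// With strict_types=1 scalar arguments are not silently coerced.
function half(int $n): float
{
    return $n / 2;
}

var_dump(half(4));   // float(2) — int-to-float widening of the return value is still allowed
half('4');           // TypeError: '4' is not coerced to int under strict_types
```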
@@ -118,13 +118,13 @@
| 118 | 118 | */ |
| 119 | 119 | protected function getRandomSubset($index) |
| 120 | 120 | { |
| 121 | | - $subsetLength = (int)ceil(sqrt($this->numSamples)); |
| | 121 | + $subsetLength = (int) ceil(sqrt($this->numSamples)); |
| 122 | 122 | $denom = $this->subsetRatio / 2; |
| 123 | 123 | $subsetLength = $this->numSamples / (1 / $denom); |
| 124 | 124 | $index = $index * $subsetLength % $this->numSamples; |
| 125 | 125 | $samples = []; |
| 126 | 126 | $targets = []; |
| 127 | | - for ($i=0; $i<$subsetLength * 2; $i++) { |
| | 127 | + for ($i = 0; $i < $subsetLength * 2; $i++) { |
| 128 | 128 | $rand = rand($index, $this->numSamples - 1); |
| 129 | 129 | $samples[] = $this->samples[$rand]; |
| 130 | 130 | $targets[] = $this->targets[$rand]; |
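These are whitespace-only fixes, but the surrounding logic is worth a second look when reading the hunk: the sqrt-based `$subsetLength` assigned on line 121 is overwritten two lines later, so the sampling effectively uses `numSamples * (subsetRatio / 2)` and then draws twice that many indices with `rand()`. A stand-alone paraphrase under that reading (function name and signature invented, not from the patch):

```php
<?php
// Illustrative paraphrase of the subset sampling above (not library code).
function sketchRandomSubset(array $samples, array $targets, float $subsetRatio, int $index): array
{
    $numSamples   = count($samples);
    $subsetLength = $numSamples * ($subsetRatio / 2);          // the value that survives
    $start        = (int) ($index * $subsetLength) % $numSamples;

    $subsetSamples = [];
    $subsetTargets = [];
    for ($i = 0; $i < $subsetLength * 2; $i++) {
        $rand = rand($start, $numSamples - 1);                 // sampling with replacement
        $subsetSamples[] = $samples[$rand];
        $subsetTargets[] = $targets[$rand];
    }

    return [$subsetSamples, $subsetTargets];
}
```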
@@ -138,7 +138,7 @@
| 138 | 138 | protected function initClassifiers() |
| 139 | 139 | { |
| 140 | 140 | $classifiers = []; |
| 141 | | - for ($i=0; $i<$this->numClassifier; $i++) { |
| | 141 | + for ($i = 0; $i < $this->numClassifier; $i++) { |
| 142 | 142 | $ref = new \ReflectionClass($this->classifier); |
| 143 | 143 | if ($this->classifierOptions) { |
| 144 | 144 | $obj = $ref->newInstanceArgs($this->classifierOptions); |
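Besides the loop-spacing fix, the hunk shows the instantiation going through `ReflectionClass`, which is what allows constructor options to be passed as an array. A self-contained sketch of that pattern with a made-up `Dummy` class (illustrative only):

```php
<?php
class Dummy
{
    /** @var int */
    private $depth;
    /** @var string */
    private $mode;

    public function __construct(int $depth = 5, string $mode = 'gini')
    {
        $this->depth = $depth;
        $this->mode  = $mode;
    }
}

$ref = new \ReflectionClass(Dummy::class);

// With options, spread them as constructor arguments; otherwise use the defaults.
$withOptions    = $ref->newInstanceArgs([10, 'entropy']);
$withoutOptions = $ref->newInstance();
```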
@@ -4,8 +4,6 @@
| 4 | 4 | namespace Phpml\Classification\Ensemble; |
| 5 | 5 | |
| 6 | 6 | use Phpml\Classification\Ensemble\Bagging; |
| 7 | | -use Phpml\Classification\DecisionTree; |
| 8 | | -use Phpml\Classification\NaiveBayes; |
| 9 | 7 | |
| 10 | 8 | class RandomForest extends Bagging |
| 11 | 9 | { |
@@ -1,5 +1,5 @@
| 1 | 1 | <?php |
| 2 | | -declare(strict_types=1); |
| | 2 | +declare(strict_types = 1); |
| 3 | 3 | |
| 4 | 4 | namespace Phpml\Classification\Ensemble; |
| 5 | 5 | |
@@ -36,10 +36,10 @@
| 36 | 36 | list($subset, $targets) = parent::getRandomSubset($index); |
| 37 | 37 | |
| 38 | 38 | $features = []; |
| 39 | | - $featureCount = (int)ceil($this->featureSubsetRatio * $this->featureCount); |
| | 39 | + $featureCount = (int) ceil($this->featureSubsetRatio * $this->featureCount); |
| 40 | 40 | while (count($features) < $featureCount) { |
| 41 | 41 | $rand = rand(0, $this->featureCount - 1); |
| 42 | | - if (! in_array($rand, $features)) { |
| | 42 | + if (!in_array($rand, $features)) { |
| 43 | 43 | $features[] = $rand; |
| 44 | 44 | } |
| 45 | 45 | } |
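Only whitespace changes here as well; for readers unfamiliar with the loop, it keeps drawing random feature indices until enough distinct ones are collected. A stand-alone paraphrase (function name and parameters invented, not from the patch):

```php
<?php
// Illustrative version of the distinct-feature sampling above (not library code).
function pickFeatureIndices(int $featureCount, float $featureSubsetRatio): array
{
    $target   = (int) ceil($featureSubsetRatio * $featureCount);
    $features = [];
    while (count($features) < $target) {
        $rand = rand(0, $featureCount - 1);
        if (!in_array($rand, $features)) {   // skip duplicates so each feature appears once
            $features[] = $rand;
        }
    }

    return $features;
}

// e.g. pickFeatureIndices(10, 0.7) returns 7 distinct indices between 0 and 9
```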
@@ -50,7 +50,7 @@
| 50 | 50 | foreach ($features as $colIndex) { |
| 51 | 51 | $columns[] = array_column($subset, $colIndex); |
| 52 | 52 | } |
| 53 | | - $subset= array_map(null, ...$columns); |
| | 53 | + $subset = array_map(null, ...$columns); |
| 54 | 54 | |
| 55 | 55 | return [$subset, $targets]; |
| 56 | 56 | } |
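The spacing fix lands on the line that undoes the column extraction above: `array_map(null, ...$columns)` zips the column arrays back into one row per sample, i.e. a transpose. A tiny illustration (sample data invented):

```php
<?php
// array_map(null, ...$columns) zips the column arrays back into rows (a transpose).
$columns = [
    [1, 2, 3],      // values of feature A for samples 0..2
    [10, 20, 30],   // values of feature B for samples 0..2
];

$rows = array_map(null, ...$columns);
// [[1, 10], [2, 20], [3, 30]] — one row per sample, restricted to the chosen features
```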
@@ -62,7 +62,7 @@
| 62 | 62 | protected function predictSample(array $sample) |
| 63 | 63 | { |
| 64 | 64 | $predictions = []; |
| 65 | | - for ($i=0; $i<count($this->classifiers); $i++) { |
| | 65 | + for ($i = 0; $i < count($this->classifiers); $i++) { |
| 66 | 66 | $samplePiece = []; |
| 67 | 67 | foreach ($this->classifierColumns[$i] as $colIndex) { |
| 68 | 68 | $samplePiece[] = $sample[$colIndex]; |
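Whitespace only again; the loop body shows that each classifier sees only the feature columns recorded for it in `$this->classifierColumns[$i]`, so the incoming sample has to be sliced the same way before prediction. A stand-alone illustration of that slicing (sample values invented):

```php
<?php
// Illustrative slicing of a sample down to one classifier's feature columns (not library code).
$sample            = ['sunny', 85, 85, 'false'];
$classifierColumns = [0, 2];              // columns this classifier was trained on

$samplePiece = [];
foreach ($classifierColumns as $colIndex) {
    $samplePiece[] = $sample[$colIndex];  // keep only the trained-on columns, in order
}
// $samplePiece === ['sunny', 85] — this is what gets passed to that classifier's predict()
```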
@@ -136,7 +136,7 @@
| 136 | 136 | |
| 137 | 137 | /** |
| 138 | 138 | * @param array $records |
| 139 | | - * @return DecisionTreeLeaf[] |
| | 139 | + * @return null|DecisionTreeLeaf |
| 140 | 140 | */ |
| 141 | 141 | protected function getBestSplit($records) |
| 142 | 142 | { |
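The annotation now advertises a single, possibly-null `DecisionTreeLeaf` instead of an array of leaves, so callers have to guard against `null` before dereferencing. A hypothetical caller-side sketch (the `leafLabel()` helper and the assumed `Phpml\Classification\DecisionTree\DecisionTreeLeaf` import are illustrative, not from the patch):

```php
<?php
use Phpml\Classification\DecisionTree\DecisionTreeLeaf;

// Hypothetical guard for the nullable return advertised above.
function leafLabel(?DecisionTreeLeaf $leaf, $fallback)
{
    if ($leaf === null) {
        return $fallback;        // no usable split was produced for these records
    }

    return $leaf->classValue;    // the predicted class value stored on the leaf
}
```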
@@ -275,7 +275,7 @@
| 275 | 275 | } |
| 276 | 276 | } while ($node); |
| 277 | 277 | |
| 278 | | - if ($node) { |
| | 278 | + if ($node) { |
| 279 | 279 | return $node->classValue; |
| 280 | 280 | } |
| 281 | 281 | return $this->labels[0]; |