splitbrain/dokuwiki
This project does not seem to handle request data directly; as such, no vulnerable execution paths were found. Code might still be loaded indirectly, for example via include or via PHP's auto-loading mechanism.
These results are based on our legacy PHP analysis; consider migrating to our new PHP analysis engine instead.
```php
<?php

namespace dokuwiki;

use dokuwiki\Extension\Event;
use dokuwiki\Sitemap\Mapper;
use dokuwiki\Subscriptions\BulkSubscriptionSender;

/**
 * Class TaskRunner
 *
 * Run an asynchronous task.
 */
class TaskRunner
{
    /**
     * Run the next task
     *
     * @todo refactor to remove dependencies on globals
     * @triggers INDEXER_TASKS_RUN
     */
    public function run()
    {
        global $INPUT, $conf, $ID;

        // keep running after browser closes connection
        @ignore_user_abort(true);

        // check if user abort worked, if yes send output early
        $defer = !@ignore_user_abort() || $conf['broken_iua'];
        $output = $INPUT->has('debug') && $conf['allowdebug'];
        if (!$defer && !$output) {
            $this->sendGIF();
        }

        $ID = cleanID($INPUT->str('id'));

        // Catch any possible output (e.g. errors)
        if (!$output) {
            ob_start();
        } else {
            header('Content-Type: text/plain');
        }

        // run one of the jobs
        $tmp = []; // No event data
        $evt = new Event('INDEXER_TASKS_RUN', $tmp);
        if ($evt->advise_before()) {
            $this->runIndexer() or
            $this->runSitemapper() or
            $this->sendDigest() or
            $this->runTrimRecentChanges() or
            $this->runTrimRecentChanges(true) or
            $evt->advise_after();
        }

        if (!$output) {
            ob_end_clean();
            if ($defer) {
                $this->sendGIF();
            }
        }
    }

    /**
     * Just send a 1x1 pixel blank gif to the browser
     *
     * @author Andreas Gohr <[email protected]>
     * @author Harry Fuecks <[email protected]>
     */
    protected function sendGIF()
    {
        $img = base64_decode('R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAEALAAAAAABAAEAAAIBTAA7');
        header('Content-Type: image/gif');
        header('Content-Length: ' . strlen($img));
        header('Connection: Close');
        print $img;
        tpl_flush();
        // Browser should drop connection after this
        // Thinks it's got the whole image
    }

    /**
     * Trims the recent changes cache (or imports the old changelog) as needed.
     *
     * @param bool $media_changes If the media changelog shall be trimmed instead of
     *                            the page changelog
     *
     * @return bool
     * @triggers TASK_RECENTCHANGES_TRIM
     * @author Ben Coburn <[email protected]>
     */
    protected function runTrimRecentChanges($media_changes = false)
    {
        global $conf;

        echo "runTrimRecentChanges($media_changes): started" . NL;

        $fn = ($media_changes ? $conf['media_changelog'] : $conf['changelog']);

        // Trim the Recent Changes
        // Trims the recent changes cache to the last $conf['changes_days'] recent
        // changes or $conf['recent'] items, whichever is larger.
        // The trimming is only done once a day.
        if (file_exists($fn) &&
            (@filemtime($fn . '.trimmed') + 86400) < time() &&
            !file_exists($fn . '_tmp')) {
            @touch($fn . '.trimmed');
            io_lock($fn);
            $lines = file($fn);
            if (count($lines) <= $conf['recent']) {
                // nothing to trim
                io_unlock($fn);
                echo "runTrimRecentChanges($media_changes): finished" . NL;
                return false;
            }

            io_saveFile($fn . '_tmp', ''); // presave tmp as 2nd lock
            $trim_time = time() - $conf['recent_days'] * 86400;
            $out_lines = [];
            $old_lines = [];
            for ($i = 0; $i < count($lines); $i++) {
                $log = parseChangelogLine($lines[$i]);
                if ($log === false) {
                    continue; // discard junk
                }

                if ($log['date'] < $trim_time) {
                    // keep old lines for now (append .$i to prevent key collisions)
                    $old_lines[$log['date'] . ".$i"] = $lines[$i];
                } else {
                    // definitely keep these lines
                    $out_lines[$log['date'] . ".$i"] = $lines[$i];
                }
            }

            if (count($lines) == count($out_lines)) {
                // nothing to trim
                @unlink($fn . '_tmp');
                io_unlock($fn);
                echo "runTrimRecentChanges($media_changes): finished" . NL;
                return false;
            }

            // sort the final result, it shouldn't be necessary,
            // however the extra robustness in making the changelog cache self-correcting is worth it
            ksort($out_lines);
            $extra = $conf['recent'] - count($out_lines); // do we need extra lines to bring us up to minimum
            if ($extra > 0) {
                ksort($old_lines);
                $out_lines = array_merge(array_slice($old_lines, -$extra), $out_lines);
            }

            $eventData = [
                'isMedia' => $media_changes,
                'trimmedChangelogLines' => $out_lines,
                'removedChangelogLines' => $extra > 0 ? array_slice($old_lines, 0, -$extra) : $old_lines,
            ];
            Event::createAndTrigger('TASK_RECENTCHANGES_TRIM', $eventData);
            $out_lines = $eventData['trimmedChangelogLines'];

            // save trimmed changelog
            io_saveFile($fn . '_tmp', implode('', $out_lines));
            @unlink($fn);
            if (!rename($fn . '_tmp', $fn)) {
                // rename failed so try another way...
                io_unlock($fn);
                io_saveFile($fn, implode('', $out_lines));
                @unlink($fn . '_tmp');
            } else {
                io_unlock($fn);
            }
            echo "runTrimRecentChanges($media_changes): finished" . NL;
            return true;
        }

        // nothing done
        echo "runTrimRecentChanges($media_changes): finished" . NL;
        return false;
    }

    /**
     * Runs the indexer for the current page
     *
     * @author Andreas Gohr <[email protected]>
     */
    protected function runIndexer()
    {
        global $ID;
        print 'runIndexer(): started' . NL;

        if ((string) $ID === '') {
            return false;
        }

        // do the work
        return idx_addPage($ID, true);
    }

    /**
     * Builds a Google Sitemap of all public pages known to the indexer
     *
     * The map is placed in the root directory named sitemap.xml.gz - This
     * file needs to be writable!
     *
     * @author Andreas Gohr
     * @link https://www.google.com/webmasters/sitemaps/docs/en/about.html
     */
    protected function runSitemapper()
    {
        print 'runSitemapper(): started' . NL;
        $result = Mapper::generate() && Mapper::pingSearchEngines();
        print 'runSitemapper(): finished' . NL;
        return $result;
    }

    /**
     * Send digest and list mails for all subscriptions which are in effect for the
     * current page
     *
     * @author Adrian Lang <[email protected]>
     */
    protected function sendDigest()
    {
        global $ID;

        echo 'sendDigest(): started' . NL;
        if (!actionOK('subscribe')) {
            echo 'sendDigest(): disabled' . NL;
            return false;
        }
        $sub = new BulkSubscriptionSender();
        $sent = $sub->sendBulk($ID);

        echo "sendDigest(): sent $sent mails" . NL;
        echo 'sendDigest(): finished' . NL;
        return (bool)$sent;
    }
}
```
If the size of the collection does not change during the iteration, it is generally good practice to compute it once beforehand rather than on each iteration:
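A minimal sketch of that suggestion, modelled on the changelog-trimming loop in `runTrimRecentChanges()` above; this is an illustration, not code from the DokuWiki repository, and the sample `$lines` array is made up:

```php
<?php
// Hypothetical, simplified stand-in for the changelog lines read in
// runTrimRecentChanges(); in the real code $lines comes from file($fn).
$lines = ['1700000000 entry one', '1700000100 entry two', '1700000200 entry three'];

// Before: count($lines) is re-evaluated in the loop condition on every iteration.
for ($i = 0; $i < count($lines); $i++) {
    // ... process $lines[$i]
}

// After: the array is not modified inside the loop, so compute its size once
// up front and reuse it in the loop condition.
$lineCount = count($lines);
for ($i = 0; $i < $lineCount; $i++) {
    // ... process $lines[$i]
}
```

The behaviour is unchanged because `$lines` is never modified while iterating; the refactor simply avoids repeated `count()` calls on what may be a large changelog.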