Completed
Push — issue/139 (971053...0c29f3) by Tomas Norre
created at 17:25
Classes/Hooks/StaticFileCacheCreateUriHook.php 1 patch
Doc Comments   +1 added lines, -1 removed lines
@@ -53,7 +53,7 @@
      * @param string $uri
      * @param TypoScriptFrontendController $frontend
      *
-     * @return array
+     * @return string[]
      *
      * @throws \Exception
      *
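The narrowed annotation mainly helps static analysis and IDE completion: string[] declares the element type, while a plain array says nothing about the entries. A minimal sketch of the effect on a caller; the method name buildUris() and the $hook variable are hypothetical, only the parameter and return annotations come from the hunk above:

    /** @var string[] $uris */
    $uris = $hook->buildUris($uri, $frontend); // hypothetical call matching the annotated docblock
    foreach ($uris as $singleUri) {
        // every entry is documented as a string, so string operations are safe here
        $normalizedUri = rtrim($singleUri, '/');
    }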
Classes/Api/CrawlerApi.php 1 patch
Doc Comments   +1 added lines, -1 removed lines
@@ -209,7 +209,7 @@
     /**
      * Determines if a page is queued
      *
-     * @param $uid
+     * @param integer $uid
      * @param bool $unprocessed_only
      * @param bool $timed_only
      * @param bool $timestamp
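With the integer type added, the full docblock for the queue check reads roughly as sketched below. The parameter list and the description come from the hunk; the method name isPageInQueue(), the default values, the parameter descriptions and the bool return are assumptions made for illustration:

    /**
     * Determines if a page is queued
     *
     * @param integer $uid              uid of the page to check
     * @param bool $unprocessed_only    only consider queue entries that were not processed yet
     * @param bool $timed_only          only consider queue entries with a scheduled time
     * @param bool $timestamp           restrict the check to a specific scheduled timestamp
     *
     * @return bool
     */
    public function isPageInQueue($uid, $unprocessed_only = true, $timed_only = false, $timestamp = false)
    {
        // ...
    }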
cli/class.tx_crawler_cli_im.php 1 patch
Indentation   +5 added lines, -5 removed lines
@@ -49,11 +49,11 @@
 
         // Adding options to help archive:
         /**
-        * We removed the "proc" option as it seemd not to be working any more. But as the complete handling of the crawler has changed regarding the configuration
-        * this is completely ok. Since configuration records were introduced to configure "what should be done" additionally to page ts the way to setup jobs
-        * has drifted from selecting filtering processing instructions to selecting/filtering configuration keys (you can configure the processing instructions there).
-        * This is also reflected in the backend modules and allows you a much clearer and powerful way to work with the crawler extension.
-        */
+         * We removed the "proc" option as it seemd not to be working any more. But as the complete handling of the crawler has changed regarding the configuration
+         * this is completely ok. Since configuration records were introduced to configure "what should be done" additionally to page ts the way to setup jobs
+         * has drifted from selecting filtering processing instructions to selecting/filtering configuration keys (you can configure the processing instructions there).
+         * This is also reflected in the backend modules and allows you a much clearer and powerful way to work with the crawler extension.
+         */
         // $this->cli_options[] = array('-proc listOfProcInstr', 'Comma list of processing instructions. These are the "actions" carried out when crawling and you must specify at least one. Depends on third-party extensions. Examples are "tx_cachemgm_recache" from "cachemgm" extension (will recache pages), "tx_staticpub_publish" from "staticpub" (publishing pages to static files) or "tx_indexedsearch_reindex" from "indexed_search" (indexes pages).');
         // TODO: cleanup here!
         $this->cli_options[] = ['-d depth', 'Tree depth, 0-99', "How many levels under the 'page_id' to include."];
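For orientation, each cli_options entry shown above follows the pattern [option with placeholder, short label, longer help text]. A sketch of registering one more option in the same style; the second entry below is purely illustrative and not part of the patch:

    // Each entry: ['-<option> <placeholder>', '<short label>', '<help text shown in the CLI usage output>']
    $this->cli_options[] = ['-d depth', 'Tree depth, 0-99', "How many levels under the 'page_id' to include."];
    $this->cli_options[] = ['-n number', 'Number of pages', 'Illustrative example of an additional option entry.'];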
view/process/class.tx_crawler_view_process_list.php 1 patch
Doc Comments   +3 added lines, -3 removed lines
@@ -311,7 +311,7 @@
     /**
      * Returns a tag for the refresh icon
      *
-     * @return string
+     * @return AOE\Crawler\Utility\ButtonUtility
      */
     protected function getRefreshLink()
     {
@@ -325,7 +325,7 @@
     /**
      * Returns a link for the panel to enable or disable the crawler
      *
-     * @return string
+     * @return AOE\Crawler\Utility\ButtonUtility
      */
     protected function getEnableDisableLink()
     {
@@ -351,7 +351,7 @@
      *
      * @param void
      *
-     * @return string a-tag
+     * @return AOE\Crawler\Utility\ButtonUtility|null a-tag
      */
     protected function getModeLink()
     {
class.tx_crawler_lib.php 1 patch
Doc Comments   +21 added lines, -6 removed lines
@@ -422,7 +422,7 @@
      *
      * @param  string $piString                     PI to test
      * @param  array  $incomingProcInstructions     Processing instructions
-     * @return boolean                              TRUE if found
+     * @return boolean|null                              TRUE if found
      */
     public function drawURLs_PIfilter($piString, array $incomingProcInstructions)
     {
@@ -632,6 +632,9 @@
         return $baseUrl;
     }
 
+    /**
+     * @param integer $rootid
+     */
     public function getConfigurationsForBranch($rootid, $depth)
     {
         $configurationsForBranch = [];
@@ -735,6 +738,7 @@
      *
      * @param    array        Array with key (GET var name) and values (value of GET var which is configuration for expansion)
      * @param    integer        Current page ID
+     * @param integer $pid
      * @return    array        Array with key (GET var name) with the value being an array of all possible values for that key.
      */
     public function expandParameters($paramArray, $pid)
@@ -853,7 +857,7 @@
      * The number of URLs will be the multiplication of the number of parameter values for each key
      *
      * @param  array  $paramArray   Output of expandParameters(): Array with keys (GET var names) and for each an array of values
-     * @param  array  $urls         URLs accumulated in this array (for recursion)
+     * @param  string[]  $urls         URLs accumulated in this array (for recursion)
      * @return array                URLs accumulated, if number of urls exceed 'maxCompileUrls' it will return false as an error!
      */
     public function compileUrls($paramArray, $urls = [])
@@ -1033,6 +1037,8 @@
      * @param    integer        Scheduled-time
      * @param     string        (optional) configuration hash
      * @param     bool        (optional) skip inner duplication check
+     * @param string $url
+     * @param double $tstamp
      * @return    bool        true if the url was added, false if it already existed
      */
     public function addUrl(
@@ -1503,6 +1509,7 @@
 
     /**
      * @param message
+     * @param string $message
      */
     protected function log($message)
     {
@@ -1517,7 +1524,7 @@
      * @param array $url
      * @param string $crawlerId
      *
-     * @return array
+     * @return string[]
      */
     protected function buildRequestHeaderArray(array $url, $crawlerId)
     {
@@ -1626,6 +1633,12 @@
      * @param    boolean        If set (and submitcrawlUrls is false) will fill $downloadUrls with entries)
      * @param    array        Array of processing instructions
      * @param    array        Array of configuration keys
+     * @param integer $id
+     * @param integer $depth
+     * @param integer $scheduledTime
+     * @param integer $reqMinute
+     * @param boolean $submitCrawlUrls
+     * @param boolean $downloadCrawlUrls
      * @return    string        HTML code
      */
     public function getPageTreeAndUrls(
@@ -1773,6 +1786,7 @@
      *
      * @param    array        Page row
      * @param    string        Page icon and title for row
+     * @param string $pageTitleAndIcon
      * @return    string        HTML <tr> content (one or more)
      */
     public function drawURLs_addRowsForPage(array $pageRow, $pageTitleAndIcon)
@@ -2064,7 +2078,7 @@
     /**
      * Function executed by crawler_im.php cli script.
      *
-     * @return bool
+     * @return null|boolean
      */
     public function CLI_main_flush()
     {
@@ -2123,7 +2137,7 @@
      * @param  int $countInARun
      * @param  int $sleepTime
      * @param  int $sleepAfterFinish
-     * @return string                   Status message
+     * @return integer                   Status message
      */
     public function CLI_run($countInARun, $sleepTime, $sleepAfterFinish)
     {
@@ -2398,6 +2412,7 @@
      * Used to determine timeouts and to ensure a proper cleanup if there's a timeout
      *
      * @param  string  identification string for the process
+     * @param string $pid
      * @return boolean determines if the process is still active / has resources
      *
      * FIXME: Please remove Transaction, not needed as only a select query.
@@ -2438,7 +2453,7 @@
     /**
      * @param bool $get_as_float
      *
-     * @return mixed
+     * @return string
      */
     protected function microtime($get_as_float = false)
     {
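As a worked illustration of the compileUrls() contract documented above (keys map to lists of values, the result size is the product of the list lengths), a sketch with assumed data; the $crawlerLib variable, the seeded base URL and the exact query-string format are assumptions:

    // Output of expandParameters(): two GET vars with 2 and 3 possible values
    $paramArray = [
        'L'     => [0, 1],
        'level' => [10, 20, 30],
    ];
    // Seed the string[] accumulator with a base URL (assumed usage), then compile:
    $urls = $crawlerLib->compileUrls($paramArray, ['index.php?id=42']);
    // Expected: 2 * 3 = 6 URL strings, e.g. 'index.php?id=42&L=0&level=10', ...
    // Per the docblock, false is returned instead if the count exceeds 'maxCompileUrls'.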
domain/queue/class.tx_crawler_domain_queue_repository.php 1 patch
Indentation   +1 added lines, -1 removed lines
@@ -64,7 +64,7 @@
     {
         $db = $this->getDB();
         $where = 'process_id_completed=' . $db->fullQuoteStr($process->getProcess_id(), $this->tableName) .
-                   ' AND exec_time > 0 ';
+                    ' AND exec_time > 0 ';
         $limit = 1;
         $groupby = '';
 