@@ -430,6 +430,10 @@ discard block |
||
430 | 430 | * @param array Array which is passed by reference and contains an id per url to ensure we will not crawl duplicates |
431 | 431 | * @param array Array which will be filled with URLs for download if flag is set. |
432 | 432 | * @param array Array of processing instructions |
433 | + * @param integer $scheduledTime |
|
434 | + * @param integer $reqMinute |
|
435 | + * @param boolean $submitCrawlUrls |
|
436 | + * @param boolean $downloadCrawlUrls |
|
433 | 437 | * @return string List of URLs (meant for display in backend module) |
434 | 438 | * |
435 | 439 | */ |
@@ -512,7 +516,7 @@ discard block |
||
512 | 516 | * |
513 | 517 | * @param string $piString PI to test |
514 | 518 | * @param array $incomingProcInstructions Processing instructions |
515 | - * @return boolean |
|
519 | + * @return boolean|null |
|
516 | 520 | */ |
517 | 521 | public function drawURLs_PIfilter($piString, array $incomingProcInstructions) |
518 | 522 | { |
@@ -734,7 +738,7 @@ discard block |
||
734 | 738 | } |
735 | 739 | |
736 | 740 | /** |
737 | - * @param $rootid |
|
741 | + * @param integer $rootid |
|
738 | 742 | * @param $depth |
739 | 743 | * @return array |
740 | 744 | * |
@@ -997,7 +1001,7 @@ discard block |
||
997 | 1001 | * The number of URLs will be the multiplication of the number of parameter values for each key |
998 | 1002 | * |
999 | 1003 | * @param array $paramArray Output of expandParameters(): Array with keys (GET var names) and for each an array of values |
1000 | - * @param array $urls URLs accumulated in this array (for recursion) |
|
1004 | + * @param string[] $urls URLs accumulated in this array (for recursion) |
|
1001 | 1005 | * @return array |
1002 | 1006 | */ |
1003 | 1007 | public function compileUrls($paramArray, $urls = []) |
@@ -1756,7 +1760,7 @@ discard block |
||
1756 | 1760 | * @param array $url |
1757 | 1761 | * @param string $crawlerId |
1758 | 1762 | * |
1759 | - * @return array |
|
1763 | + * @return string[] |
|
1760 | 1764 | */ |
1761 | 1765 | protected function buildRequestHeaderArray(array $url, $crawlerId) |
1762 | 1766 | { |
@@ -2034,6 +2038,7 @@ discard block |
||
2034 | 2038 | * |
2035 | 2039 | * @param array Page row |
2036 | 2040 | * @param string Page icon and title for row |
2041 | + * @param string $pageTitleAndIcon |
|
2037 | 2042 | * @return string HTML <tr> content (one or more) |
2038 | 2043 | */ |
2039 | 2044 | public function drawURLs_addRowsForPage(array $pageRow, $pageTitleAndIcon) |
@@ -2224,7 +2229,7 @@ discard block |
||
2224 | 2229 | * @param int $countInARun |
2225 | 2230 | * @param int $sleepTime |
2226 | 2231 | * @param int $sleepAfterFinish |
2227 | - * @return string |
|
2232 | + * @return integer |
|
2228 | 2233 | */ |
2229 | 2234 | public function CLI_run($countInARun, $sleepTime, $sleepAfterFinish) |
2230 | 2235 | { |
@@ -2526,6 +2531,7 @@ discard block |
||
2526 | 2531 | * Used to determine timeouts and to ensure a proper cleanup if there's a timeout |
2527 | 2532 | * |
2528 | 2533 | * @param string identification string for the process |
2534 | + * @param string $pid |
|
2529 | 2535 | * @return boolean determines if the process is still active / has resources |
2530 | 2536 | * |
2531 | 2537 | * TODO: Please consider moving this to Domain Model for Process or in ProcessRepository |
@@ -2687,7 +2693,7 @@ discard block |
||
2687 | 2693 | * Check whether the Crawling Protocol should be http or https |
2688 | 2694 | * |
2689 | 2695 | * @param $crawlerConfiguration |
2690 | - * @param $pageConfiguration |
|
2696 | + * @param boolean $pageConfiguration |
|
2691 | 2697 | * |
2692 | 2698 | * @return bool |
2693 | 2699 | */ |