<?php
/**
 * Created by PhpStorm.
 * User: lenon
 * Date: 03/04/16
 * Time: 19:24.
 */

namespace Aszone\SearchHacking;

use Symfony\Component\DomCrawler\Crawler;
use Aszone\FakeHeaders\FakeHeaders;
use GuzzleHttp\Client;

class Utils
{
    /**
     * Filters a crawled list of <a> elements down to unique, non-blacklisted URLs.
     *
     * @param \DOMElement[]|\Traversable $links
     *
     * @return array
     */
    public static function sanitazeLinks($links = array())
    {
        $hrefs = array();

        if (!empty($links)) {
            foreach ($links as $keyLink => $valueLink) {
                // Normalize the href and discard blacklisted hosts.
                $url = static::clearLink($valueLink->getAttribute('href'));
                $validResultOfBlackList = static::checkBlacklist($url);

                if (!$validResultOfBlackList && $url) {
                    $hrefs[] = $url;
                }
            }

            $hrefs = array_unique($hrefs);
        }

        return $hrefs;
    }

    /**
     * Returns true when the host part of the given URL appears in resources/Blacklist.ini.
     *
     * @param string $url
     *
     * @return bool
     */
    public static function checkBlacklist($url = '')
    {
        if (!empty($url)) {
            // Extract the host part of the URL.
            preg_match("/(https?\:\/\/|^)(.+?)\//", $url, $matches, PREG_OFFSET_CAPTURE);
            $url = '';

            if (isset($matches[2][0])) {
                $url = $matches[2][0];
            }

            $ini_blacklist = parse_ini_file(__DIR__.'/../resources/Blacklist.ini');

            // Strict comparison: array_search() may return a falsy key for a real hit.
            $key = array_search($url, $ini_blacklist);

            if ($key !== false) {
                return true;
            }
        }

        return false;
    }

    /**
     * Strips search-engine cache and redirect wrappers from a result link and
     * returns the target URL, or false when the link points back to the
     * search engine itself or to another well-known host.
     *
     * @param string $url
     *
     * @return string|false
     */
    public static function clearLink($url = '')
    {
        if (!empty($url)) {
            // Google cache link, URL-encoded form.
            preg_match('/search%3Fq%3Dcache:.+?:(.+?)%252B/', $url, $matches, PREG_OFFSET_CAPTURE);

            if (isset($matches[1][0])) {
                return $matches[1][0];
            }

            // Google cache link, plain form.
            preg_match("/search\?q=cache:.+?:(.+?)\+/", $url, $matches, PREG_OFFSET_CAPTURE);

            if (isset($matches[1][0])) {
                return $matches[1][0];
            }

            // Redirect links that carry the target in a "url=" parameter.
            preg_match('/url=(.*?)&tld/', $url, $matches, PREG_OFFSET_CAPTURE);

            if (isset($matches[1][0])) {
                return urldecode($matches[1][0]);
            }

            // MSN/Bing redirect: the target URL is carried in the "u=" parameter,
            // which is capture group 3 of this pattern.
            preg_match("/^((http|https):\/\/|www).+?\/?ld=.+?\&u=(.+?)\n/", $url, $matches, PREG_OFFSET_CAPTURE);

            if (isset($matches[3][0])) {
                return urldecode($matches[3][0]);
            }

            // Plain absolute URL: accept it unless it belongs to a search engine
            // or another host we never want to return.
            preg_match("/^((http|https):\/\/|www)(.+?)\//", $url, $matches, PREG_OFFSET_CAPTURE);

            if (isset($matches[0][0])) {
                $check = array();
                $check[] = strpos($url, 'www.blogger.com');
                $check[] = strpos($url, 'youtube.com');
                $check[] = strpos($url, '.google.');
                $check[] = strpos($url, 'yandex.ru');
                $check[] = strpos($url, 'microsoft.com');
                $check[] = strpos($url, 'microsofttranslator.com');
                $check[] = strpos($url, '.yahoo.com');
                $check[] = strpos($url, 'yahoo.uservoice.com');
                $check[] = strpos($url, 'www.mozilla.org');
                $check[] = strpos($url, 'www.facebook.com');
                $check[] = strpos($url, 'go.mail.ru');
                $check[] = strpos($url, '/search/srpcache?p=');
                $check[] = strpos($url, 'flickr.com');

                $tmp = array_filter($check);

                if (empty($tmp)) {
                    return trim($url);
                }
            }
        }

        return false;
    }

    /**
     * Parses an HTML body and returns all <a> elements as a Crawler.
     */
    public static function getLinks($body)
    {
        $crawler = new Crawler($body);

        return $crawler->filter('a');
    }

    /**
     * Fetches the body of a search results page with a fake User-Agent,
     * optionally through a proxy. On failure the error message is returned
     * instead of the page body.
     *
     * @param string       $urlOfSearch
     * @param string|false $proxy
     *
     * @return string
     */
    public static function getBody($urlOfSearch, $proxy)
    {
        $header = new FakeHeaders();

        try {
            $client = new Client([
                'defaults' => [
                    'headers' => ['User-Agent' => $header->getUserAgent()],
                    'proxy' => $proxy,
                    'timeout' => 60,
                ],
            ]);

            return $client->get($urlOfSearch)->getBody()->getContents();
        } catch (\Exception $e) {
            $message = 'ERROR : '.$e->getMessage()."\n";

            if ($proxy == false) {
                $message .= "Your IP is blocked, we are using a proxy now...\n";
            }

            return $message;
        }
    }

    /**
     * POSTs the target URL to a "virgin" web proxy and returns the proxied
     * page body. When the request fails, the string 'repeat' is returned so
     * the caller can retry with another proxy.
     *
     * @param string       $urlOfSearch
     * @param string       $urlProxie
     * @param string|false $proxy
     *
     * @return string
     */
    public static function getBodyByVirginProxies($urlOfSearch, $urlProxie, $proxy)
    {
        $header = new FakeHeaders();

        echo 'Proxy : '.$urlProxie."\n";

        $dataToPost = ['body' => ['url' => $urlOfSearch]];

        $valid = true;
        while ($valid) {
            try {
                $client = new Client([
                    'defaults' => [
                        'headers' => ['User-Agent' => $header->getUserAgent()],
                        'proxy' => $proxy,
                        'timeout' => 60,
                    ],
                ]);

                $res = $client->post($urlProxie, $dataToPost);
                $body = $res->getBody()->getContents();

                // TODO: check whether a new Tor IP should be requested here.
                $valid = false;
            } catch (\Exception $e) {
                echo 'ERROR : '.$e->getMessage()."\n";
                if ($proxy == false) {
                    echo "This virgin proxy's IP is blocked, we are using a proxy now...\n";
                }

                return 'repeat';
            }
        }

        return $body;
    }
}
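
/*
 * A minimal usage sketch, not part of the original class: it assumes a
 * Composer autoloader at vendor/autoload.php, an example Google search URL,
 * and no proxy ($proxy = false). Note that getBody() may return an error
 * message string on failure, which getLinks() would simply parse as markup
 * with no <a> elements.
 *
 *     require __DIR__.'/../vendor/autoload.php';
 *
 *     use Aszone\SearchHacking\Utils;
 *
 *     $body  = Utils::getBody('https://www.google.com/search?q=site:example.com', false);
 *     $links = Utils::getLinks($body);       // Crawler with every <a> element
 *     $hrefs = Utils::sanitazeLinks($links); // unique, non-blacklisted target URLs
 *
 *     print_r($hrefs);
 */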