| Metric | Value |
| --- | --- |
| Conditions | 47 |
| Total Lines | 158 |
| Lines | 0 |
| Ratio | 0 % |
Small methods make your code easier to understand, especially when combined with a good name. And if a method is small, finding a good name for it is usually much easier.

For example, if you find yourself adding comments to a method's body, that is usually a sign that the commented part should be extracted into a new method, with the comment as a starting point for its name.
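A minimal before/after sketch of this idea (the function and header names here are hypothetical, not taken from pyspider):

```python
# Before: the comment carries the intent of the next few lines.
def build_request(url, headers):
    # strip hop-by-hop headers that must not be forwarded
    for name in ('Connection', 'Keep-Alive', 'Transfer-Encoding'):
        headers.pop(name, None)
    return url, headers


# After: the comment became the method name, and build_request reads as a summary.
# (This second definition replaces the one above; both are shown only for comparison.)
def strip_hop_by_hop_headers(headers):
    for name in ('Connection', 'Keep-Alive', 'Transfer-Encoding'):
        headers.pop(name, None)
    return headers


def build_request(url, headers):
    return url, strip_hop_by_hop_headers(headers)


print(build_request('http://example.com', {'Connection': 'close', 'Accept': '*/*'}))
```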
Commonly applied refactorings include:

- Extract Method

If many parameters/temporary variables are present:

- Replace Temp with Query
- Introduce Parameter Object
- Replace Method with Method Object (see the sketch after this list)
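As one illustration, Introduce Parameter Object replaces a long parameter list with a single value object. The sketch below is hypothetical; `FetchOptions` and `fetch` are made-up names, and the defaults merely mirror the options visible in the listing further down:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class FetchOptions:
    """Groups the knobs that would otherwise travel as separate parameters."""
    timeout: float = 120.0
    allow_redirects: bool = True
    max_redirects: int = 5
    proxy: Optional[str] = None


def fetch(url: str, options: FetchOptions) -> str:
    # One parameter object instead of four loose arguments.
    return "GET %s (timeout=%s, proxy=%s)" % (url, options.timeout, options.proxy)


print(fetch("http://example.com", FetchOptions(proxy="localhost:8080")))
```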
Complex methods like pyspider.fetcher.Fetcher.http_fetch() often do a lot of different things, and so does the class around them. To break such a class down, we need to identify a cohesive component within it. A common approach is to look for fields, variables, or methods that share the same prefixes or suffixes.
Once you have determined the members that belong together, you can apply the Extract Class refactoring; for a single oversized method, applying Extract Method to each cohesive block achieves much the same effect. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often quicker to apply.
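In the listing below, the `proxy_username`, `proxy_password`, `proxy_host`, and `proxy_port` keys share a prefix and change together, which makes them a natural candidate for such a component. A minimal sketch of pulling them into their own class; `ProxyConfig` and its methods are made-up names, not part of pyspider, and the Python 2 `encode('utf8')` handling from the original is deliberately omitted:

```python
from urllib.parse import urlsplit


class ProxyConfig:
    """Holds the proxy_* settings that http_fetch currently spreads over one dict."""

    def __init__(self, proxy_string):
        if '://' not in proxy_string:
            proxy_string = 'http://' + proxy_string
        parts = urlsplit(proxy_string)
        self.username = parts.username
        self.password = parts.password
        self.host = parts.hostname
        self.port = parts.port or 8080

    def apply_to(self, fetch):
        # Copy the settings into the keyword arguments expected by the HTTP client.
        if self.username:
            fetch['proxy_username'] = self.username
        if self.password:
            fetch['proxy_password'] = self.password
        fetch['proxy_host'] = self.host
        fetch['proxy_port'] = self.port
        return fetch


print(ProxyConfig('user:pass@proxy.local:3128').apply_to({'url': 'http://example.com'}))
```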
```python
#!/usr/bin/env python
# ... lines 2-179 omitted ...

    def http_fetch(self, url, task, callback):
        '''HTTP fetcher'''
        start_time = time.time()

        self.on_fetch('http', task)
        fetch = copy.deepcopy(self.default_options)
        fetch['url'] = url
        fetch['headers'] = tornado.httputil.HTTPHeaders(fetch['headers'])
        fetch['headers']['User-Agent'] = self.user_agent
        task_fetch = task.get('fetch', {})
        for each in self.allowed_options:
            if each in task_fetch:
                fetch[each] = task_fetch[each]
        fetch['headers'].update(task_fetch.get('headers', {}))

        if task.get('track'):
            track_headers = tornado.httputil.HTTPHeaders(
                task.get('track', {}).get('fetch', {}).get('headers') or {})
            track_ok = task.get('track', {}).get('process', {}).get('ok', False)
        else:
            track_headers = {}
            track_ok = False
        # proxy
        proxy_string = None
        if isinstance(task_fetch.get('proxy'), six.string_types):
            proxy_string = task_fetch['proxy']
        elif self.proxy and task_fetch.get('proxy', True):
            proxy_string = self.proxy
        if proxy_string:
            if '://' not in proxy_string:
                proxy_string = 'http://' + proxy_string
            proxy_splited = urlsplit(proxy_string)
            if proxy_splited.username:
                fetch['proxy_username'] = proxy_splited.username
                if six.PY2:
                    fetch['proxy_username'] = fetch['proxy_username'].encode('utf8')
            if proxy_splited.password:
                fetch['proxy_password'] = proxy_splited.password
                if six.PY2:
                    fetch['proxy_password'] = fetch['proxy_password'].encode('utf8')
            fetch['proxy_host'] = proxy_splited.hostname.encode('utf8')
            if six.PY2:
                fetch['proxy_host'] = fetch['proxy_host'].encode('utf8')
            fetch['proxy_port'] = proxy_splited.port or 8080

        # etag
        if task_fetch.get('etag', True):
            _t = None
            if isinstance(task_fetch.get('etag'), six.string_types):
                _t = task_fetch.get('etag')
            elif track_ok:
                _t = track_headers.get('etag')
            if _t and 'If-None-Match' not in fetch['headers']:
                fetch['headers']['If-None-Match'] = _t
        # last modifed
        if task_fetch.get('last_modified', True):
            _t = None
            if isinstance(task_fetch.get('last_modifed'), six.string_types):
                _t = task_fetch.get('last_modifed')
            elif track_ok:
                _t = track_headers.get('last-modified')
            if _t and 'If-Modified-Since' not in fetch['headers']:
                fetch['headers']['If-Modified-Since'] = _t

        session = cookies.RequestsCookieJar()

        # fix for tornado request obj
        if 'Cookie' in fetch['headers']:
            c = http_cookies.SimpleCookie()
            try:
                c.load(fetch['headers']['Cookie'])
            except AttributeError:
                c.load(utils.utf8(fetch['headers']['Cookie']))
            for key in c:
                session.set(key, c[key])
            del fetch['headers']['Cookie']
        fetch['follow_redirects'] = False
        if 'timeout' in fetch:
            fetch['connect_timeout'] = fetch['request_timeout'] = fetch['timeout']
            del fetch['timeout']
        if 'data' in fetch:
            fetch['body'] = fetch['data']
            del fetch['data']
        if 'cookies' in fetch:
            session.update(fetch['cookies'])
            del fetch['cookies']

        store = {}
        store['max_redirects'] = task_fetch.get('max_redirects', 5)

        def handle_response(response):
            extract_cookies_to_jar(session, response.request, response.headers)
            if (response.code in (301, 302, 303, 307)
                    and response.headers.get('Location')
                    and task_fetch.get('allow_redirects', True)):
                if store['max_redirects'] <= 0:
                    error = tornado.httpclient.HTTPError(
                        599, 'Maximum (%d) redirects followed' % task_fetch.get('max_redirects', 5),
                        response)
                    return handle_error(error)
                if response.code in (302, 303):
                    fetch['method'] = 'GET'
                    if 'body' in fetch:
                        del fetch['body']
                fetch['url'] = urljoin(fetch['url'], response.headers['Location'])
                fetch['request_timeout'] -= time.time() - start_time
                if fetch['request_timeout'] < 0:
                    fetch['request_timeout'] = 0.1
                fetch['connect_timeout'] = fetch['request_timeout']
                store['max_redirects'] -= 1
                return make_request(fetch)

            result = {}
            result['orig_url'] = url
            result['content'] = response.body or ''
            result['headers'] = dict(response.headers)
            result['status_code'] = response.code
            result['url'] = response.effective_url or url
            result['cookies'] = session.get_dict()
            result['time'] = time.time() - start_time
            result['save'] = task_fetch.get('save')
            if response.error:
                result['error'] = utils.text(response.error)
            if 200 <= response.code < 300:
                logger.info("[%d] %s:%s %s %.2fs", response.code,
                            task.get('project'), task.get('taskid'),
                            url, result['time'])
            else:
                logger.warning("[%d] %s:%s %s %.2fs", response.code,
                               task.get('project'), task.get('taskid'),
                               url, result['time'])
            callback('http', task, result)
            self.on_result('http', task, result)
            return task, result

        handle_error = lambda x: self.handle_error('http',
                                                   url, task, start_time, callback, x)

        def make_request(fetch):
            try:
                request = tornado.httpclient.HTTPRequest(**fetch)
                cookie_header = cookies.get_cookie_header(session, request)
                if cookie_header:
                    request.headers['Cookie'] = cookie_header
                if self.async:
                    self.http_client.fetch(request, handle_response)
                else:
                    return handle_response(self.http_client.fetch(request))
            except tornado.httpclient.HTTPError as e:
                if e.response:
                    return handle_response(e.response)
                else:
                    return handle_error(e)
            except Exception as e:
                logger.exception(fetch)
                return handle_error(e)

        return make_request(fetch)
```
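As a concrete starting point, the `# etag` and `# last modifed` blocks above only compute two conditional-request headers and could be extracted into one helper. Below is a minimal sketch, assuming a plain-dict interface rather than pyspider's internals; the name `conditional_headers` is made up, and `str` stands in for the original's `six.string_types`:

```python
def conditional_headers(task_fetch, track_ok, track_headers):
    """Compute If-None-Match / If-Modified-Since from task options or the previous crawl."""
    headers = {}
    # etag: an explicit value in the task wins, otherwise reuse the last response's ETag
    if task_fetch.get('etag', True):
        etag = task_fetch['etag'] if isinstance(task_fetch.get('etag'), str) else None
        if etag is None and track_ok:
            etag = track_headers.get('etag')
        if etag:
            headers['If-None-Match'] = etag
    # last modified: same pattern (note the original code spells the option 'last_modifed')
    if task_fetch.get('last_modified', True):
        lm = task_fetch['last_modified'] if isinstance(task_fetch.get('last_modified'), str) else None
        if lm is None and track_ok:
            lm = track_headers.get('last-modified')
        if lm:
            headers['If-Modified-Since'] = lm
    return headers


# http_fetch() would then shrink to something like:
#     fetch['headers'].update(conditional_headers(task_fetch, track_ok, track_headers))
print(conditional_headers({}, True, {'etag': '"abc123"',
                                     'last-modified': 'Tue, 01 Jan 2019 00:00:00 GMT'}))
```

Repeating the same move for the proxy setup, the cookie fix-up, and the redirect handling would leave http_fetch() as a short sequence of well-named steps, which is exactly the shape the guidance above is asking for.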