| Metric | Value |
| --- | --- |
| Total Complexity | 1 |
| Total Lines | 56 |
| Duplicated Lines | 0 % |
| Changes | 1 |
| Bugs | 1 |
| Features | 0 |
```python
#!/usr/bin/env python

import unittest

# Import path follows pyspider's project layout.
from pyspider.libs.base_handler import BaseHandler


class TestBaseHandler(unittest.TestCase):

    # A representative HTTP fetch task, used as the base of the merge test.
    sample_task_http = {
        'taskid': 'taskid',
        'project': 'project',
        'url': '',
        'fetch': {
            'method': 'GET',
            'headers': {
                'Cookie': 'a=b',
                'a': 'b'
            },
            'cookies': {
                'c': 'd',
            },
            'timeout': 60,
            'save': 'abc',
        },
        'process': {
            'callback': 'callback',
            'save': [1, 2, 3],
        },
    }

    def test_task_join_crawl_config(self):
        task = dict(self.sample_task_http)
        crawl_config = {
            'taskid': 'xxxx',  # should not affect the final task
            'proxy': 'username:password@hostname:port',  # should add proxy
            'headers': {  # should merge headers
                'Cookie': 'abc',  # should not affect cookie
                'c': 'd',  # should add header c
            }
        }

        # Merge the project-wide crawl_config into the task and check the result.
        ret = BaseHandler.task_join_crawl_config(task, crawl_config)
        self.assertDictEqual(ret, {
            'taskid': 'taskid',
            'project': 'project',
            'url': '',
            'fetch': {
                'method': 'GET',
                'proxy': 'username:password@hostname:port',
                'headers': {
                    'Cookie': 'a=b',
                    'a': 'b',
                    'c': 'd'
                },
                'cookies': {
                    'c': 'd',
                },
                'timeout': 60,
                'save': 'abc',
            },
            'process': {
                'callback': 'callback',
                'save': [1, 2, 3],
            },
        })
```
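For context, the merge semantics this test pins down can be summarized in a few lines. The sketch below is a hypothetical re-implementation that satisfies exactly the assertions above; it is not pyspider's actual `task_join_crawl_config`, which handles more fields, so treat the reused function name and the field list (`proxy`, `timeout`, `save`) as assumptions for illustration.

```python
import copy


def task_join_crawl_config(task, crawl_config):
    """Merge a project-wide crawl_config into a task; task values win.

    A minimal sketch matching the test's expectations, not pyspider's
    real implementation.
    """
    task = copy.deepcopy(task)  # never mutate the caller's dict
    fetch = task.setdefault('fetch', {})

    # Headers merge key-by-key; the task's own headers take precedence,
    # which is why 'Cookie' stays 'a=b' in the expected result above.
    headers = dict(crawl_config.get('headers', {}))
    headers.update(fetch.get('headers', {}))
    if headers:
        fetch['headers'] = headers

    # Scalar fetch options are copied in only when the task lacks them.
    # The exact field list here is an assumption for illustration.
    for key in ('proxy', 'timeout', 'save'):
        if key in crawl_config and key not in fetch:
            fetch[key] = crawl_config[key]

    # Other top-level keys in crawl_config (e.g. 'taskid') are deliberately
    # ignored, so they cannot overwrite the task's identity.
    return task
```

The design point the test encodes is precedence: per-task settings always override the project-wide crawl_config, which is why the merged result keeps `'Cookie': 'a=b'` and the original `'taskid'` while still gaining the `proxy` setting and the extra `c` header.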