| Metric | Value |
| --- | --- |
| Total Complexity | 58 |
| Total Lines | 355 |
| Duplicated Lines | 0 % |
Complex classes like st2debug.cmd.DebugInfoCollector often do a lot of different things. To break such a class down, we need to identify a cohesive component within it. A common way to find such a component is to look for fields and methods that share the same prefixes or suffixes.
Once you have determined which fields belong together, you can apply the Extract Class refactoring, as in the sketch below. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often quicker to apply.
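For illustration, one cohesive component in this class is the encrypt/upload concern: the fields `gpg_key`, `gpg_key_fingerprint`, `s3_bucket_url` and `company_name` are only used by `encrypt_archive()` and `upload_archive()`. Below is a minimal sketch of Extract Class applied to that group; the `ArchiveSubmitter` name and its constructor signature are hypothetical, and the method bodies simply mirror the existing ones.

```python
import os
import httplib  # the existing module is Python 2 code, so it uses httplib rather than http.client

import gnupg
import requests


class ArchiveSubmitter(object):
    """Hypothetical component extracted from DebugInfoCollector.

    It owns the fields that share the encryption/upload concern so the
    collector no longer has to carry them itself.
    """

    def __init__(self, gpg_key, gpg_key_fingerprint, s3_bucket_url, company_name, debug=False):
        self.gpg_key = gpg_key
        self.gpg_key_fingerprint = gpg_key_fingerprint
        self.s3_bucket_url = s3_bucket_url
        self.company_name = company_name
        self.debug = debug

    def encrypt_archive(self, archive_file_path):
        # Same logic as DebugInfoCollector.encrypt_archive, reading the GPG
        # settings from this object instead of the collector.
        gpg = gnupg.GPG(verbose=self.debug)
        gpg.import_keys(self.gpg_key)
        output_path = os.path.join('/tmp', os.path.basename(archive_file_path) + '.asc')
        with open(archive_file_path, 'rb') as fp:
            gpg.encrypt_file(file=fp,
                             recipients=self.gpg_key_fingerprint,
                             always_trust=True,
                             output=output_path)
        return output_path

    def upload_archive(self, archive_file_path):
        # Same logic as DebugInfoCollector.upload_archive.
        url = self.s3_bucket_url + os.path.basename(archive_file_path)
        with open(archive_file_path, 'rb') as fp:
            response = requests.put(url=url, files={'file': fp})
        assert response.status_code == httplib.OK
```

`DebugInfoCollector.__init__` would then build one `ArchiveSubmitter` from the config values and `run()` would delegate to it, removing four fields and two methods from the class and lowering its complexity score.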
1 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more |
132 | class DebugInfoCollector(object): |
133 |     def __init__(self, include_logs, include_configs, include_content, include_system_info, |
134 |                  include_shell_commands=False, user_info=None, debug=False, config_file=None, |
135 |                  output_path=None): |
136 |         self.include_logs = include_logs |
137 |         self.include_configs = include_configs |
138 |         self.include_content = include_content |
139 |         self.include_system_info = include_system_info |
140 |         self.include_shell_commands = include_shell_commands |
141 |         self.user_info = user_info |
142 |         self.debug = debug |
143 |         self.output_path = output_path |
144 |  |
145 |         config_file = config_file or {} |
146 |         self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH) |
147 |         self.mistral_config_file_path = config_file.get('mistral_config_file_path', |
148 |                                                          MISTRAL_CONFIG_FILE_PATH) |
149 |         self.log_files_paths = config_file.get('log_files_paths', LOG_FILE_PATHS[:]) |
150 |         self.gpg_key = config_file.get('gpg_key', GPG_KEY) |
151 |         self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT) |
152 |         self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL) |
153 |         self.company_name = config_file.get('company_name', COMPANY_NAME) |
154 |         self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS) |
155 |  |
156 |         self.st2_config_file_name = os.path.basename(self.st2_config_file_path) |
157 |         self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path) |
158 |         self.config_file_paths = [ |
159 |             self.st2_config_file_path, |
160 |             self.mistral_config_file_path |
161 |         ] |
162 |  |
163 |     def run(self, encrypt=False, upload=False, existing_file=None): |
164 |         temp_files = [] |
165 |  |
166 |         try: |
167 |             if existing_file: |
168 |                 working_file = existing_file |
169 |             else: |
170 |                 # Create a new archive if an existing file hasn't been provided |
171 |                 working_file = self.create_archive() |
172 |                 if not encrypt and not upload: |
173 |                     LOG.info('Debug tarball successfully ' |
174 |                              'generated and can be reviewed at: %s' % working_file) |
175 |                 else: |
176 |                     temp_files.append(working_file) |
177 |  |
178 |             if encrypt: |
179 |                 working_file = self.encrypt_archive(archive_file_path=working_file) |
180 |                 if not upload: |
181 |                     LOG.info('Encrypted debug tarball successfully generated at: %s' % |
182 |                              working_file) |
183 |                 else: |
184 |                     temp_files.append(working_file) |
185 |  |
186 |             if upload: |
187 |                 self.upload_archive(archive_file_path=working_file) |
188 |                 tarball_name = os.path.basename(working_file) |
189 |                 LOG.info('Debug tarball successfully uploaded to %s (name=%s)' % |
190 |                          (self.company_name, tarball_name)) |
191 |                 LOG.info('When communicating with support, please let them know the ' |
192 |                          'tarball name - %s' % tarball_name) |
193 |         finally: |
194 |             # Remove temp files |
195 |             for temp_file in temp_files: |
196 |                 assert temp_file.startswith('/tmp') |
197 |                 remove_file(file_path=temp_file) |
198 |  |
199 |     def create_archive(self): |
200 |         """ |
201 |         Create an archive with debugging information. |
202 |  |
203 |         :return: Path to the generated archive. |
204 |         :rtype: ``str`` |
205 |         """ |
206 |  |
207 |         try: |
208 |             # 1. Create temporary directory with the final directory structure where we will move |
209 |             # files which will be processed and included in the tarball |
210 |             temp_dir_path = self.create_temp_directories() |
211 |  |
212 |             # Prepend temp_dir_path to OUTPUT_PATHS |
213 |             output_paths = {} |
214 |             for key, path in OUTPUT_PATHS.iteritems(): |
215 |                 output_paths[key] = os.path.join(temp_dir_path, path) |
216 |  |
217 |             # 2. Moves all the files to the temporary directory |
218 |             LOG.info('Collecting files...') |
219 |             if self.include_logs: |
220 |                 self.collect_logs(output_paths['logs']) |
221 |             if self.include_configs: |
222 |                 self.collect_config_files(output_paths['configs']) |
223 |             if self.include_content: |
224 |                 self.collect_pack_content(output_paths['content']) |
225 |             if self.include_system_info: |
226 |                 self.add_system_information(output_paths['system_info']) |
227 |             if self.user_info: |
228 |                 self.add_user_info(output_paths['user_info']) |
229 |             if self.include_shell_commands: |
230 |                 self.add_shell_command_output(output_paths['commands']) |
231 |  |
232 |             # 3. Create a tarball |
233 |             return self.create_tarball(temp_dir_path) |
234 |  |
235 |         except Exception as e: |
236 |             LOG.exception('Failed to generate tarball', exc_info=True) |
237 |             raise e |
238 |  |
239 |     def encrypt_archive(self, archive_file_path): |
240 |         """ |
241 |         Encrypt archive with debugging information using our public key. |
242 |  |
243 |         :param archive_file_path: Path to the non-encrypted tarball file. |
244 |         :type archive_file_path: ``str`` |
245 |  |
246 |         :return: Path to the encrypted archive. |
247 |         :rtype: ``str`` |
248 |         """ |
249 |         try: |
250 |             assert archive_file_path.endswith('.tar.gz') |
251 |  |
252 |             LOG.info('Encrypting tarball...') |
253 |             gpg = gnupg.GPG(verbose=self.debug) |
254 |  |
255 |             # Import our public key |
256 |             import_result = gpg.import_keys(self.gpg_key) |
257 |             # pylint: disable=no-member |
258 |             assert import_result.count == 1 |
259 |  |
260 |             encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc' |
261 |             encrypted_archive_output_file_path = os.path.join('/tmp', |
262 |                                                               encrypted_archive_output_file_name) |
263 |             with open(archive_file_path, 'rb') as fp: |
264 |                 gpg.encrypt_file(file=fp, |
265 |                                  recipients=self.gpg_key_fingerprint, |
266 |                                  always_trust=True, |
267 |                                  output=encrypted_archive_output_file_path) |
268 |             return encrypted_archive_output_file_path |
269 |         except Exception as e: |
270 |             LOG.exception('Failed to encrypt archive', exc_info=True) |
271 |             raise e |
272 |  |
273 |     def upload_archive(self, archive_file_path): |
274 |         try: |
275 |             assert archive_file_path.endswith('.asc') |
276 |  |
277 |             LOG.debug('Uploading tarball...') |
278 |             file_name = os.path.basename(archive_file_path) |
279 |             url = self.s3_bucket_url + file_name |
280 |             assert url.startswith('https://') |
281 |  |
282 |             with open(archive_file_path, 'rb') as fp: |
283 |                 response = requests.put(url=url, files={'file': fp}) |
284 |             assert response.status_code == httplib.OK |
285 |         except Exception as e: |
286 |             LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True) |
287 |             raise e |
288 |  |
289 |     def collect_logs(self, output_path): |
290 |         LOG.debug('Including log files') |
291 |         for file_path_glob in self.log_files_paths: |
292 |             log_file_list = get_full_file_list(file_path_glob=file_path_glob) |
293 |             copy_files(file_paths=log_file_list, destination=output_path) |
294 |  |
295 |     def collect_config_files(self, output_path): |
296 |         LOG.debug('Including config files') |
297 |         copy_files(file_paths=self.config_file_paths, destination=output_path) |
298 |  |
299 |         st2_config_path = os.path.join(output_path, self.st2_config_file_name) |
300 |         process_st2_config(config_path=st2_config_path) |
301 |  |
302 |         mistral_config_path = os.path.join(output_path, self.mistral_config_file_name) |
303 |         process_mistral_config(config_path=mistral_config_path) |
304 |  |
305 |     @staticmethod |
306 |     def collect_pack_content(output_path): |
307 |         LOG.debug('Including content') |
308 |  |
309 |         packs_base_paths = get_packs_base_paths() |
310 |         for index, packs_base_path in enumerate(packs_base_paths, 1): |
311 |             dst = os.path.join(output_path, 'dir-%s' % index) |
312 |  |
313 |             try: |
314 |                 shutil.copytree(src=packs_base_path, dst=dst) |
315 |             except IOError: |
316 |                 continue |
317 |  |
318 |         base_pack_dirs = get_dirs_in_path(file_path=output_path) |
319 |  |
320 |         for base_pack_dir in base_pack_dirs: |
321 |             pack_dirs = get_dirs_in_path(file_path=base_pack_dir) |
322 |  |
323 |             for pack_dir in pack_dirs: |
324 |                 process_content_pack_dir(pack_dir=pack_dir) |
325 |  |
326 |     def add_system_information(self, output_path): |
327 |         LOG.debug('Including system info') |
328 |  |
329 |         system_information = yaml.dump(self.get_system_information(), |
330 |                                        default_flow_style=False) |
331 |  |
332 |         with open(output_path, 'w') as fp: |
333 |             fp.write(system_information) |
334 |  |
335 |     def add_user_info(self, output_path): |
336 |         LOG.debug('Including user info') |
337 |         user_info = yaml.dump(self.user_info, default_flow_style=False) |
338 |  |
339 |         with open(output_path, 'w') as fp: |
340 |             fp.write(user_info) |
341 |  |
342 |     def add_shell_command_output(self, output_path): |
343 |         """" |
344 |         Get output of the required shell command and redirect the output to a file. |
345 |         :param output_path: Directory where output files will be written |
346 |         """ |
347 |         LOG.debug('Including the required shell commands output files') |
348 |         for cmd in self.shell_commands: |
349 |             output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd)) |
350 |             exit_code, stdout, stderr = run_command(cmd=cmd, shell=True) |
351 |             with open(output_file, 'w') as fp: |
352 |                 fp.write('[BEGIN STDOUT]\n') |
353 |                 fp.write(stdout) |
354 |                 fp.write('[END STDOUT]\n') |
355 |                 fp.write('[BEGIN STDERR]\n') |
356 |                 fp.write(stderr) |
357 |                 fp.write('[END STDERR]') |
358 |  |
359 |     def create_tarball(self, temp_dir_path): |
360 |         LOG.info('Creating tarball...') |
361 |         if self.output_path: |
362 |             output_file_path = self.output_path |
363 |             output_file_name = os.path.basename(output_file_path) |
364 |         else: |
365 |             date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT) |
366 |             values = {'hostname': socket.gethostname(), 'date': date} |
367 |  |
368 |             output_file_name = OUTPUT_FILENAME_TEMPLATE % values |
369 |             output_file_path = os.path.join('/tmp', output_file_name) |
370 |  |
371 |         with tarfile.open(output_file_path, 'w:gz') as tar: |
372 |             tar.add(temp_dir_path, arcname=output_file_name.split(".")[0]) |
373 |  |
374 |         return output_file_path |
375 |  |
376 |     @staticmethod |
377 |     def create_temp_directories(): |
378 |         temp_dir_path = tempfile.mkdtemp() |
379 |  |
380 |         for directory_name in DIRECTORY_STRUCTURE: |
381 |             full_path = os.path.join(temp_dir_path, directory_name) |
382 |             os.mkdir(full_path) |
383 |  |
384 |         return temp_dir_path |
385 |  |
386 |     @staticmethod |
387 |     def format_output_filename(cmd): |
388 |         """" |
389 |         Format the file name such as removing white spaces and special characters. |
390 |         :param cmd: shell command |
391 |         :return: formatted output file name |
392 |         :rtype: ``str`` |
393 |         """ |
394 |         for char in cmd: |
395 |             if char in ' !@#$%^&*()[]{};:,./<>?\|`~=+"': |
396 |                 cmd = cmd.replace(char, "") |
397 |         return cmd |
398 |  |
399 |     @staticmethod |
400 |     def get_system_information(): |
401 |         """ |
402 |         Retrieve system information which is included in the report. |
403 |  |
404 |         :rtype: ``dict`` |
405 |         """ |
406 |         system_information = { |
407 |             'hostname': socket.gethostname(), |
408 |             'operating_system': {}, |
409 |             'hardware': { |
410 |                 'cpu': {}, |
411 |                 'memory': {} |
412 |             }, |
413 |             'python': {}, |
414 |             'stackstorm': {}, |
415 |             'mistral': {} |
416 |         } |
417 |  |
418 |         # Operating system information |
419 |         system_information['operating_system']['system'] = platform.system() |
420 |         system_information['operating_system']['release'] = platform.release() |
421 |         system_information['operating_system']['operating_system'] = platform.platform() |
422 |         system_information['operating_system']['platform'] = platform.system() |
423 |         system_information['operating_system']['architecture'] = ' '.join(platform.architecture()) |
424 |  |
425 |         if platform.system().lower() == 'linux': |
426 |             distribution = ' '.join(platform.linux_distribution()) |
427 |             system_information['operating_system']['distribution'] = distribution |
428 |  |
429 |         system_information['python']['version'] = sys.version.split('\n')[0] |
430 |  |
431 |         # Hardware information |
432 |         cpu_info = get_cpu_info() |
433 |  |
434 |         if cpu_info: |
435 |             core_count = len(cpu_info) |
436 |             model = cpu_info[0]['model_name'] |
437 |             system_information['hardware']['cpu'] = { |
438 |                 'core_count': core_count, |
439 |                 'model_name': model |
440 |             } |
441 |         else: |
442 |             # Unsupported platform |
443 |             system_information['hardware']['cpu'] = 'unsupported platform' |
444 |  |
445 |         memory_info = get_memory_info() |
446 |  |
447 |         if memory_info: |
448 |             total = memory_info['MemTotal'] / 1024 |
449 |             free = memory_info['MemFree'] / 1024 |
450 |             used = (total - free) |
451 |             system_information['hardware']['memory'] = { |
452 |                 'total': total, |
453 |                 'used': used, |
454 |                 'free': free |
455 |             } |
456 |         else: |
457 |             # Unsupported platform |
458 |             system_information['hardware']['memory'] = 'unsupported platform' |
459 |  |
460 |         # StackStorm information |
461 |         system_information['stackstorm']['version'] = st2_version |
462 |  |
463 |         st2common_path = st2common.__file__ |
464 |         st2common_path = os.path.dirname(st2common_path) |
465 |  |
466 |         if 'st2common/st2common' in st2common_path: |
467 |             # Assume we are running source install |
468 |             base_install_path = st2common_path.replace('/st2common/st2common', '') |
469 |  |
470 |             revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path) |
471 |  |
472 |             system_information['stackstorm']['installation_method'] = 'source' |
473 |             system_information['stackstorm']['revision_hash'] = revision_hash |
474 |         else: |
475 |             package_list = get_package_list(name_startswith='st2') |
476 |  |
477 |             system_information['stackstorm']['installation_method'] = 'package' |
478 |             system_information['stackstorm']['packages'] = package_list |
479 |  |
480 |         # Mistral information |
481 |         repo_path = '/opt/openstack/mistral' |
482 |         revision_hash = get_repo_latest_revision_hash(repo_path=repo_path) |
483 |         system_information['mistral']['installation_method'] = 'source' |
484 |         system_information['mistral']['revision_hash'] = revision_hash |
485 |  |
486 |         return system_information |
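For reference, a minimal way to exercise the class quoted above looks like the sketch below. The argument values are illustrative, and the import path is inferred from the fully qualified class name in this report; the real CLI entry point in st2debug wires these options from command-line flags.

```python
# Hypothetical usage sketch; the import path follows the class's fully
# qualified name as shown in this report.
from st2debug.cmd import DebugInfoCollector

collector = DebugInfoCollector(include_logs=True,
                               include_configs=True,
                               include_content=True,
                               include_system_info=True,
                               include_shell_commands=False,
                               user_info={'email': 'user@example.com'},
                               debug=False)

# Build the tarball locally without encrypting or uploading it; run() logs
# the path of the generated archive.
collector.run(encrypt=False, upload=False)
```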