| Total Complexity | 56 |
| Total Lines | 439 |
| Duplicated Lines | 0 % |
Complex classes like st2debug.cmd.DebugInfoCollector often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes.
Once you have determined which fields belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate and is often faster.
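For instance, in the listing below the fields gpg_key, gpg_key_fingerprint and s3_bucket_url are touched only by encrypt_archive() and upload_archive(), so "encrypt and upload the archive" is one such cohesive component. The following is a minimal, hypothetical sketch of Extract Class applied to that group; the name ArchiveShipper and the delegation wiring are illustrative and are not part of st2debug.

```python
class ArchiveShipper(object):
    """Cohesive component extracted from DebugInfoCollector: the state and
    behaviour needed to encrypt a debug tarball and upload it for support."""

    def __init__(self, gpg_key, gpg_key_fingerprint, s3_bucket_url,
                 company_name, debug=False):
        self.gpg_key = gpg_key
        self.gpg_key_fingerprint = gpg_key_fingerprint
        self.s3_bucket_url = s3_bucket_url
        self.company_name = company_name
        self.debug = debug

    def encrypt_archive(self, archive_file_path):
        # The body of DebugInfoCollector.encrypt_archive() moves here unchanged.
        raise NotImplementedError('moved body omitted in this sketch')

    def upload_archive(self, archive_file_path):
        # The body of DebugInfoCollector.upload_archive() moves here unchanged.
        raise NotImplementedError('moved body omitted in this sketch')


class DebugInfoCollector(object):
    def __init__(self, gpg_key, gpg_key_fingerprint, s3_bucket_url,
                 company_name, debug=False, **kwargs):
        # The remaining constructor arguments and fields stay as in the
        # listing below; only the shipping-related state is delegated.
        self.shipper = ArchiveShipper(gpg_key, gpg_key_fingerprint,
                                      s3_bucket_url, company_name, debug=debug)

    def encrypt_archive(self, archive_file_path):
        return self.shipper.encrypt_archive(archive_file_path)

    def upload_archive(self, archive_file_path):
        return self.shipper.upload_archive(archive_file_path)
```

Because the collector keeps encrypt_archive() and upload_archive() as thin delegates, existing callers such as run() do not need to change.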
```python
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# ... (remainder of the license header and module imports not shown) ...


class DebugInfoCollector(object):
    def __init__(self, include_logs, include_configs, include_content, include_system_info,
                 include_shell_commands=False, user_info=None, debug=False, config_file=None,
                 output_path=None):
        """
        Initialize a DebugInfoCollector object.

        :param include_logs: Include log files in generated archive.
        :type include_logs: ``bool``
        :param include_configs: Include config files in generated archive.
        :type include_configs: ``bool``
        :param include_content: Include pack contents in generated archive.
        :type include_content: ``bool``
        :param include_system_info: Include system information in generated archive.
        :type include_system_info: ``bool``
        :param include_shell_commands: Include shell command output in generated archive.
        :type include_shell_commands: ``bool``
        :param user_info: User info to be included in generated archive.
        :type user_info: ``dict``
        :param debug: Enable debug logging.
        :type debug: ``bool``
        :param config_file: Values from config file to override defaults.
        :type config_file: ``dict``
        :param output_path: Path to write output file to. (optional)
        :type output_path: ``str``
        """
        self.include_logs = include_logs
        self.include_configs = include_configs
        self.include_content = include_content
        self.include_system_info = include_system_info
        self.include_shell_commands = include_shell_commands
        self.user_info = user_info
        self.debug = debug
        self.output_path = output_path

        config_file = config_file or {}
        self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
        self.mistral_config_file_path = config_file.get('mistral_config_file_path',
                                                        MISTRAL_CONFIG_FILE_PATH)
        self.log_files_paths = config_file.get('log_files_paths', LOG_FILE_PATHS[:])
        self.gpg_key = config_file.get('gpg_key', GPG_KEY)
        self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
        self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
        self.company_name = config_file.get('company_name', COMPANY_NAME)
        self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)

        self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
        self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
        self.config_file_paths = [
            self.st2_config_file_path,
            self.mistral_config_file_path
        ]

    def run(self, encrypt=False, upload=False, existing_file=None):
        """
        Run the specified steps.

        :param encrypt: If true, encrypt the archive file.
        :type encrypt: ``bool``
        :param upload: If true, upload the resulting file.
        :type upload: ``bool``
        :param existing_file: Path to an existing archive file. If not specified a new
                              archive will be created.
        :type existing_file: ``str``
        """
        temp_files = []

        try:
            if existing_file:
                working_file = existing_file
            else:
                # Create a new archive if an existing file hasn't been provided
                working_file = self.create_archive()
                if not encrypt and not upload:
                    LOG.info('Debug tarball successfully '
                             'generated and can be reviewed at: %s' % working_file)
                else:
                    temp_files.append(working_file)

            if encrypt:
                working_file = self.encrypt_archive(archive_file_path=working_file)
                if not upload:
                    LOG.info('Encrypted debug tarball successfully generated at: %s' %
                             working_file)
                else:
                    temp_files.append(working_file)

            if upload:
                self.upload_archive(archive_file_path=working_file)
                tarball_name = os.path.basename(working_file)
                LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                         (self.company_name, tarball_name))
                LOG.info('When communicating with support, please let them know the '
                         'tarball name - %s' % tarball_name)
        finally:
            # Remove temp files
            for temp_file in temp_files:
                assert temp_file.startswith('/tmp')
                remove_file(file_path=temp_file)

    def create_archive(self):
        """
        Create an archive with debugging information.

        :return: Path to the generated archive.
        :rtype: ``str``
        """

        try:
            # 1. Create temporary directory with the final directory structure where we will move
            # files which will be processed and included in the tarball
            temp_dir_path = self.create_temp_directories()

            # Prepend temp_dir_path to OUTPUT_PATHS
            output_paths = {}
            for key, path in OUTPUT_PATHS.iteritems():
                output_paths[key] = os.path.join(temp_dir_path, path)

            # 2. Move all the files to the temporary directory
            LOG.info('Collecting files...')
            if self.include_logs:
                self.collect_logs(output_paths['logs'])
            if self.include_configs:
                self.collect_config_files(output_paths['configs'])
            if self.include_content:
                self.collect_pack_content(output_paths['content'])
            if self.include_system_info:
                self.add_system_information(output_paths['system_info'])
            if self.user_info:
                self.add_user_info(output_paths['user_info'])
            if self.include_shell_commands:
                self.add_shell_command_output(output_paths['commands'])

            # 3. Create a tarball
            return self.create_tarball(temp_dir_path)

        except Exception as e:
            LOG.exception('Failed to generate tarball', exc_info=True)
            raise e

    def encrypt_archive(self, archive_file_path):
        """
        Encrypt archive with debugging information using our public key.

        :param archive_file_path: Path to the non-encrypted tarball file.
        :type archive_file_path: ``str``

        :return: Path to the encrypted archive.
        :rtype: ``str``
        """
        try:
            assert archive_file_path.endswith('.tar.gz')

            LOG.info('Encrypting tarball...')
            gpg = gnupg.GPG(verbose=self.debug)

            # Import our public key
            import_result = gpg.import_keys(self.gpg_key)
            # pylint: disable=no-member
            assert import_result.count == 1

            encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
            encrypted_archive_output_file_path = os.path.join('/tmp',
                                                              encrypted_archive_output_file_name)
            with open(archive_file_path, 'rb') as fp:
                gpg.encrypt_file(file=fp,
                                 recipients=self.gpg_key_fingerprint,
                                 always_trust=True,
                                 output=encrypted_archive_output_file_path)
            return encrypted_archive_output_file_path
        except Exception as e:
            LOG.exception('Failed to encrypt archive', exc_info=True)
            raise e

    def upload_archive(self, archive_file_path):
        """
        Upload the encrypted archive.

        :param archive_file_path: Path to the encrypted tarball file.
        :type archive_file_path: ``str``
        """
        try:
            assert archive_file_path.endswith('.asc')

            LOG.debug('Uploading tarball...')
            file_name = os.path.basename(archive_file_path)
            url = self.s3_bucket_url + file_name
            assert url.startswith('https://')

            with open(archive_file_path, 'rb') as fp:
                response = requests.put(url=url, files={'file': fp})
            assert response.status_code == httplib.OK
        except Exception as e:
            LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
            raise e

    def collect_logs(self, output_path):
        """
        Copy log files to the output path.

        :param output_path: Path where log files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including log files')
        for file_path_glob in self.log_files_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_path)

    def collect_config_files(self, output_path):
        """
        Copy config files to the output path.

        :param output_path: Path where config files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including config files')
        copy_files(file_paths=self.config_file_paths, destination=output_path)

        st2_config_path = os.path.join(output_path, self.st2_config_file_name)
        process_st2_config(config_path=st2_config_path)

        mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
        process_mistral_config(config_path=mistral_config_path)

    @staticmethod
    def collect_pack_content(output_path):
        """
        Copy pack contents to the output path.

        :param output_path: Path where pack contents will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including content')

        packs_base_paths = get_packs_base_paths()
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_path, 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                continue

        base_pack_dirs = get_dirs_in_path(file_path=output_path)

        for base_pack_dir in base_pack_dirs:
            pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

            for pack_dir in pack_dirs:
                process_content_pack_dir(pack_dir=pack_dir)

    def add_system_information(self, output_path):
        """
        Collect and write system information to output path.

        :param output_path: Path where system information will be written to.
        :type output_path: ``str``
        """
        LOG.debug('Including system info')

        system_information = yaml.dump(self.get_system_information(),
                                       default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(system_information)

    def add_user_info(self, output_path):
        LOG.debug('Including user info')
        user_info = yaml.dump(self.user_info, default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(user_info)

    def add_shell_command_output(self, output_path):
        """
        Get the output of the required shell commands and redirect it to the output path.

        :param output_path: Directory where output files will be written.
        :type output_path: ``str``
        """
        LOG.debug('Including the required shell commands output files')
        for cmd in self.shell_commands:
            output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
            exit_code, stdout, stderr = run_command(cmd=cmd, shell=True)
            with open(output_file, 'w') as fp:
                fp.write('[BEGIN STDOUT]\n')
                fp.write(stdout)
                fp.write('[END STDOUT]\n')
                fp.write('[BEGIN STDERR]\n')
                fp.write(stderr)
                fp.write('[END STDERR]')

    def create_tarball(self, temp_dir_path):
        """
        Create tarball with the contents of temp_dir_path.

        Tarball will be written to self.output_path, if set. Otherwise it will
        be written to /tmp with a name generated according to OUTPUT_FILENAME_TEMPLATE.

        :param temp_dir_path: Base directory to include in the tarball.
        :type temp_dir_path: ``str``

        :return: Path to the created tarball.
        :rtype: ``str``
        """
        LOG.info('Creating tarball...')
        if self.output_path:
            output_file_path = self.output_path
        else:
            date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
            values = {'hostname': socket.gethostname(), 'date': date}

            output_file_name = OUTPUT_FILENAME_TEMPLATE % values
            output_file_path = os.path.join('/tmp', output_file_name)

        with tarfile.open(output_file_path, 'w:gz') as tar:
            tar.add(temp_dir_path, arcname='')

        return output_file_path

    @staticmethod
    def create_temp_directories():
        """
        Creates a new temp directory and creates the directory structure as defined
        by DIRECTORY_STRUCTURE.

        :return: Path to temp directory.
        :rtype: ``str``
        """
        temp_dir_path = tempfile.mkdtemp()

        for directory_name in DIRECTORY_STRUCTURE:
            full_path = os.path.join(temp_dir_path, directory_name)
            os.mkdir(full_path)

        return temp_dir_path

    @staticmethod
    def format_output_filename(cmd):
        """
        Remove whitespace and special characters from a shell command.

        Used to create filename-safe representations of a shell command.

        :param cmd: Shell command.
        :type cmd: ``str``
        :return: Formatted filename.
        :rtype: ``str``
        """
        return cmd.translate(None, """ !@#$%^&*()[]{};:,./<>?\|`~=+"'""")

    @staticmethod
    def get_system_information():
        """
        Retrieve system information which is included in the report.

        :rtype: ``dict``
        """
        system_information = {
            'hostname': socket.gethostname(),
            'operating_system': {},
            'hardware': {
                'cpu': {},
                'memory': {}
            },
            'python': {},
            'stackstorm': {},
            'mistral': {}
        }

        # Operating system information
        system_information['operating_system']['system'] = platform.system()
        system_information['operating_system']['release'] = platform.release()
        system_information['operating_system']['operating_system'] = platform.platform()
        system_information['operating_system']['platform'] = platform.system()
        system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

        if platform.system().lower() == 'linux':
            distribution = ' '.join(platform.linux_distribution())
            system_information['operating_system']['distribution'] = distribution

        system_information['python']['version'] = sys.version.split('\n')[0]

        # Hardware information
        cpu_info = get_cpu_info()

        if cpu_info:
            core_count = len(cpu_info)
            model = cpu_info[0]['model_name']
            system_information['hardware']['cpu'] = {
                'core_count': core_count,
                'model_name': model
            }
        else:
            # Unsupported platform
            system_information['hardware']['cpu'] = 'unsupported platform'

        memory_info = get_memory_info()

        if memory_info:
            total = memory_info['MemTotal'] / 1024
            free = memory_info['MemFree'] / 1024
            used = (total - free)
            system_information['hardware']['memory'] = {
                'total': total,
                'used': used,
                'free': free
            }
        else:
            # Unsupported platform
            system_information['hardware']['memory'] = 'unsupported platform'

        # StackStorm information
        system_information['stackstorm']['version'] = st2_version

        st2common_path = st2common.__file__
        st2common_path = os.path.dirname(st2common_path)

        if 'st2common/st2common' in st2common_path:
            # Assume we are running source install
            base_install_path = st2common_path.replace('/st2common/st2common', '')

            revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

            system_information['stackstorm']['installation_method'] = 'source'
            system_information['stackstorm']['revision_hash'] = revision_hash
        else:
            package_list = get_package_list(name_startswith='st2')

            system_information['stackstorm']['installation_method'] = 'package'
            system_information['stackstorm']['packages'] = package_list

        # Mistral information
        repo_path = '/opt/openstack/mistral'
        revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
        system_information['mistral']['installation_method'] = 'source'
        system_information['mistral']['revision_hash'] = revision_hash

        return system_information
```
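For completeness, here is a hypothetical invocation of the collector based on the constructor and run() signatures shown above; the import path simply mirrors the class path in the report header and the user_info keys are illustrative.

```python
# Hypothetical usage sketch. The import path mirrors the class path given in
# the report header (st2debug.cmd.DebugInfoCollector) and may differ from the
# actual module layout; the user_info keys are illustrative only.
from st2debug.cmd import DebugInfoCollector

collector = DebugInfoCollector(include_logs=True,
                               include_configs=True,
                               include_content=False,
                               include_system_info=True,
                               include_shell_commands=False,
                               user_info={'name': 'Jane Doe', 'email': 'jane@example.com'},
                               output_path='/tmp/st2-debug-output.tar.gz')

# Creates the tarball; pass encrypt=True and/or upload=True to also encrypt it
# with the bundled GPG key and upload it to the configured S3 bucket.
collector.run(encrypt=False, upload=False)
```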