1 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more |
||
2 | # contributor license agreements. See the NOTICE file distributed with |
||
3 | # this work for additional information regarding copyright ownership. |
||
4 | # The ASF licenses this file to You under the Apache License, Version 2.0 |
||
5 | # (the "License"); you may not use this file except in compliance with |
||
6 | # the License. You may obtain a copy of the License at |
||
7 | # |
||
8 | # http://www.apache.org/licenses/LICENSE-2.0 |
||
9 | # |
||
10 | # Unless required by applicable law or agreed to in writing, software |
||
11 | # distributed under the License is distributed on an "AS IS" BASIS, |
||
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||
13 | # See the License for the specific language governing permissions and |
||
14 | # limitations under the License. |
||
15 | |||
16 | """ |
||
17 | This script collects diagnostic information that helps StackStorm engineers |
18 | debug user-reported problems, and submits it to StackStorm. |
||
19 | |||
20 | By default the following information is included: |
||
21 | |||
22 | - Logs from /var/log/st2 |
||
23 | - StackStorm and mistral config file (/etc/st2/st2.conf, /etc/mistral/mistral.conf) |
||
24 | - All the content (integration packs). |
||
25 | - Information about your system and StackStorm installation (Operating system, |
||
26 | Python version, StackStorm version, Mistral version) |
||
27 | |||
28 | Note: This script currently assumes it's running on Linux. |
||
29 | """ |
||
30 | |||
31 | import os |
||
32 | import sys |
||
33 | import shutil |
||
34 | import socket |
||
35 | import logging |
||
36 | import tarfile |
||
37 | import argparse |
||
38 | import platform |
||
39 | import tempfile |
||
40 | |||
41 | import six |
||
42 | import yaml |
||
43 | import gnupg |
||
44 | import requests |
||
45 | from distutils.spawn import find_executable |
||
46 | |||
47 | import st2common |
||
48 | from st2common.content.utils import get_packs_base_paths |
||
49 | from st2common import __version__ as st2_version |
||
50 | from st2common import config |
||
51 | from st2common.util import date as date_utils |
||
52 | from st2common.util.shell import run_command |
||
53 | from st2debug.constants import GPG_KEY |
||
54 | from st2debug.constants import GPG_KEY_FINGERPRINT |
||
55 | from st2debug.constants import S3_BUCKET_URL |
||
56 | from st2debug.constants import COMPANY_NAME |
||
57 | from st2debug.constants import ARG_NAMES |
||
58 | from st2debug.utils.fs import copy_files |
||
59 | from st2debug.utils.fs import get_full_file_list |
||
60 | from st2debug.utils.fs import get_dirs_in_path |
||
61 | from st2debug.utils.fs import remove_file |
||
62 | from st2debug.utils.fs import remove_dir |
||
63 | from st2debug.utils.system_info import get_cpu_info |
||
64 | from st2debug.utils.system_info import get_memory_info |
||
65 | from st2debug.utils.system_info import get_package_list |
||
66 | from st2debug.utils.git_utils import get_repo_latest_revision_hash |
||
67 | from st2debug.processors import process_st2_config |
||
68 | from st2debug.processors import process_mistral_config |
||
69 | from st2debug.processors import process_content_pack_dir |
||
70 | |||
LOG = logging.getLogger(__name__)

# Constants
GPG_INSTALLED = find_executable('gpg') is not None

# Glob patterns for the log files which are collected into the tarball
LOG_FILE_PATHS = [
    '/var/log/st2/*.log',
    '/var/log/mistral*.log'
]

ST2_CONFIG_FILE_PATH = '/etc/st2/st2.conf'
MISTRAL_CONFIG_FILE_PATH = '/etc/mistral/mistral.conf'

# Shell commands whose output is captured into the "commands/" directory.
# Empty by default; can be overridden through the --config YAML file.
SHELL_COMMANDS = []

# Directory structure inside tarball
DIRECTORY_STRUCTURE = [
    'configs/',
    'logs/',
    'content/',
    'commands/'
]

# Maps each collection step to its destination path inside the tarball
OUTPUT_PATHS = {
    'logs': 'logs/',
    'configs': 'configs/',
    'content': 'content/',
    'commands': 'commands/',
    'system_info': 'system_info.yaml',
    'user_info': 'user_info.yaml'
}

# Options which should be removed from the st2 config
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

# Placeholder value written in place of scrubbed (sensitive) config options
REMOVE_VALUE_NAME = '**removed**'

OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

DATE_FORMAT = '%Y-%m-%d-%H%M%S'

# Best-effort parse of the st2 config so content utils (pack base paths, etc.)
# work. Failures are deliberately ignored - this debug tool should still run on
# hosts with a broken or missing StackStorm configuration.
try:
    config.parse_args(args=[])
except Exception:
    pass
||
119 | |||
120 | |||
def setup_logging():
    """
    Configure the module logger to emit INFO-level records to stdout.
    """
    logger = LOG
    logger.setLevel(logging.INFO)

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s - %(message)s'))
    logger.addHandler(stream_handler)
||
130 | |||
131 | |||
class DebugInfoCollector(object):
    def __init__(self, include_logs, include_configs, include_content, include_system_info,
                 include_shell_commands=False, user_info=None, debug=False, config_file=None,
                 output_path=None):
        """
        Initialize a DebugInfoCollector object.

        :param include_logs: Include log files in generated archive.
        :type include_logs: ``bool``
        :param include_configs: Include config files in generated archive.
        :type include_configs: ``bool``
        :param include_content: Include pack contents in generated archive.
        :type include_content: ``bool``
        :param include_system_info: Include system information in generated archive.
        :type include_system_info: ``bool``
        :param include_shell_commands: Include shell command output in generated archive.
        :type include_shell_commands: ``bool``
        :param user_info: User info to be included in generated archive.
        :type user_info: ``dict``
        :param debug: Enable debug logging.
        :type debug: ``bool``
        :param config_file: Values from config file to override defaults.
        :type config_file: ``dict``
        :param output_path: Path to write output file to. (optional)
        :type output_path: ``str``
        """
        self.include_logs = include_logs
        self.include_configs = include_configs
        self.include_content = include_content
        self.include_system_info = include_system_info
        self.include_shell_commands = include_shell_commands
        self.user_info = user_info
        self.debug = debug
        self.output_path = output_path

        # Set by create_archive(). Initialized here so the cleanup code in the
        # "finally" block can safely inspect it even when the temp directory
        # was never created (otherwise an AttributeError would mask the
        # original failure).
        self._temp_dir_path = None

        config_file = config_file or {}
        self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
        self.mistral_config_file_path = config_file.get('mistral_config_file_path',
                                                        MISTRAL_CONFIG_FILE_PATH)
        self.log_file_paths = config_file.get('log_file_paths', LOG_FILE_PATHS[:])
        self.gpg_key = config_file.get('gpg_key', GPG_KEY)
        self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
        self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
        self.company_name = config_file.get('company_name', COMPANY_NAME)
        self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)

        self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
        self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
        self.config_file_paths = [
            self.st2_config_file_path,
            self.mistral_config_file_path
        ]

    def run(self, encrypt=False, upload=False, existing_file=None):
        """
        Run the specified steps.

        :param encrypt: If true, encrypt the archive file.
        :type encrypt: ``bool``
        :param upload: If true, upload the resulting file.
        :type upload: ``bool``
        :param existing_file: Path to an existing archive file. If not specified a new
                              archive will be created.
        :type existing_file: ``str``
        """
        temp_files = []

        try:
            if existing_file:
                working_file = existing_file
            else:
                # Create a new archive if an existing file hasn't been provided
                working_file = self.create_archive()
                if not encrypt and not upload:
                    LOG.info('Debug tarball successfully '
                             'generated and can be reviewed at: %s' % working_file)
                else:
                    # Plain-text tarball is only an intermediate artifact here
                    temp_files.append(working_file)

            if encrypt:
                working_file = self.encrypt_archive(archive_file_path=working_file)
                if not upload:
                    LOG.info('Encrypted debug tarball successfully generated at: %s' %
                             working_file)
                else:
                    temp_files.append(working_file)

            if upload:
                self.upload_archive(archive_file_path=working_file)
                tarball_name = os.path.basename(working_file)
                LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                         (self.company_name, tarball_name))
                LOG.info('When communicating with support, please let them know the '
                         'tarball name - %s' % tarball_name)
        finally:
            # Remove temp files
            for temp_file in temp_files:
                assert temp_file.startswith('/tmp')
                remove_file(file_path=temp_file)

    def create_archive(self):
        """
        Create an archive with debugging information.

        :return: Path to the generated archive.
        :rtype: ``str``
        """

        try:
            # 1. Create temporary directory with the final directory structure where we will move
            # files which will be processed and included in the tarball
            self._temp_dir_path = self.create_temp_directories()

            # Prepend temp_dir_path to OUTPUT_PATHS
            output_paths = {}
            for key, path in six.iteritems(OUTPUT_PATHS):
                output_paths[key] = os.path.join(self._temp_dir_path, path)

            # 2. Moves all the files to the temporary directory
            LOG.info('Collecting files...')
            if self.include_logs:
                self.collect_logs(output_paths['logs'])
            if self.include_configs:
                self.collect_config_files(output_paths['configs'])
            if self.include_content:
                self.collect_pack_content(output_paths['content'])
            if self.include_system_info:
                self.add_system_information(output_paths['system_info'])
            if self.user_info:
                self.add_user_info(output_paths['user_info'])
            if self.include_shell_commands:
                self.add_shell_command_output(output_paths['commands'])

            # 3. Create a tarball
            return self.create_tarball(self._temp_dir_path)

        except Exception:
            LOG.exception('Failed to generate tarball', exc_info=True)
            # Bare re-raise preserves the original traceback
            raise

        finally:
            # Ensure temp files are removed regardless of success or failure.
            # _temp_dir_path is still None if creating the temp directory
            # itself failed - skip cleanup in that case instead of raising and
            # masking the original error.
            if self._temp_dir_path:
                assert self._temp_dir_path.startswith('/tmp')
                remove_dir(self._temp_dir_path)

    def encrypt_archive(self, archive_file_path):
        """
        Encrypt archive with debugging information using our public key.

        :param archive_file_path: Path to the non-encrypted tarball file.
        :type archive_file_path: ``str``

        :return: Path to the encrypted archive.
        :rtype: ``str``
        """
        try:
            assert archive_file_path.endswith('.tar.gz')

            LOG.info('Encrypting tarball...')
            gpg = gnupg.GPG(verbose=self.debug)

            # Import our public key
            import_result = gpg.import_keys(self.gpg_key)
            # pylint: disable=no-member
            assert import_result.count == 1

            encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
            encrypted_archive_output_file_path = os.path.join('/tmp',
                                                              encrypted_archive_output_file_name)
            with open(archive_file_path, 'rb') as fp:
                gpg.encrypt_file(file=fp,
                                 recipients=self.gpg_key_fingerprint,
                                 always_trust=True,
                                 output=encrypted_archive_output_file_path)
            return encrypted_archive_output_file_path
        except Exception:
            LOG.exception('Failed to encrypt archive', exc_info=True)
            raise

    def upload_archive(self, archive_file_path):
        """
        Upload the encrypted archive.

        :param archive_file_path: Path to the encrypted tarball file.
        :type archive_file_path: ``str``
        """
        try:
            assert archive_file_path.endswith('.asc')

            LOG.debug('Uploading tarball...')
            file_name = os.path.basename(archive_file_path)
            url = self.s3_bucket_url + file_name
            assert url.startswith('https://')

            with open(archive_file_path, 'rb') as fp:
                response = requests.put(url=url, files={'file': fp})
            assert response.status_code == six.moves.http_client.OK
        except Exception:
            LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
            raise

    def collect_logs(self, output_path):
        """
        Copy log files to the output path.

        :param output_path: Path where log files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including log files')
        for file_path_glob in self.log_file_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_path)

    def collect_config_files(self, output_path):
        """
        Copy config files to the output path and scrub sensitive values.

        :param output_path: Path where config files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including config files')
        copy_files(file_paths=self.config_file_paths, destination=output_path)

        st2_config_path = os.path.join(output_path, self.st2_config_file_name)
        process_st2_config(config_path=st2_config_path)

        mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
        process_mistral_config(config_path=mistral_config_path)

    @staticmethod
    def collect_pack_content(output_path):
        """
        Copy pack contents to the output path.

        :param output_path: Path where pack contents will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including content')

        packs_base_paths = get_packs_base_paths()
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_path, 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                # Best-effort: skip base paths which can't be copied
                continue

        base_pack_dirs = get_dirs_in_path(file_path=output_path)

        for base_pack_dir in base_pack_dirs:
            pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

            for pack_dir in pack_dirs:
                process_content_pack_dir(pack_dir=pack_dir)

    def add_system_information(self, output_path):
        """
        Collect and write system information to output path.

        :param output_path: Path where system information will be written to.
        :type output_path: ``str``
        """
        LOG.debug('Including system info')

        system_information = yaml.safe_dump(self.get_system_information(),
                                            default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(system_information)

    def add_user_info(self, output_path):
        """
        Write user info to output path as YAML.

        :param output_path: Path where user info will be written.
        :type output_path: ``str``
        """
        LOG.debug('Including user info')
        user_info = yaml.safe_dump(self.user_info, default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(user_info)

    def add_shell_command_output(self, output_path):
        """
        Get output of the required shell command and redirect the output to output path.

        :param output_path: Directory where output files will be written
        :type output_path: ``str``
        """
        LOG.debug('Including the required shell commands output files')
        for cmd in self.shell_commands:
            output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
            exit_code, stdout, stderr = run_command(cmd=cmd, shell=True, cwd=output_path)
            with open(output_file, 'w') as fp:
                fp.write('[BEGIN STDOUT]\n')
                fp.write(stdout)
                fp.write('[END STDOUT]\n')
                fp.write('[BEGIN STDERR]\n')
                fp.write(stderr)
                fp.write('[END STDERR]')

    def create_tarball(self, temp_dir_path):
        """
        Create tarball with the contents of temp_dir_path.

        Tarball will be written to self.output_path, if set. Otherwise it will
        be written to /tmp with a name generated according to
        OUTPUT_FILENAME_TEMPLATE.

        :param temp_dir_path: Base directory to include in tarball.
        :type temp_dir_path: ``str``

        :return: Path to the created tarball.
        :rtype: ``str``
        """
        LOG.info('Creating tarball...')
        if self.output_path:
            output_file_path = self.output_path
        else:
            date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
            values = {'hostname': socket.gethostname(), 'date': date}

            output_file_name = OUTPUT_FILENAME_TEMPLATE % values
            output_file_path = os.path.join('/tmp', output_file_name)

        with tarfile.open(output_file_path, 'w:gz') as tar:
            # arcname='' places the collected directories at the archive root
            tar.add(temp_dir_path, arcname='')

        return output_file_path

    @staticmethod
    def create_temp_directories():
        """
        Creates a new temp directory and creates the directory structure as defined
        by DIRECTORY_STRUCTURE.

        :return: Path to temp directory.
        :rtype: ``str``
        """
        temp_dir_path = tempfile.mkdtemp()

        for directory_name in DIRECTORY_STRUCTURE:
            full_path = os.path.join(temp_dir_path, directory_name)
            os.mkdir(full_path)

        return temp_dir_path

    @staticmethod
    def format_output_filename(cmd):
        """
        Remove whitespace and special characters from a shell command.

        Used to create filename-safe representations of a shell command.

        :param cmd: Shell command.
        :type cmd: ``str``
        :return: Formatted filename.
        :rtype: ``str``
        """
        # Raw strings so the backslash before "|" is a literal character
        # instead of an invalid (deprecated) escape sequence
        if six.PY3:
            cmd = cmd.translate(cmd.maketrans('', '', r""" !@#$%^&*()[]{};:,./<>?\|`~=+"'"""))
        else:
            cmd = cmd.translate(None, r""" !@#$%^&*()[]{};:,./<>?\|`~=+"'""")

        return cmd

    @staticmethod
    def get_system_information():
        """
        Retrieve system information which is included in the report.

        :rtype: ``dict``
        """
        system_information = {
            'hostname': socket.gethostname(),
            'operating_system': {},
            'hardware': {
                'cpu': {},
                'memory': {}
            },
            'python': {},
            'stackstorm': {},
            'mistral': {}
        }

        # Operating system information
        system_information['operating_system']['system'] = platform.system()
        system_information['operating_system']['release'] = platform.release()
        system_information['operating_system']['operating_system'] = platform.platform()
        system_information['operating_system']['platform'] = platform.system()
        system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

        if platform.system().lower() == 'linux':
            # platform.linux_distribution() was removed in Python 3.8 - only
            # include the distribution info when the API is available
            if hasattr(platform, 'linux_distribution'):
                distribution = ' '.join(platform.linux_distribution())
                system_information['operating_system']['distribution'] = distribution

        system_information['python']['version'] = sys.version.split('\n')[0]

        # Hardware information
        cpu_info = get_cpu_info()

        if cpu_info:
            core_count = len(cpu_info)
            model = cpu_info[0]['model_name']
            system_information['hardware']['cpu'] = {
                'core_count': core_count,
                'model_name': model
            }
        else:
            # Unsupported platform
            system_information['hardware']['cpu'] = 'unsupported platform'

        memory_info = get_memory_info()

        if memory_info:
            total = memory_info['MemTotal'] / 1024
            free = memory_info['MemFree'] / 1024
            used = (total - free)
            system_information['hardware']['memory'] = {
                'total': total,
                'used': used,
                'free': free
            }
        else:
            # Unsupported platform
            system_information['hardware']['memory'] = 'unsupported platform'

        # StackStorm information
        system_information['stackstorm']['version'] = st2_version

        st2common_path = st2common.__file__
        st2common_path = os.path.dirname(st2common_path)

        if 'st2common/st2common' in st2common_path:
            # Assume we are running source install
            base_install_path = st2common_path.replace('/st2common/st2common', '')

            revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

            system_information['stackstorm']['installation_method'] = 'source'
            system_information['stackstorm']['revision_hash'] = revision_hash
        else:
            package_list = get_package_list(name_startswith='st2')

            system_information['stackstorm']['installation_method'] = 'package'
            system_information['stackstorm']['packages'] = package_list

        # Mistral information
        repo_path = '/opt/openstack/mistral'
        revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
        system_information['mistral']['installation_method'] = 'source'
        system_information['mistral']['revision_hash'] = revision_hash

        return system_information
||
587 | |||
588 | |||
def main():
    """
    Entry point: parse CLI arguments, confirm with the user, then collect,
    optionally encrypt, and optionally upload the debug tarball.
    """
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--exclude-logs', action='store_true', default=False,
                        help="Don't include logs in the generated tarball")
    parser.add_argument('--exclude-configs', action='store_true', default=False,
                        help="Don't include configs in the generated tarball")
    parser.add_argument('--exclude-content', action='store_true', default=False,
                        help="Don't include content packs in the generated tarball")
    parser.add_argument('--exclude-system-info', action='store_true', default=False,
                        help="Don't include system information in the generated tarball")
    parser.add_argument('--exclude-shell-commands', action='store_true', default=False,
                        help="Don't include shell commands output in the generated tarball")
    parser.add_argument('--yes', action='store_true', default=False,
                        help='Run in non-interactive mode and answer "yes" to all the questions')
    parser.add_argument('--review', action='store_true', default=False,
                        help="Generate the tarball, but don't encrypt and upload it")
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Enable debug mode')
    parser.add_argument('--config', action='store', default=None,
                        help='Get required configurations from config file')
    parser.add_argument('--output', action='store', default=None,
                        help='Specify output file path')
    parser.add_argument('--existing-file', action='store', default=None,
                        help='Specify an existing file to operate on')
    args = parser.parse_args()

    setup_logging()

    # Ensure that not all options have been excluded
    if all(getattr(args, arg_name, False) for arg_name in ARG_NAMES):
        print('Generated tarball would be empty. Aborting.')
        sys.exit(2)

    # Get setting overrides from yaml file if specified
    config_file = {}
    if args.config:
        try:
            with open(args.config, 'r') as yaml_file:
                config_file = yaml.safe_load(yaml_file)
        except Exception as e:
            LOG.error('Failed to parse config file: %s' % e)
            sys.exit(1)

        if not isinstance(config_file, dict):
            LOG.error('Unrecognized config file format')
            sys.exit(1)

    company_name = config_file.get('company_name', COMPANY_NAME)

    # Review mode generates the tarball but skips encryption and upload
    encrypt = upload = not args.review

    if encrypt and not GPG_INSTALLED:
        # When not running in review mode, GPG needs to be installed and
        # available
        raise ValueError('"gpg" binary not found, can\'t proceed. Make sure "gpg" is installed '
                         'and available in PATH.')

    if upload and not args.yes and not args.existing_file:
        included = [name.replace('exclude_', '') for name in ARG_NAMES
                    if not getattr(args, name, False)]
        print('This will submit the following information to %s: %s' % (company_name,
                                                                        ', '.join(included)))
        answer = six.moves.input('Are you sure you want to proceed? [y/n] ')
        if answer.strip().lower() not in ['y', 'yes']:
            print('Aborting')
            sys.exit(1)

    # Prompt user for optional additional context info
    user_info = {}
    if not args.yes and not args.existing_file:
        print('If you want us to get back to you via email, you can provide additional context '
              'such as your name, email and an optional comment')
        answer = six.moves.input('Would you like to provide additional context? [y/n] ')
        if answer.strip().lower() in ['y', 'yes']:
            for field, prompt in [('name', 'Name: '), ('email', 'Email: '),
                                  ('comment', 'Comment: ')]:
                user_info[field] = six.moves.input(prompt)

    debug_collector = DebugInfoCollector(include_logs=not args.exclude_logs,
                                         include_configs=not args.exclude_configs,
                                         include_content=not args.exclude_content,
                                         include_system_info=not args.exclude_system_info,
                                         include_shell_commands=not args.exclude_shell_commands,
                                         user_info=user_info,
                                         debug=args.debug,
                                         config_file=config_file,
                                         output_path=args.output)

    debug_collector.run(encrypt=encrypt, upload=upload, existing_file=args.existing_file)
||
692 |