GitHub Access Token became invalid

It seems the GitHub access token used to retrieve details about this repository has become invalid. This can prevent certain types of inspections from running (in particular, everything related to pull requests).
Please ask an admin of your repository to renew the access token on this website.
Passed
Push develop-v1.3.1 ( 969842...8fa207 ) by unknown, created 05:56

st2debug.cmd.DebugInfoCollector (rating: C)

Complexity
Total Complexity: 56

Size/Duplication
Total Lines: 439
Duplicated Lines: 0 %

Metrics: wmc 56, dl 0, loc 439, rs 5.5555

14 Methods

Rating   Name   Duplication   Size   Complexity  
A DebugInfoCollector.add_user_info() 0 12 2
B DebugInfoCollector.collect_pack_content() 0 26 5
A DebugInfoCollector.add_shell_command_output() 0 18 3
F DebugInfoCollector.create_archive() 0 39 9
A DebugInfoCollector.format_output_filename() 0 13 1
B DebugInfoCollector.create_tarball() 0 27 3
B DebugInfoCollector.upload_archive() 0 21 6
A DebugInfoCollector.collect_config_files() 0 15 1
A DebugInfoCollector.add_system_information() 0 14 2
A DebugInfoCollector.collect_logs() 0 11 2
F DebugInfoCollector.run() 0 46 9
A DebugInfoCollector.create_temp_directories() 0 16 2
A DebugInfoCollector.__init__() 0 50 1
B DebugInfoCollector.encrypt_archive() 0 33 5

How to fix: Complexity

Complex Class

Complex classes like st2debug.cmd.DebugInfoCollector often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to find such a component is to look for fields/methods that share the same prefixes or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
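As a sketch of what Extract Class could look like for this particular class (the collaborator name ArchivePackager and its exact interface are assumptions, not part of the reviewed code), the tarball/encrypt/upload concerns could be pulled into their own component so that DebugInfoCollector only orchestrates collection:

class ArchivePackager(object):
    """Hypothetical collaborator that owns tarball creation, encryption and upload."""

    def __init__(self, gpg_key, gpg_key_fingerprint, s3_bucket_url, output_path=None):
        self.gpg_key = gpg_key
        self.gpg_key_fingerprint = gpg_key_fingerprint
        self.s3_bucket_url = s3_bucket_url
        self.output_path = output_path

    def create_tarball(self, temp_dir_path):
        pass  # body of DebugInfoCollector.create_tarball() would move here

    def encrypt(self, archive_file_path):
        pass  # body of DebugInfoCollector.encrypt_archive() would move here

    def upload(self, archive_file_path):
        pass  # body of DebugInfoCollector.upload_archive() would move here


class DebugInfoCollector(object):
    """Keeps the collect_* / add_* methods and delegates packaging to ArchivePackager."""

    def __init__(self, packager, **include_options):
        self.packager = packager
        self.include_options = include_options

    def create_archive(self):
        pass  # unchanged collection logic (logs, configs, content, system info)

    def run(self, encrypt=False, upload=False, existing_file=None):
        working_file = existing_file or self.create_archive()
        if encrypt:
            working_file = self.packager.encrypt(working_file)
        if upload:
            self.packager.upload(working_file)

In this sketch run() keeps its current signature, so main() would not need to change.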

# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This script submits information which helps StackStorm employees debug different
user problems and issues to StackStorm.

By default the following information is included:

- Logs from /var/log/st2
- StackStorm and mistral config file (/etc/st2/st2.conf, /etc/mistral/mistral.conf)
- All the content (integration packs).
- Information about your system and StackStorm installation (Operating system,
  Python version, StackStorm version, Mistral version)

Note: This script currently assumes it's running on Linux.
"""

import os
import sys
import shutil
import socket
import logging
import tarfile
import argparse
import platform
import tempfile
import httplib

import six
import yaml
import gnupg
import requests
from distutils.spawn import find_executable

import st2common
from st2common.content.utils import get_packs_base_paths
from st2common import __version__ as st2_version
from st2common import config
from st2common.util import date as date_utils
from st2common.util.shell import run_command
from st2debug.constants import GPG_KEY
from st2debug.constants import GPG_KEY_FINGERPRINT
from st2debug.constants import S3_BUCKET_URL
from st2debug.constants import COMPANY_NAME
from st2debug.constants import ARG_NAMES
from st2debug.utils.fs import copy_files
from st2debug.utils.fs import get_full_file_list
from st2debug.utils.fs import get_dirs_in_path
from st2debug.utils.fs import remove_file
from st2debug.utils.system_info import get_cpu_info
from st2debug.utils.system_info import get_memory_info
from st2debug.utils.system_info import get_package_list
from st2debug.utils.git_utils import get_repo_latest_revision_hash
from st2debug.processors import process_st2_config
from st2debug.processors import process_mistral_config
from st2debug.processors import process_content_pack_dir

LOG = logging.getLogger(__name__)

# Constants
GPG_INSTALLED = find_executable('gpg') is not None

LOG_FILE_PATHS = [
    '/var/log/st2/*.log',
    '/var/log/mistral*.log'
]

ST2_CONFIG_FILE_PATH = '/etc/st2/st2.conf'
MISTRAL_CONFIG_FILE_PATH = '/etc/mistral/mistral.conf'

SHELL_COMMANDS = []

# Directory structure inside tarball
DIRECTORY_STRUCTURE = [
    'configs/',
    'logs/',
    'content/',
    'commands/'
]

OUTPUT_PATHS = {
    'logs': 'logs/',
    'configs': 'configs/',
    'content': 'content/',
    'commands': 'commands/',
    'system_info': 'system_info.yaml',
    'user_info': 'user_info.yaml'
}

# Options which should be removed from the st2 config
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

REMOVE_VALUE_NAME = '**removed**'

OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

DATE_FORMAT = '%Y-%m-%d-%H%M%S'

try:
    config.parse_args(args=[])
except Exception:
    pass


def setup_logging():
    root = LOG
    root.setLevel(logging.INFO)

    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s  %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    root.addHandler(ch)


class DebugInfoCollector(object):
    def __init__(self, include_logs, include_configs, include_content, include_system_info,
                 include_shell_commands=False, user_info=None, debug=False, config_file=None,
                 output_path=None):
        """
        Initialize a DebugInfoCollector object.

        :param include_logs: Include log files in generated archive.
        :type include_logs: ``bool``
        :param include_configs: Include config files in generated archive.
        :type include_configs: ``bool``
        :param include_content: Include pack contents in generated archive.
        :type include_content: ``bool``
        :param include_system_info: Include system information in generated archive.
        :type include_system_info: ``bool``
        :param include_shell_commands: Include shell command output in generated archive.
        :type include_shell_commands: ``bool``
        :param user_info: User info to be included in generated archive.
        :type user_info: ``dict``
        :param debug: Enable debug logging.
        :type debug: ``bool``
        :param config_file: Values from config file to override defaults.
        :type config_file: ``dict``
        :param output_path: Path to write output file to. (optional)
        :type output_path: ``str``
        """
        self.include_logs = include_logs
        self.include_configs = include_configs
        self.include_content = include_content
        self.include_system_info = include_system_info
        self.include_shell_commands = include_shell_commands
        self.user_info = user_info
        self.debug = debug
        self.output_path = output_path

        config_file = config_file or {}
        self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
        self.mistral_config_file_path = config_file.get('mistral_config_file_path',
                                                        MISTRAL_CONFIG_FILE_PATH)
        self.log_file_paths = config_file.get('log_file_paths', LOG_FILE_PATHS[:])
        self.gpg_key = config_file.get('gpg_key', GPG_KEY)
        self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
        self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
        self.company_name = config_file.get('company_name', COMPANY_NAME)
        self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)

        self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
        self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
        self.config_file_paths = [
            self.st2_config_file_path,
            self.mistral_config_file_path
        ]

    def run(self, encrypt=False, upload=False, existing_file=None):
        """
        Run the specified steps.

        :param encrypt: If true, encrypt the archive file.
        :param encrypt: ``bool``
        :param upload: If true, upload the resulting file.
        :param upload: ``bool``
        :param existing_file: Path to an existing archive file. If not specified a new
        archive will be created.
        :param existing_file: ``str``
        """
        temp_files = []

        try:
            if existing_file:
                working_file = existing_file
            else:
                # Create a new archive if an existing file hasn't been provided
                working_file = self.create_archive()
                if not encrypt and not upload:
                    LOG.info('Debug tarball successfully '
                             'generated and can be reviewed at: %s' % working_file)
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
                else:
                    temp_files.append(working_file)

            if encrypt:
                working_file = self.encrypt_archive(archive_file_path=working_file)
                if not upload:
                    LOG.info('Encrypted debug tarball successfully generated at: %s' %
                             working_file)
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
                else:
                    temp_files.append(working_file)

            if upload:
                self.upload_archive(archive_file_path=working_file)
                tarball_name = os.path.basename(working_file)
                LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                         (self.company_name, tarball_name))
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
                LOG.info('When communicating with support, please let them know the '
                         'tarball name - %s' % tarball_name)
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
        finally:
            # Remove temp files
            for temp_file in temp_files:
                assert temp_file.startswith('/tmp')
                remove_file(file_path=temp_file)
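The coding-style finding repeated above asks for the format arguments to be passed to the logging call instead of pre-formatting the message with %. A minimal sketch of the suggested form for the calls in run() (message text unchanged, interpolation deferred to the logging module):

LOG.info('Debug tarball successfully generated and can be reviewed at: %s', working_file)
LOG.info('Encrypted debug tarball successfully generated at: %s', working_file)
LOG.info('Debug tarball successfully uploaded to %s (name=%s)', self.company_name, tarball_name)
LOG.info('When communicating with support, please let them know the tarball name - %s', tarball_name)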

    def create_archive(self):
        """
        Create an archive with debugging information.

        :return: Path to the generated archive.
        :rtype: ``str``
        """

        try:
            # 1. Create temporary directory with the final directory structure where we will move
            # files which will be processed and included in the tarball
            temp_dir_path = self.create_temp_directories()

            # Prepend temp_dir_path to OUTPUT_PATHS
            output_paths = {}
            for key, path in OUTPUT_PATHS.iteritems():
                output_paths[key] = os.path.join(temp_dir_path, path)

            # 2. Moves all the files to the temporary directory
            LOG.info('Collecting files...')
            if self.include_logs:
                self.collect_logs(output_paths['logs'])
            if self.include_configs:
                self.collect_config_files(output_paths['configs'])
            if self.include_content:
                self.collect_pack_content(output_paths['content'])
            if self.include_system_info:
                self.add_system_information(output_paths['system_info'])
            if self.user_info:
                self.add_user_info(output_paths['user_info'])
            if self.include_shell_commands:
                self.add_shell_command_output(output_paths['commands'])

            # 3. Create a tarball
            return self.create_tarball(temp_dir_path)

        except Exception as e:
            LOG.exception('Failed to generate tarball', exc_info=True)
            raise e

    def encrypt_archive(self, archive_file_path):
        """
        Encrypt archive with debugging information using our public key.

        :param archive_file_path: Path to the non-encrypted tarball file.
        :type archive_file_path: ``str``

        :return: Path to the encrypted archive.
        :rtype: ``str``
        """
        try:
            assert archive_file_path.endswith('.tar.gz')

            LOG.info('Encrypting tarball...')
            gpg = gnupg.GPG(verbose=self.debug)

            # Import our public key
            import_result = gpg.import_keys(self.gpg_key)
            # pylint: disable=no-member
            assert import_result.count == 1

            encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
            encrypted_archive_output_file_path = os.path.join('/tmp',
                                                              encrypted_archive_output_file_name)
            with open(archive_file_path, 'rb') as fp:
                gpg.encrypt_file(file=fp,
                                 recipients=self.gpg_key_fingerprint,
                                 always_trust=True,
                                 output=encrypted_archive_output_file_path)
            return encrypted_archive_output_file_path
        except Exception as e:
            LOG.exception('Failed to encrypt archive', exc_info=True)
            raise e

    def upload_archive(self, archive_file_path):
        """
        Upload the encrypted archive.

        :param archive_file_path: Path to the encrypted tarball file.
        :type archive_file_path: ``str``
        """
        try:
            assert archive_file_path.endswith('.asc')

            LOG.debug('Uploading tarball...')
            file_name = os.path.basename(archive_file_path)
            url = self.s3_bucket_url + file_name
            assert url.startswith('https://')

            with open(archive_file_path, 'rb') as fp:
                response = requests.put(url=url, files={'file': fp})
            assert response.status_code == httplib.OK
        except Exception as e:
            LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
            raise e

    def collect_logs(self, output_path):
        """
        Copy log files to the output path.

        :param output_path: Path where log files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including log files')
        for file_path_glob in self.log_file_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_path)

    def collect_config_files(self, output_path):
        """
        Copy config files to the output path.

        :param output_path: Path where config files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including config files')
        copy_files(file_paths=self.config_file_paths, destination=output_path)

        st2_config_path = os.path.join(output_path, self.st2_config_file_name)
        process_st2_config(config_path=st2_config_path)

        mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
        process_mistral_config(config_path=mistral_config_path)

    @staticmethod
    def collect_pack_content(output_path):
        """
        Copy pack contents to the output path.

        :param output_path: Path where pack contents will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including content')

        packs_base_paths = get_packs_base_paths()
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_path, 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                continue

        base_pack_dirs = get_dirs_in_path(file_path=output_path)

        for base_pack_dir in base_pack_dirs:
            pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

            for pack_dir in pack_dirs:
                process_content_pack_dir(pack_dir=pack_dir)
    def add_system_information(self, output_path):
        """
        Collect and write system information to output path.

        :param output_path: Path where system information will be written to.
        :type output_path: ``str``
        """
        LOG.debug('Including system info')

        system_information = yaml.dump(self.get_system_information(),
                                       default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(system_information)

    def add_user_info(self, output_path):
        """
        Write user info to output path as YAML.

        :param output_path: Path where user info will be written.
        :type output_path: ``str``
        """
        LOG.debug('Including user info')
        user_info = yaml.dump(self.user_info, default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(user_info)

    def add_shell_command_output(self, output_path):
        """"
        Get output of the required shell command and redirect the output to output path.

        :param output_path: Directory where output files will be written
        :param output_path: ``str``
        """
        LOG.debug('Including the required shell commands output files')
        for cmd in self.shell_commands:
            output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
            exit_code, stdout, stderr = run_command(cmd=cmd, shell=True)
            with open(output_file, 'w') as fp:
                fp.write('[BEGIN STDOUT]\n')
                fp.write(stdout)
                fp.write('[END STDOUT]\n')
                fp.write('[BEGIN STDERR]\n')
                fp.write(stderr)
                fp.write('[END STDERR]')

    def create_tarball(self, temp_dir_path):
        """
        Create tarball with the contents of temp_dir_path.

        Tarball will be written to self.output_path, if set. Otherwise it will
        be written to /tmp a name generated according to OUTPUT_FILENAME_TEMPLATE.

        :param temp_dir_path: Base directory to include in tarbal.
        :type temp_dir_path: ``str``

        :return: Path to the created tarball.
        :rtype: ``str``
        """
        LOG.info('Creating tarball...')
        if self.output_path:
            output_file_path = self.output_path
        else:
            date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
            values = {'hostname': socket.gethostname(), 'date': date}

            output_file_name = OUTPUT_FILENAME_TEMPLATE % values
            output_file_path = os.path.join('/tmp', output_file_name)

        with tarfile.open(output_file_path, 'w:gz') as tar:
            tar.add(temp_dir_path, arcname='')

        return output_file_path

    @staticmethod
    def create_temp_directories():
        """
        Creates a new temp directory and creates the directory structure as defined
        by DIRECTORY_STRUCTURE.

        :return: Path to temp directory.
        :rtype: ``str``
        """
        temp_dir_path = tempfile.mkdtemp()

        for directory_name in DIRECTORY_STRUCTURE:
            full_path = os.path.join(temp_dir_path, directory_name)
            os.mkdir(full_path)

        return temp_dir_path

    @staticmethod
    def format_output_filename(cmd):
        """"
        Remove whitespace and special characters from a shell command.

        Used to create filename-safe representations of a shell command.

        :param cmd: Shell command.
        :type cmd: ``str``
        :return: Formatted filename.
        :rtype: ``str``
        """
        return cmd.translate(None, """ !@#$%^&*()[]{};:,./<>?\|`~=+"'""")
Issue (Bug): A suspicious escape sequence \| was found. Did you maybe forget to add an r prefix? Escape sequences in Python are generally interpreted according to rules similar to standard C; in strings prefixed with r or R, escape processing is disabled and the backslash is kept literally, which is usually what is intended when the text contains backslashes (for example in regular expressions). See the Python documentation for the available escape sequences.
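For illustration, the same deletion set written with an r prefix, which makes the backslash before | unambiguously literal (a sketch only; DELETE_CHARS is an illustrative name, not something the reviewed file defines):

# Raw string literal: no escape processing, the backslash stays a backslash
DELETE_CHARS = r""" !@#$%^&*()[]{};:,./<>?\|`~=+"'"""

def format_output_filename(cmd):
    # Python 2 str.translate(): delete every character listed in DELETE_CHARS
    return cmd.translate(None, DELETE_CHARS)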

    @staticmethod
    def get_system_information():
        """
        Retrieve system information which is included in the report.

        :rtype: ``dict``
        """
        system_information = {
            'hostname': socket.gethostname(),
            'operating_system': {},
            'hardware': {
                'cpu': {},
                'memory': {}
            },
            'python': {},
            'stackstorm': {},
            'mistral': {}
        }

        # Operating system information
        system_information['operating_system']['system'] = platform.system()
        system_information['operating_system']['release'] = platform.release()
        system_information['operating_system']['operating_system'] = platform.platform()
        system_information['operating_system']['platform'] = platform.system()
        system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

        if platform.system().lower() == 'linux':
            distribution = ' '.join(platform.linux_distribution())
            system_information['operating_system']['distribution'] = distribution

        system_information['python']['version'] = sys.version.split('\n')[0]

        # Hardware information
        cpu_info = get_cpu_info()

        if cpu_info:
            core_count = len(cpu_info)
            model = cpu_info[0]['model_name']
            system_information['hardware']['cpu'] = {
                'core_count': core_count,
                'model_name': model
            }
        else:
            # Unsupported platform
            system_information['hardware']['cpu'] = 'unsupported platform'

        memory_info = get_memory_info()

        if memory_info:
            total = memory_info['MemTotal'] / 1024
            free = memory_info['MemFree'] / 1024
            used = (total - free)
            system_information['hardware']['memory'] = {
                'total': total,
                'used': used,
                'free': free
            }
        else:
            # Unsupported platform
            system_information['hardware']['memory'] = 'unsupported platform'

        # StackStorm information
        system_information['stackstorm']['version'] = st2_version

        st2common_path = st2common.__file__
        st2common_path = os.path.dirname(st2common_path)

        if 'st2common/st2common' in st2common_path:
            # Assume we are running source install
            base_install_path = st2common_path.replace('/st2common/st2common', '')

            revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

            system_information['stackstorm']['installation_method'] = 'source'
            system_information['stackstorm']['revision_hash'] = revision_hash
        else:
            package_list = get_package_list(name_startswith='st2')

            system_information['stackstorm']['installation_method'] = 'package'
            system_information['stackstorm']['packages'] = package_list

        # Mistral information
        repo_path = '/opt/openstack/mistral'
        revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
        system_information['mistral']['installation_method'] = 'source'
        system_information['mistral']['revision_hash'] = revision_hash

        return system_information

def main():
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--exclude-logs', action='store_true', default=False,
                        help='Don\'t include logs in the generated tarball')
    parser.add_argument('--exclude-configs', action='store_true', default=False,
                        help='Don\'t include configs in the generated tarball')
    parser.add_argument('--exclude-content', action='store_true', default=False,
                        help='Don\'t include content packs in the generated tarball')
    parser.add_argument('--exclude-system-info', action='store_true', default=False,
                        help='Don\'t include system information in the generated tarball')
    parser.add_argument('--exclude-shell-commands', action='store_true', default=False,
                        help='Don\'t include shell commands output in the generated tarball')
    parser.add_argument('--yes', action='store_true', default=False,
                        help='Run in non-interactive mode and answer "yes" to all the questions')
    parser.add_argument('--review', action='store_true', default=False,
                        help='Generate the tarball, but don\'t encrypt and upload it')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Enable debug mode')
    parser.add_argument('--config', action='store', default=None,
                        help='Get required configurations from config file')
    parser.add_argument('--output', action='store', default=None,
                        help='Specify output file path')
    parser.add_argument('--existing-file', action='store', default=None,
                        help='Specify an existing file to operate on')
    args = parser.parse_args()

    setup_logging()

    # Ensure that not all options have been excluded
    abort = True
    for arg_name in ARG_NAMES:
        abort &= getattr(args, arg_name, False)

    if abort:
        print('Generated tarball would be empty. Aborting.')
        sys.exit(2)

    # Get setting overrides from yaml file if specified
    if args.config:
        try:
            with open(args.config, 'r') as yaml_file:
                config_file = yaml.safe_load(yaml_file)
        except Exception as e:
            LOG.error('Failed to parse config file: %s' % e)
Issue (Coding Style Best Practice): Specify string format arguments as logging function parameters.
            sys.exit(1)

        if not isinstance(config_file, dict):
            LOG.error('Unrecognized config file format')
            sys.exit(1)
    else:
        config_file = {}
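For reference, the keys such a YAML file may set mirror the lookups in DebugInfoCollector.__init__(); after yaml.safe_load() the overrides arrive as a plain dict along these lines (the values shown are only examples, not the project's real settings):

config_file = {
    'st2_config_file_path': '/etc/st2/st2.conf',
    'mistral_config_file_path': '/etc/mistral/mistral.conf',
    'log_file_paths': ['/var/log/st2/*.log'],
    's3_bucket_url': 'https://example-bucket.s3.amazonaws.com/',  # example URL
    'company_name': 'StackStorm',
    'shell_commands': ['st2 --version'],  # example command
}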

    company_name = config_file.get('company_name', COMPANY_NAME)

    # Defaults
    encrypt = True
    upload = True

    if args.review:
        encrypt = False
        upload = False

    if encrypt:
        # When not running in review mode, GPG needs to be installed and
        # available
        if not GPG_INSTALLED:
            msg = ('"gpg" binary not found, can\'t proceed. Make sure "gpg" is installed '
                   'and available in PATH.')
            raise ValueError(msg)

    if not args.yes and not args.existing_file and upload:
        submitted_content = [name.replace('exclude_', '') for name in ARG_NAMES if
                             not getattr(args, name, False)]
        submitted_content = ', '.join(submitted_content)
        print('This will submit the following information to %s: %s' % (company_name,
                                                                        submitted_content))
        value = six.moves.input('Are you sure you want to proceed? [y/n] ')
        if value.strip().lower() not in ['y', 'yes']:
            print('Aborting')
            sys.exit(1)

    # Prompt user for optional additional context info
    user_info = {}
    if not args.yes and not args.existing_file:
        print('If you want us to get back to you via email, you can provide additional context '
              'such as your name, email and an optional comment')
        value = six.moves.input('Would you like to provide additional context? [y/n] ')
        if value.strip().lower() in ['y', 'yes']:
            user_info['name'] = six.moves.input('Name: ')
            user_info['email'] = six.moves.input('Email: ')
            user_info['comment'] = six.moves.input('Comment: ')

    debug_collector = DebugInfoCollector(include_logs=not args.exclude_logs,
                                         include_configs=not args.exclude_configs,
                                         include_content=not args.exclude_content,
                                         include_system_info=not args.exclude_system_info,
                                         include_shell_commands=not args.exclude_shell_commands,
                                         user_info=user_info,
                                         debug=args.debug,
                                         config_file=config_file,
                                         output_path=args.output)

    debug_collector.run(encrypt=encrypt, upload=upload, existing_file=args.existing_file)
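Putting it together, a review-style run of the collector (what main() does when --review is passed: build the tarball, skip encryption and upload) reduces to roughly the following sketch; the user_info values and output path are illustrative, not defaults from the file:

collector = DebugInfoCollector(include_logs=True,
                               include_configs=True,
                               include_content=True,
                               include_system_info=True,
                               include_shell_commands=False,
                               user_info={'name': 'Jane Doe', 'email': 'jane@example.com'},
                               output_path='/tmp/st2-debug-review.tar.gz')
collector.run(encrypt=False, upload=False)  # tarball is left in place for manual review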