# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This script collects debugging information and submits it to StackStorm so that
StackStorm engineers can troubleshoot user problems and issues.

By default the following information is included:

- Logs from /var/log/st2
- StackStorm and Mistral config files (/etc/st2/st2.conf, /etc/mistral/mistral.conf)
- All the content (integration packs)
- Information about your system and StackStorm installation (operating system,
  Python version, StackStorm version, Mistral version)

Note: This script currently assumes it's running on Linux.
"""

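# Typical usage (sketch): when this module is installed as a console script
# (commonly exposed as ``st2-submit-debug-info``; the exact entry point name
# depends on how st2debug is packaged), a review-only run which skips
# encryption and upload looks like:
#
#     st2-submit-debug-info --review --output /tmp/st2-debug.tar.gz
#
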
import os
import sys
import shutil
import socket
import logging
import tarfile
import argparse
import platform
import tempfile
import httplib

import six
import yaml
import gnupg
import requests
from distutils.spawn import find_executable

import st2common
from st2common.content.utils import get_packs_base_paths
from st2common import __version__ as st2_version
from st2common import config
from st2common.util import date as date_utils
from st2common.util.shell import run_command
from st2debug.constants import GPG_KEY
from st2debug.constants import GPG_KEY_FINGERPRINT
from st2debug.constants import S3_BUCKET_URL
from st2debug.constants import COMPANY_NAME
from st2debug.constants import ARG_NAMES
from st2debug.utils.fs import copy_files
from st2debug.utils.fs import get_full_file_list
from st2debug.utils.fs import get_dirs_in_path
from st2debug.utils.fs import remove_file
from st2debug.utils.system_info import get_cpu_info
from st2debug.utils.system_info import get_memory_info
from st2debug.utils.system_info import get_package_list
from st2debug.utils.git_utils import get_repo_latest_revision_hash
from st2debug.processors import process_st2_config
from st2debug.processors import process_mistral_config
from st2debug.processors import process_content_pack_dir

LOG = logging.getLogger(__name__)

# Constants
GPG_INSTALLED = find_executable('gpg') is not None

LOG_FILE_PATHS = [
    '/var/log/st2/*.log',
    '/var/log/mistral*.log'
]

ST2_CONFIG_FILE_PATH = '/etc/st2/st2.conf'
MISTRAL_CONFIG_FILE_PATH = '/etc/mistral/mistral.conf'

SHELL_COMMANDS = []

# Directory structure inside tarball
DIRECTORY_STRUCTURE = [
    'configs/',
    'logs/',
    'content/',
    'commands/'
]

OUTPUT_PATHS = {
    'logs': 'logs/',
    'configs': 'configs/',
    'content': 'content/',
    'commands': 'commands/',
    'system_info': 'system_info.yaml',
    'user_info': 'user_info.yaml'
}

# Options which should be removed from the st2 config
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

REMOVE_VALUE_NAME = '**removed**'
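
# Illustration (assumption: the actual scrubbing is implemented in
# st2debug.processors.process_st2_config, which consumes the two constants
# above). With the mapping shown, a st2.conf entry such as
#
#     [database]
#     password = secret
#
# would end up in the tarball as "password = **removed**".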

OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

DATE_FORMAT = '%Y-%m-%d-%H%M%S'
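# For example, on a host named "st2app01" an archive generated on
# 2015-02-10 at 13:37:00 UTC would be named:
# st2-debug-output-st2app01-2015-02-10-133700.tar.gz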

try:
    config.parse_args(args=[])
except Exception:
    pass


def setup_logging():
    root = LOG
    root.setLevel(logging.INFO)

    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    root.addHandler(ch)


class DebugInfoCollector(object):
    def __init__(self, include_logs, include_configs, include_content, include_system_info,
                 include_shell_commands=False, user_info=None, debug=False, config_file=None,
                 output_path=None):
        self.include_logs = include_logs
        self.include_configs = include_configs
        self.include_content = include_content
        self.include_system_info = include_system_info
        self.include_shell_commands = include_shell_commands
        self.user_info = user_info
        self.debug = debug
        self.output_path = output_path

        config_file = config_file or {}
        self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
        self.mistral_config_file_path = config_file.get('mistral_config_file_path',
                                                        MISTRAL_CONFIG_FILE_PATH)
        self.log_files_paths = config_file.get('log_files_paths', LOG_FILE_PATHS[:])
        self.gpg_key = config_file.get('gpg_key', GPG_KEY)
        self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
        self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
        self.company_name = config_file.get('company_name', COMPANY_NAME)
        self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)
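
        # Example override file (sketch only; the values are hypothetical) that
        # could be passed via --config. The keys match the lookups above and
        # anything omitted falls back to the module-level defaults:
        #
        #     st2_config_file_path: /etc/st2/st2.conf
        #     log_files_paths:
        #       - /var/log/st2/*.log
        #     shell_commands:
        #       - st2 action list
        #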

        self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
        self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
        self.config_file_paths = [
            self.st2_config_file_path,
            self.mistral_config_file_path
        ]

    def run(self, encrypt=False, upload=False, existing_file=None):
        temp_files = []

        try:
            if existing_file:
                working_file = existing_file
            else:
                # Create a new archive if an existing file hasn't been provided
                working_file = self.create_archive()
                if not encrypt and not upload:
                    LOG.info('Debug tarball successfully '
                             'generated and can be reviewed at: %s' % working_file)
                else:
                    temp_files.append(working_file)

            if encrypt:
                working_file = self.encrypt_archive(archive_file_path=working_file)
                if not upload:
                    LOG.info('Encrypted debug tarball successfully generated at: %s' %
                             working_file)
                else:
                    temp_files.append(working_file)

            if upload:
                self.upload_archive(archive_file_path=working_file)
                tarball_name = os.path.basename(working_file)
                LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                         (self.company_name, tarball_name))
                LOG.info('When communicating with support, please let them know the '
                         'tarball name - %s' % tarball_name)
        finally:
            # Remove temp files
            for temp_file in temp_files:
                assert temp_file.startswith('/tmp')
                remove_file(file_path=temp_file)
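
    # Minimal programmatic usage sketch (assumes the default file locations are
    # readable and that only a local, unencrypted tarball is wanted):
    #
    #     collector = DebugInfoCollector(include_logs=True, include_configs=True,
    #                                    include_content=False,
    #                                    include_system_info=True)
    #     collector.run(encrypt=False, upload=False)
    #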

    def create_archive(self):
        """
        Create an archive with debugging information.

        :return: Path to the generated archive.
        :rtype: ``str``
        """

        try:
            # 1. Create a temporary directory with the final directory structure where we will
            #    move files which will be processed and included in the tarball
            temp_dir_path = self.create_temp_directories()

            # Prepend temp_dir_path to OUTPUT_PATHS
            output_paths = {}
            for key, path in OUTPUT_PATHS.iteritems():
                output_paths[key] = os.path.join(temp_dir_path, path)

            # 2. Move all the files to the temporary directory
            LOG.info('Collecting files...')
            if self.include_logs:
                self.collect_logs(output_paths['logs'])
            if self.include_configs:
                self.collect_config_files(output_paths['configs'])
            if self.include_content:
                self.collect_pack_content(output_paths['content'])
            if self.include_system_info:
                self.add_system_information(output_paths['system_info'])
            if self.user_info:
                self.add_user_info(output_paths['user_info'])
            if self.include_shell_commands:
                self.add_shell_command_output(output_paths['commands'])

            # 3. Create a tarball
            return self.create_tarball(temp_dir_path)

        except Exception as e:
            LOG.exception('Failed to generate tarball', exc_info=True)
            raise e

    def encrypt_archive(self, archive_file_path):
        """
        Encrypt archive with debugging information using our public key.

        :param archive_file_path: Path to the non-encrypted tarball file.
        :type archive_file_path: ``str``

        :return: Path to the encrypted archive.
        :rtype: ``str``
        """
        try:
            assert archive_file_path.endswith('.tar.gz')

            LOG.info('Encrypting tarball...')
            gpg = gnupg.GPG(verbose=self.debug)

            # Import our public key
            import_result = gpg.import_keys(self.gpg_key)
            # pylint: disable=no-member
            assert import_result.count == 1

            encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
            encrypted_archive_output_file_path = os.path.join('/tmp',
                                                              encrypted_archive_output_file_name)
            with open(archive_file_path, 'rb') as fp:
                gpg.encrypt_file(file=fp,
                                 recipients=self.gpg_key_fingerprint,
                                 always_trust=True,
                                 output=encrypted_archive_output_file_path)
            return encrypted_archive_output_file_path
        except Exception as e:
            LOG.exception('Failed to encrypt archive', exc_info=True)
            raise e
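
    # Note (sketch): the resulting "<name>.tar.gz.asc" file is encrypted to the
    # key imported above, so only the holder of the matching private key can
    # recover it, e.g. with something along the lines of:
    #
    #     gpg --output st2-debug-output.tar.gz --decrypt st2-debug-output.tar.gz.asc
    #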

    def upload_archive(self, archive_file_path):
        try:
            assert archive_file_path.endswith('.asc')

            LOG.debug('Uploading tarball...')
            file_name = os.path.basename(archive_file_path)
            url = self.s3_bucket_url + file_name
            assert url.startswith('https://')

            with open(archive_file_path, 'rb') as fp:
                response = requests.put(url=url, files={'file': fp})
                assert response.status_code == httplib.OK
        except Exception as e:
            LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
            raise e
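
    # The upload URL is simply s3_bucket_url with the encrypted file name
    # appended, e.g. with a hypothetical bucket URL of
    # "https://example-bucket.s3.amazonaws.com/" the file is PUT to
    # "https://example-bucket.s3.amazonaws.com/<name>.tar.gz.asc".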

    def collect_logs(self, output_path):
        LOG.debug('Including log files')
        for file_path_glob in self.log_files_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_path)

    def collect_config_files(self, output_path):
        LOG.debug('Including config files')
        copy_files(file_paths=self.config_file_paths, destination=output_path)

        st2_config_path = os.path.join(output_path, self.st2_config_file_name)
        process_st2_config(config_path=st2_config_path)

        mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
        process_mistral_config(config_path=mistral_config_path)

    @staticmethod
    def collect_pack_content(output_path):
        LOG.debug('Including content')

        packs_base_paths = get_packs_base_paths()
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_path, 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                continue

        base_pack_dirs = get_dirs_in_path(file_path=output_path)

        for base_pack_dir in base_pack_dirs:
            pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

            for pack_dir in pack_dirs:
                process_content_pack_dir(pack_dir=pack_dir)

    def add_system_information(self, output_path):
        LOG.debug('Including system info')

        system_information = yaml.dump(self.get_system_information(),
                                       default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(system_information)

    def add_user_info(self, output_path):
        LOG.debug('Including user info')
        user_info = yaml.dump(self.user_info, default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(user_info)

    def add_shell_command_output(self, output_path):
        """
        Run each configured shell command and redirect its output to a file.

        :param output_path: Directory where the output files will be written.
        """
        LOG.debug('Including the required shell commands output files')
        for cmd in self.shell_commands:
            output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
            exit_code, stdout, stderr = run_command(cmd=cmd, shell=True)
            with open(output_file, 'w') as fp:
                fp.write('[BEGIN STDOUT]\n')
                fp.write(stdout)
                fp.write('[END STDOUT]\n')
                fp.write('[BEGIN STDERR]\n')
                fp.write(stderr)
                fp.write('[END STDERR]')

    def create_tarball(self, temp_dir_path):
        LOG.info('Creating tarball...')
        if self.output_path:
            output_file_path = self.output_path
            output_file_name = os.path.basename(output_file_path)
        else:
            date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
            values = {'hostname': socket.gethostname(), 'date': date}

            output_file_name = OUTPUT_FILENAME_TEMPLATE % values
            output_file_path = os.path.join('/tmp', output_file_name)

        with tarfile.open(output_file_path, 'w:gz') as tar:
            tar.add(temp_dir_path, arcname=output_file_name.split(".")[0])

        return output_file_path
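
    # Note: the archive's top-level directory name is everything before the
    # first '.' in the output file name, so with the default template it is
    # roughly "st2-debug-output-<hostname>-<date>" (assuming the hostname
    # itself contains no dots).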

    @staticmethod
    def create_temp_directories():
        temp_dir_path = tempfile.mkdtemp()

        for directory_name in DIRECTORY_STRUCTURE:
            full_path = os.path.join(temp_dir_path, directory_name)
            os.mkdir(full_path)

        return temp_dir_path

    @staticmethod
    def format_output_filename(cmd):
        """
        Format the output file name by removing whitespace and special characters
        from the shell command.

        :param cmd: Shell command.
        :return: Formatted output file name.
        :rtype: ``str``
        """
        for char in cmd:
            if char in ' !@#$%^&*()[]{};:,./<>?\\|`~=+"':
                cmd = cmd.replace(char, "")
        return cmd
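
    # For example, format_output_filename('netstat -an | grep 443') returns
    # 'netstat-angrep443', so the corresponding output file would be named
    # 'netstat-angrep443.txt'.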

    @staticmethod
    def get_system_information():
        """
        Retrieve system information which is included in the report.

        :rtype: ``dict``
        """
        system_information = {
            'hostname': socket.gethostname(),
            'operating_system': {},
            'hardware': {
                'cpu': {},
                'memory': {}
            },
            'python': {},
            'stackstorm': {},
            'mistral': {}
        }

        # Operating system information
        system_information['operating_system']['system'] = platform.system()
        system_information['operating_system']['release'] = platform.release()
        system_information['operating_system']['operating_system'] = platform.platform()
        system_information['operating_system']['platform'] = platform.system()
        system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

        if platform.system().lower() == 'linux':
            distribution = ' '.join(platform.linux_distribution())
            system_information['operating_system']['distribution'] = distribution

        system_information['python']['version'] = sys.version.split('\n')[0]

        # Hardware information
        cpu_info = get_cpu_info()

        if cpu_info:
            core_count = len(cpu_info)
            model = cpu_info[0]['model_name']
            system_information['hardware']['cpu'] = {
                'core_count': core_count,
                'model_name': model
            }
        else:
            # Unsupported platform
            system_information['hardware']['cpu'] = 'unsupported platform'

        memory_info = get_memory_info()

        if memory_info:
            total = memory_info['MemTotal'] / 1024
            free = memory_info['MemFree'] / 1024
            used = (total - free)
            system_information['hardware']['memory'] = {
                'total': total,
                'used': used,
                'free': free
            }
        else:
            # Unsupported platform
            system_information['hardware']['memory'] = 'unsupported platform'

        # StackStorm information
        system_information['stackstorm']['version'] = st2_version

        st2common_path = st2common.__file__
        st2common_path = os.path.dirname(st2common_path)

        if 'st2common/st2common' in st2common_path:
            # Assume we are running a source install
            base_install_path = st2common_path.replace('/st2common/st2common', '')

            revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

            system_information['stackstorm']['installation_method'] = 'source'
            system_information['stackstorm']['revision_hash'] = revision_hash
        else:
            package_list = get_package_list(name_startswith='st2')

            system_information['stackstorm']['installation_method'] = 'package'
            system_information['stackstorm']['packages'] = package_list

        # Mistral information
        repo_path = '/opt/openstack/mistral'
        revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
        system_information['mistral']['installation_method'] = 'source'
        system_information['mistral']['revision_hash'] = revision_hash

        return system_information


def main():
    parser = argparse.ArgumentParser(description='Submit StackStorm debug information to '
                                                 'StackStorm support')
    parser.add_argument('--exclude-logs', action='store_true', default=False,
                        help='Don\'t include logs in the generated tarball')
    parser.add_argument('--exclude-configs', action='store_true', default=False,
                        help='Don\'t include configs in the generated tarball')
    parser.add_argument('--exclude-content', action='store_true', default=False,
                        help='Don\'t include content packs in the generated tarball')
    parser.add_argument('--exclude-system-info', action='store_true', default=False,
                        help='Don\'t include system information in the generated tarball')
    parser.add_argument('--exclude-shell-commands', action='store_true', default=False,
                        help='Don\'t include shell commands output in the generated tarball')
    parser.add_argument('--yes', action='store_true', default=False,
                        help='Run in non-interactive mode and answer "yes" to all the questions')
    parser.add_argument('--review', action='store_true', default=False,
                        help='Generate the tarball, but don\'t encrypt and upload it')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Enable debug mode')
    parser.add_argument('--config', action='store', default=None,
                        help='Path to a YAML file with configuration overrides')
    parser.add_argument('--output', action='store', default=None,
                        help='Specify output file path')
    parser.add_argument('--existing-file', action='store', default=None,
                        help='Specify an existing file to operate on')
    args = parser.parse_args()

    # Ensure that not all options have been excluded
    abort = True
    for arg_name in ARG_NAMES:
        abort &= getattr(args, arg_name, False)
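
    # "abort" stays True only if every --exclude-* flag in ARG_NAMES was passed,
    # i.e. the user excluded everything and the tarball would contain no data.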

    if abort:
        print('Generated tarball would be empty. Aborting.')
        sys.exit(2)

    # Get setting overrides from yaml file if specified
    if args.config:
        with open(args.config, 'r') as yaml_file:
            config_file = yaml.load(yaml_file)
    else:
        config_file = {}

    company_name = config_file.get('company_name', COMPANY_NAME)

    # Defaults
    encrypt = True
    upload = True

    if args.review:
        encrypt = False
        upload = False

    if encrypt:
        # When not running in review mode, GPG needs to be installed and
        # available
        if not GPG_INSTALLED:
            msg = ('"gpg" binary not found, can\'t proceed. Make sure "gpg" is installed '
                   'and available in PATH.')
            raise ValueError(msg)

    if not args.yes and not args.existing_file and upload:
        submitted_content = [name.replace('exclude_', '') for name in ARG_NAMES if
                             not getattr(args, name, False)]
        submitted_content = ', '.join(submitted_content)
        print('This will submit the following information to %s: %s' % (company_name,
                                                                         submitted_content))
        value = six.moves.input('Are you sure you want to proceed? [y/n] ')
        if value.strip().lower() not in ['y', 'yes']:
            print('Aborting')
            sys.exit(1)

    # Prompt user for optional additional context info
    user_info = {}
    if not args.yes and not args.existing_file:
        print('If you want us to get back to you via email, you can provide additional context '
              'such as your name, email and an optional comment')
        value = six.moves.input('Would you like to provide additional context? [y/n] ')
        if value.strip().lower() in ['y', 'yes']:
            user_info['name'] = six.moves.input('Name: ')
            user_info['email'] = six.moves.input('Email: ')
            user_info['comment'] = six.moves.input('Comment: ')

    setup_logging()

    debug_collector = DebugInfoCollector(include_logs=not args.exclude_logs,
                                         include_configs=not args.exclude_configs,
                                         include_content=not args.exclude_content,
                                         include_system_info=not args.exclude_system_info,
                                         include_shell_commands=not args.exclude_shell_commands,
                                         user_info=user_info,
                                         debug=args.debug,
                                         config_file=config_file,
                                         output_path=args.output)

    debug_collector.run(encrypt=encrypt, upload=upload, existing_file=args.existing_file)