# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This script collects and submits information to StackStorm which helps StackStorm
employees debug user problems and issues.

By default the following information is included:

- Logs from /var/log/st2
- StackStorm and Mistral config files (/etc/st2/st2.conf, /etc/mistral/mistral.conf)
- All the content (integration packs).
- Information about your system and StackStorm installation (Operating system,
  Python version, StackStorm version, Mistral version)

Note: This script currently assumes it's running on Linux.
"""

import os
import sys
import shutil
import socket
import logging
import tarfile
import argparse
import platform
import tempfile
import httplib

import six
import yaml
import gnupg
import requests
from distutils.spawn import find_executable

import st2common
from st2common.content.utils import get_packs_base_paths
from st2common import __version__ as st2_version
from st2common import config
from st2common.util import date as date_utils
from st2common.util.shell import run_command
from st2debug.constants import GPG_KEY
from st2debug.constants import GPG_KEY_FINGERPRINT
from st2debug.constants import S3_BUCKET_URL
from st2debug.constants import COMPANY_NAME
from st2debug.constants import ARG_NAMES
from st2debug.utils.fs import copy_files
from st2debug.utils.fs import get_full_file_list
from st2debug.utils.fs import get_dirs_in_path
from st2debug.utils.fs import remove_file
from st2debug.utils.system_info import get_cpu_info
from st2debug.utils.system_info import get_memory_info
from st2debug.utils.system_info import get_package_list
from st2debug.utils.git_utils import get_repo_latest_revision_hash
from st2debug.processors import process_st2_config
from st2debug.processors import process_mistral_config
from st2debug.processors import process_content_pack_dir

LOG = logging.getLogger(__name__)

# Constants
GPG_INSTALLED = find_executable('gpg') is not None

LOG_FILE_PATHS = [
    '/var/log/st2/*.log',
    '/var/log/mistral*.log'
]

ST2_CONFIG_FILE_PATH = '/etc/st2/st2.conf'
MISTRAL_CONFIG_FILE_PATH = '/etc/mistral/mistral.conf'

SHELL_COMMANDS = []

# Directory structure inside tarball
DIRECTORY_STRUCTURE = [
    'configs/',
    'logs/',
    'content/',
    'commands/'
]

OUTPUT_PATHS = {
    'logs': 'logs/',
    'configs': 'configs/',
    'content': 'content/',
    'commands': 'commands/',
    'system_info': 'system_info.yaml',
    'user_info': 'user_info.yaml'
}

# Options which should be removed from the st2 config
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

REMOVE_VALUE_NAME = '**removed**'

OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

DATE_FORMAT = '%Y-%m-%d-%H%M%S'

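# Parse the st2 config (if one is available) so that st2common helpers such as
# get_packs_base_paths() pick up the configured paths. Failures are ignored on
# purpose so the script can still run on a partially configured system.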
try:
    config.parse_args(args=[])
except Exception:
    pass


def setup_logging():
    root = LOG
    root.setLevel(logging.INFO)

    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    root.addHandler(ch)


class DebugInfoCollector(object):
    def __init__(self, include_logs, include_configs, include_content, include_system_info,
                 include_shell_commands=False, user_info=None, debug=False, config_file=None,
                 output_path=None):
        """
        Initialize a DebugInfoCollector object.

        :param include_logs: Include log files in generated archive.
        :type include_logs: ``bool``
        :param include_configs: Include config files in generated archive.
        :type include_configs: ``bool``
        :param include_content: Include pack contents in generated archive.
        :type include_content: ``bool``
        :param include_system_info: Include system information in generated archive.
        :type include_system_info: ``bool``
        :param include_shell_commands: Include shell command output in generated archive.
        :type include_shell_commands: ``bool``
        :param user_info: User info to be included in generated archive.
        :type user_info: ``dict``
        :param debug: Enable debug logging.
        :type debug: ``bool``
        :param config_file: Values from config file to override defaults.
        :type config_file: ``dict``
        :param output_path: Path to write output file to. (optional)
        :type output_path: ``str``
        """
        self.include_logs = include_logs
        self.include_configs = include_configs
        self.include_content = include_content
        self.include_system_info = include_system_info
        self.include_shell_commands = include_shell_commands
        self.user_info = user_info
        self.debug = debug
        self.output_path = output_path

        config_file = config_file or {}
        self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
        self.mistral_config_file_path = config_file.get('mistral_config_file_path',
                                                        MISTRAL_CONFIG_FILE_PATH)
        self.log_files_paths = config_file.get('log_files_paths', LOG_FILE_PATHS[:])
        self.gpg_key = config_file.get('gpg_key', GPG_KEY)
        self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
        self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
        self.company_name = config_file.get('company_name', COMPANY_NAME)
        self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)

        self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
        self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
        self.config_file_paths = [
            self.st2_config_file_path,
            self.mistral_config_file_path
        ]

    def run(self, encrypt=False, upload=False, existing_file=None):
        """
        Run the specified steps.

        :param encrypt: If true, encrypt the archive file.
        :type encrypt: ``bool``
        :param upload: If true, upload the resulting file.
        :type upload: ``bool``
        :param existing_file: Path to an existing archive file. If not specified a new
                              archive will be created.
        :type existing_file: ``str``
        """
        temp_files = []

        try:
            if existing_file:
                working_file = existing_file
            else:
                # Create a new archive if an existing file hasn't been provided
                working_file = self.create_archive()
                if not encrypt and not upload:
                    LOG.info('Debug tarball successfully '
                             'generated and can be reviewed at: %s' % working_file)
                else:
                    temp_files.append(working_file)

            if encrypt:
                working_file = self.encrypt_archive(archive_file_path=working_file)
                if not upload:
                    LOG.info('Encrypted debug tarball successfully generated at: %s' %
                             working_file)
                else:
                    temp_files.append(working_file)

            if upload:
                self.upload_archive(archive_file_path=working_file)
                tarball_name = os.path.basename(working_file)
                LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                         (self.company_name, tarball_name))
                LOG.info('When communicating with support, please let them know the '
                         'tarball name - %s' % tarball_name)
        finally:
            # Remove temp files
            for temp_file in temp_files:
                assert temp_file.startswith('/tmp')
                remove_file(file_path=temp_file)

    def create_archive(self):
        """
        Create an archive with debugging information.

        :return: Path to the generated archive.
        :rtype: ``str``
        """

        try:
            # 1. Create temporary directory with the final directory structure where we will move
            # files which will be processed and included in the tarball
            temp_dir_path = self.create_temp_directories()

            # Prepend temp_dir_path to OUTPUT_PATHS
            output_paths = {}
            for key, path in OUTPUT_PATHS.iteritems():
                output_paths[key] = os.path.join(temp_dir_path, path)

            # 2. Move all the files to the temporary directory
            LOG.info('Collecting files...')
            if self.include_logs:
                self.collect_logs(output_paths['logs'])
            if self.include_configs:
                self.collect_config_files(output_paths['configs'])
            if self.include_content:
                self.collect_pack_content(output_paths['content'])
            if self.include_system_info:
                self.add_system_information(output_paths['system_info'])
            if self.user_info:
                self.add_user_info(output_paths['user_info'])
            if self.include_shell_commands:
                self.add_shell_command_output(output_paths['commands'])

            # 3. Create a tarball
            return self.create_tarball(temp_dir_path)

        except Exception as e:
            LOG.exception('Failed to generate tarball', exc_info=True)
            raise e

    def encrypt_archive(self, archive_file_path):
        """
        Encrypt archive with debugging information using our public key.

        :param archive_file_path: Path to the non-encrypted tarball file.
        :type archive_file_path: ``str``

        :return: Path to the encrypted archive.
        :rtype: ``str``
        """
        try:
            assert archive_file_path.endswith('.tar.gz')

            LOG.info('Encrypting tarball...')
            gpg = gnupg.GPG(verbose=self.debug)

            # Import our public key
            import_result = gpg.import_keys(self.gpg_key)
            # pylint: disable=no-member
            assert import_result.count == 1

            encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
            encrypted_archive_output_file_path = os.path.join('/tmp',
                                                              encrypted_archive_output_file_name)
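            # Encrypt to the support team's public key. always_trust=True is required
            # because the freshly imported key is not signed/trusted locally.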
            with open(archive_file_path, 'rb') as fp:
                gpg.encrypt_file(file=fp,
                                 recipients=self.gpg_key_fingerprint,
                                 always_trust=True,
                                 output=encrypted_archive_output_file_path)
            return encrypted_archive_output_file_path
        except Exception as e:
            LOG.exception('Failed to encrypt archive', exc_info=True)
            raise e

    def upload_archive(self, archive_file_path):
        """
        Upload the encrypted archive.

        :param archive_file_path: Path to the encrypted tarball file.
        :type archive_file_path: ``str``
        """
        try:
            assert archive_file_path.endswith('.asc')

            LOG.debug('Uploading tarball...')
            file_name = os.path.basename(archive_file_path)
            url = self.s3_bucket_url + file_name
            assert url.startswith('https://')

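            # PUT the encrypted tarball to the bucket URL; anything other than a
            # 200 OK response is treated as a failure.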
            with open(archive_file_path, 'rb') as fp:
                response = requests.put(url=url, files={'file': fp})
                assert response.status_code == httplib.OK
        except Exception as e:
            LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
            raise e

    def collect_logs(self, output_path):
        """
        Copy log files to the output path.

        :param output_path: Path where log files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including log files')
        for file_path_glob in self.log_files_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_path)

    def collect_config_files(self, output_path):
        """
        Copy config files to the output path.

        :param output_path: Path where config files will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including config files')
        copy_files(file_paths=self.config_file_paths, destination=output_path)

        st2_config_path = os.path.join(output_path, self.st2_config_file_name)
        process_st2_config(config_path=st2_config_path)

        mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
        process_mistral_config(config_path=mistral_config_path)

    @staticmethod
    def collect_pack_content(output_path):
        """
        Copy pack contents to the output path.

        :param output_path: Path where pack contents will be copied to.
        :type output_path: ``str``
        """
        LOG.debug('Including content')

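        # Copy each packs base path into its own "dir-N" subdirectory so that packs
        # from multiple base paths don't overwrite each other.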
        packs_base_paths = get_packs_base_paths()
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_path, 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                continue

        base_pack_dirs = get_dirs_in_path(file_path=output_path)

        for base_pack_dir in base_pack_dirs:
            pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

            for pack_dir in pack_dirs:
                process_content_pack_dir(pack_dir=pack_dir)

    def add_system_information(self, output_path):
        """
        Collect and write system information to output path.

        :param output_path: Path where system information will be written to.
        :type output_path: ``str``
        """
        LOG.debug('Including system info')

        system_information = yaml.dump(self.get_system_information(),
                                       default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(system_information)

    def add_user_info(self, output_path):
        LOG.debug('Including user info')
        user_info = yaml.dump(self.user_info, default_flow_style=False)

        with open(output_path, 'w') as fp:
            fp.write(user_info)

    def add_shell_command_output(self, output_path):
        """
        Get the output of the configured shell commands and write it to files under
        the output path.

        :param output_path: Directory where output files will be written.
        :type output_path: ``str``
        """
        LOG.debug('Including the required shell commands output files')
        for cmd in self.shell_commands:
            output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
            exit_code, stdout, stderr = run_command(cmd=cmd, shell=True)
            with open(output_file, 'w') as fp:
                fp.write('[BEGIN STDOUT]\n')
                fp.write(stdout)
                fp.write('[END STDOUT]\n')
                fp.write('[BEGIN STDERR]\n')
                fp.write(stderr)
                fp.write('[END STDERR]')

    def create_tarball(self, temp_dir_path):
        """
        Create tarball with the contents of temp_dir_path.

        Tarball will be written to self.output_path, if set. Otherwise it will
        be written to /tmp with a name generated according to OUTPUT_FILENAME_TEMPLATE.

        :param temp_dir_path: Base directory to include in the tarball.
        :type temp_dir_path: ``str``

        :return: Path to the created tarball.
        :rtype: ``str``
        """
        LOG.info('Creating tarball...')
        if self.output_path:
            output_file_path = self.output_path
        else:
            date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
            values = {'hostname': socket.gethostname(), 'date': date}

            output_file_name = OUTPUT_FILENAME_TEMPLATE % values
            output_file_path = os.path.join('/tmp', output_file_name)

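        # arcname='' stores the collected directories at the root of the tarball
        # instead of nesting them under the temporary directory path.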
        with tarfile.open(output_file_path, 'w:gz') as tar:
            tar.add(temp_dir_path, arcname='')

        return output_file_path

    @staticmethod
    def create_temp_directories():
        """
        Create a new temp directory and create the directory structure as defined
        by DIRECTORY_STRUCTURE.

        :return: Path to temp directory.
        :rtype: ``str``
        """
        temp_dir_path = tempfile.mkdtemp()

        for directory_name in DIRECTORY_STRUCTURE:
            full_path = os.path.join(temp_dir_path, directory_name)
            os.mkdir(full_path)

        return temp_dir_path

    @staticmethod
    def format_output_filename(cmd):
        """
        Remove whitespace and special characters from a shell command.

        Used to create filename-safe representations of a shell command.

        :param cmd: Shell command.
        :type cmd: ``str``
        :return: Formatted filename.
        :rtype: ``str``
        """
        return cmd.translate(None, """ !@#$%^&*()[]{};:,./<>?\|`~=+"'""")

    @staticmethod
    def get_system_information():
        """
        Retrieve system information which is included in the report.

        :rtype: ``dict``
        """
        system_information = {
            'hostname': socket.gethostname(),
            'operating_system': {},
            'hardware': {
                'cpu': {},
                'memory': {}
            },
            'python': {},
            'stackstorm': {},
            'mistral': {}
        }

        # Operating system information
        system_information['operating_system']['system'] = platform.system()
        system_information['operating_system']['release'] = platform.release()
        system_information['operating_system']['operating_system'] = platform.platform()
        system_information['operating_system']['platform'] = platform.system()
        system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

        if platform.system().lower() == 'linux':
            distribution = ' '.join(platform.linux_distribution())
            system_information['operating_system']['distribution'] = distribution

        system_information['python']['version'] = sys.version.split('\n')[0]

        # Hardware information
        cpu_info = get_cpu_info()

        if cpu_info:
            core_count = len(cpu_info)
            model = cpu_info[0]['model_name']
            system_information['hardware']['cpu'] = {
                'core_count': core_count,
                'model_name': model
            }
        else:
            # Unsupported platform
            system_information['hardware']['cpu'] = 'unsupported platform'

        memory_info = get_memory_info()

        if memory_info:
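            # MemTotal / MemFree values are assumed to be in kB (as reported by
            # /proc/meminfo) and are converted to MB here.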
            total = memory_info['MemTotal'] / 1024
            free = memory_info['MemFree'] / 1024
            used = (total - free)
            system_information['hardware']['memory'] = {
                'total': total,
                'used': used,
                'free': free
            }
        else:
            # Unsupported platform
            system_information['hardware']['memory'] = 'unsupported platform'

        # StackStorm information
        system_information['stackstorm']['version'] = st2_version

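        # Heuristic: when st2common is imported from a source checkout (path contains
        # 'st2common/st2common'), record the git revision of the checkout; otherwise
        # assume a package-based install and list the installed st2 packages.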
        st2common_path = st2common.__file__
        st2common_path = os.path.dirname(st2common_path)

        if 'st2common/st2common' in st2common_path:
            # Assume we are running source install
            base_install_path = st2common_path.replace('/st2common/st2common', '')

            revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

            system_information['stackstorm']['installation_method'] = 'source'
            system_information['stackstorm']['revision_hash'] = revision_hash
        else:
            package_list = get_package_list(name_startswith='st2')

            system_information['stackstorm']['installation_method'] = 'package'
            system_information['stackstorm']['packages'] = package_list

        # Mistral information
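        # Assumes a source checkout of Mistral at /opt/openstack/mistral.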
        repo_path = '/opt/openstack/mistral'
        revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
        system_information['mistral']['installation_method'] = 'source'
        system_information['mistral']['revision_hash'] = revision_hash

        return system_information


def main():
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--exclude-logs', action='store_true', default=False,
                        help='Don\'t include logs in the generated tarball')
    parser.add_argument('--exclude-configs', action='store_true', default=False,
                        help='Don\'t include configs in the generated tarball')
    parser.add_argument('--exclude-content', action='store_true', default=False,
                        help='Don\'t include content packs in the generated tarball')
    parser.add_argument('--exclude-system-info', action='store_true', default=False,
                        help='Don\'t include system information in the generated tarball')
    parser.add_argument('--exclude-shell-commands', action='store_true', default=False,
                        help='Don\'t include shell commands output in the generated tarball')
    parser.add_argument('--yes', action='store_true', default=False,
                        help='Run in non-interactive mode and answer "yes" to all the questions')
    parser.add_argument('--review', action='store_true', default=False,
                        help='Generate the tarball, but don\'t encrypt and upload it')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Enable debug mode')
    parser.add_argument('--config', action='store', default=None,
                        help='Get required configurations from config file')
    parser.add_argument('--output', action='store', default=None,
                        help='Specify output file path')
    parser.add_argument('--existing-file', action='store', default=None,
                        help='Specify an existing file to operate on')
    args = parser.parse_args()

    # Ensure that not all options have been excluded
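    # (abort stays True only if every exclude flag in ARG_NAMES was passed)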
    abort = True
    for arg_name in ARG_NAMES:
        abort &= getattr(args, arg_name, False)

    if abort:
        print('Generated tarball would be empty. Aborting.')
        sys.exit(2)

    # Get setting overrides from yaml file if specified
    if args.config:
        with open(args.config, 'r') as yaml_file:
            config_file = yaml.load(yaml_file)
    else:
        config_file = {}

    company_name = config_file.get('company_name', COMPANY_NAME)

    # Defaults
    encrypt = True
    upload = True

    if args.review:
        encrypt = False
        upload = False

    if encrypt:
        # When not running in review mode, GPG needs to be installed and
        # available
        if not GPG_INSTALLED:
            msg = ('"gpg" binary not found, can\'t proceed. Make sure "gpg" is installed '
                   'and available in PATH.')
            raise ValueError(msg)

    if not args.yes and not args.existing_file and upload:
        submitted_content = [name.replace('exclude_', '') for name in ARG_NAMES if
                             not getattr(args, name, False)]
        submitted_content = ', '.join(submitted_content)
        print('This will submit the following information to %s: %s' % (company_name,
                                                                         submitted_content))
        value = six.moves.input('Are you sure you want to proceed? [y/n] ')
        if value.strip().lower() not in ['y', 'yes']:
            print('Aborting')
            sys.exit(1)

    # Prompt user for optional additional context info
    user_info = {}
    if not args.yes and not args.existing_file:
        print('If you want us to get back to you via email, you can provide additional context '
              'such as your name, email and an optional comment')
        value = six.moves.input('Would you like to provide additional context? [y/n] ')
        if value.strip().lower() in ['y', 'yes']:
            user_info['name'] = six.moves.input('Name: ')
            user_info['email'] = six.moves.input('Email: ')
            user_info['comment'] = six.moves.input('Comment: ')

    setup_logging()

    debug_collector = DebugInfoCollector(include_logs=not args.exclude_logs,
                                         include_configs=not args.exclude_configs,
                                         include_content=not args.exclude_content,
                                         include_system_info=not args.exclude_system_info,
                                         include_shell_commands=not args.exclude_shell_commands,
                                         user_info=user_info,
                                         debug=args.debug,
                                         config_file=config_file,
                                         output_path=args.output)

    debug_collector.run(encrypt=encrypt, upload=upload, existing_file=args.existing_file)