1
|
|
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more |
2
|
|
|
# contributor license agreements. See the NOTICE file distributed with |
3
|
|
|
# this work for additional information regarding copyright ownership. |
4
|
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0 |
5
|
|
|
# (the "License"); you may not use this file except in compliance with |
6
|
|
|
# the License. You may obtain a copy of the License at |
7
|
|
|
# |
8
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
9
|
|
|
# |
10
|
|
|
# Unless required by applicable law or agreed to in writing, software |
11
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
12
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13
|
|
|
# See the License for the specific language governing permissions and |
14
|
|
|
# limitations under the License. |
15
|
|
|
|
16
|
|
|
""" |
17
|
|
|
This script submits information which helps StackStorm employees debug different |
18
|
|
|
user problems and issues to StackStorm. |
19
|
|
|
|
20
|
|
|
By default the following information is included: |
21
|
|
|
|
22
|
|
|
- Logs from /var/log/st2 |
23
|
|
|
- StackStorm and mistral config file (/etc/st2/st2.conf, /etc/mistral/mistral.conf) |
24
|
|
|
- All the content (integration packs). |
25
|
|
|
- Information about your system and StackStorm installation (Operating system, |
26
|
|
|
Python version, StackStorm version, Mistral version) |
27
|
|
|
|
28
|
|
|
Note: This script currently assumes it's running on Linux. |
29
|
|
|
""" |
30
|
|
|
|
31
|
|
|
import os |
32
|
|
|
import sys |
33
|
|
|
import shutil |
34
|
|
|
import socket |
35
|
|
|
import logging |
36
|
|
|
import tarfile |
37
|
|
|
import argparse |
38
|
|
|
import platform |
39
|
|
|
import tempfile |
40
|
|
|
import httplib |
41
|
|
|
|
42
|
|
|
import six |
43
|
|
|
import yaml |
44
|
|
|
import gnupg |
45
|
|
|
import requests |
46
|
|
|
from distutils.spawn import find_executable |
47
|
|
|
|
48
|
|
|
import st2common |
49
|
|
|
from st2common.content.utils import get_packs_base_paths |
50
|
|
|
from st2common import __version__ as st2_version |
51
|
|
|
from st2common import config |
52
|
|
|
from st2common.util import date as date_utils |
53
|
|
|
from st2common.util.shell import run_command |
54
|
|
|
from st2debug.constants import GPG_KEY |
55
|
|
|
from st2debug.constants import GPG_KEY_FINGERPRINT |
56
|
|
|
from st2debug.constants import S3_BUCKET_URL |
57
|
|
|
from st2debug.constants import COMPANY_NAME |
58
|
|
|
from st2debug.constants import ARG_NAMES |
59
|
|
|
from st2debug.utils.fs import copy_files |
60
|
|
|
from st2debug.utils.fs import get_full_file_list |
61
|
|
|
from st2debug.utils.fs import get_dirs_in_path |
62
|
|
|
from st2debug.utils.fs import remove_file |
63
|
|
|
from st2debug.utils.system_info import get_cpu_info |
64
|
|
|
from st2debug.utils.system_info import get_memory_info |
65
|
|
|
from st2debug.utils.system_info import get_package_list |
66
|
|
|
from st2debug.utils.git_utils import get_repo_latest_revision_hash |
67
|
|
|
from st2debug.processors import process_st2_config |
68
|
|
|
from st2debug.processors import process_mistral_config |
69
|
|
|
from st2debug.processors import process_content_pack_dir |
70
|
|
|
|
71
|
|
|
# Module-level logger shared by all the helpers below
LOG = logging.getLogger(__name__)

# Constants

# True if the "gpg" binary is available in PATH (needed to encrypt the
# tarball before uploading it)
GPG_INSTALLED = find_executable('gpg') is not None

# Glob patterns matching the log files which are collected by default
ST2_LOG_FILES_PATH = '/var/log/st2/*.log'
MISTRAL_LOG_FILES_PATH = '/var/log/mistral*.log'

LOG_FILE_PATHS = [
    ST2_LOG_FILES_PATH,
    MISTRAL_LOG_FILES_PATH
]

# Default config file locations used when no --config YAML file is provided
ST2_CONFIG_FILE_PATH = '/etc/st2/st2.conf'
MISTRAL_CONFIG_FILE_PATH = '/etc/mistral/mistral.conf'

# Base file names (e.g. "st2.conf") derived from the paths above
ST2_CONFIG_FILE_NAME = os.path.split(ST2_CONFIG_FILE_PATH)[1]
MISTRAL_CONFIG_FILE_NAME = os.path.split(MISTRAL_CONFIG_FILE_PATH)[1]

CONFIG_FILE_PATHS = [
    ST2_CONFIG_FILE_PATH,
    MISTRAL_CONFIG_FILE_PATH
]

# Directory structure inside tarball
DIRECTORY_STRUCTURE = [
    'configs/',
    'logs/',
    'content/',
    'commands/'
]

# Options which should be removed from the st2 config
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

# Placeholder written in place of scrubbed (sensitive) config values
REMOVE_VALUE_NAME = '**removed**'

# Name template for the generated tarball,
# e.g. "st2-debug-output-myhost-2015-01-01-00:00:00.tar.gz"
OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'
112
|
|
|
|
113
|
|
|
# Global variables

# Parsed contents of the optional --config YAML file. Populated by
# load_config_yaml_file(); ``None`` until a config file has been loaded.
# Note: a module-level ``global`` statement is a no-op, so the name must be
# bound explicitly here to be guaranteed to exist.
debug_info_config_file_options = None

# Best-effort oslo.config initialization - the script must still work even
# when the st2 config can't be parsed.
try:
    config.parse_args(args=[])
except Exception:
    pass
120
|
|
|
|
121
|
|
|
|
122
|
|
|
def load_config_yaml_file(yaml_file_name):
    """
    Load the debug config YAML file into the module-level options dict.

    :param yaml_file_name: Path to the config YAML file.
    :type yaml_file_name: ``str``
    """
    global debug_info_config_file_options

    with open(yaml_file_name, 'r') as yaml_file:
        # Use safe_load so a malformed or malicious config file can not
        # instantiate arbitrary Python objects via YAML tags (bare
        # yaml.load() without an explicit Loader is unsafe and deprecated).
        debug_info_config_file_options = yaml.safe_load(yaml_file)
131
|
|
|
|
132
|
|
|
|
133
|
|
|
def get_config_details(section_name, option_name=None):
    """
    To get the configurations from the loaded debug config file.

    :param section_name: section name
    :type section_name: ``str``

    :param option_name: option name
    :type option_name: ``str``

    :return: return requested option string if option_name provided else
             return list of conf/log/ file paths or list of list commands.
             ``None`` (after printing a message) if the section is missing.
    :rtype: ``str`` or ``list`` or ``None``
    """
    global debug_info_config_file_options

    # Direct dictionary lookup instead of a manual loop over items - this
    # also avoids the Python 2 only dict.iteritems() call which breaks on
    # Python 3.
    section = debug_info_config_file_options.get(section_name, None)

    if section is None:
        print('section name "%s" does not exist' % section_name)
        return None

    if option_name:
        return section[option_name]

    # list() keeps the return type consistent across Python 2 and 3
    return list(section.values())
152
|
|
|
|
153
|
|
|
|
154
|
|
|
def setup_logging():
    """Configure the module logger to emit INFO+ records to stdout."""
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s - %(message)s'))

    LOG.setLevel(logging.INFO)
    LOG.addHandler(stream_handler)
163
|
|
|
|
164
|
|
|
|
165
|
|
|
def get_system_information():
    """
    Retrieve system information which is included in the report.

    Collected data: hostname, operating system details, CPU / memory info,
    Python version, StackStorm version + installation method and the Mistral
    revision hash.

    :rtype: ``dict``
    """
    system_information = {
        'hostname': socket.gethostname(),
        'operating_system': {},
        'hardware': {
            'cpu': {},
            'memory': {}
        },
        'python': {},
        'stackstorm': {},
        'mistral': {}
    }

    # Operating system information
    system_information['operating_system']['system'] = platform.system()
    system_information['operating_system']['release'] = platform.release()
    system_information['operating_system']['operating_system'] = platform.platform()
    # NOTE(review): 'platform' stores the same value as 'system' above
    system_information['operating_system']['platform'] = platform.system()
    system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

    if platform.system().lower() == 'linux':
        # linux_distribution() is only meaningful on Linux (and was removed
        # from the stdlib in Python 3.8)
        distribution = ' '.join(platform.linux_distribution())
        system_information['operating_system']['distribution'] = distribution

    # Only the first line - sys.version can span multiple lines
    system_information['python']['version'] = sys.version.split('\n')[0]

    # Hardware information
    cpu_info = get_cpu_info()

    if cpu_info:
        # One entry per core; the model name is taken from the first core
        core_count = len(cpu_info)
        model = cpu_info[0]['model_name']
        system_information['hardware']['cpu'] = {
            'core_count': core_count,
            'model_name': model
        }
    else:
        # Unsupported platform
        system_information['hardware']['cpu'] = 'unsupported platform'

    memory_info = get_memory_info()

    if memory_info:
        # Presumably /proc/meminfo style values in kB, converted to MB here
        # - TODO confirm get_memory_info() units
        total = memory_info['MemTotal'] / 1024
        free = memory_info['MemFree'] / 1024
        used = (total - free)
        system_information['hardware']['memory'] = {
            'total': total,
            'used': used,
            'free': free
        }
    else:
        # Unsupported platform
        system_information['hardware']['memory'] = 'unsupported platform'

    # StackStorm information
    system_information['stackstorm']['version'] = st2_version

    st2common_path = st2common.__file__
    st2common_path = os.path.dirname(st2common_path)

    if 'st2common/st2common' in st2common_path:
        # Assume we are running source install
        base_install_path = st2common_path.replace('/st2common/st2common', '')

        revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)

        system_information['stackstorm']['installation_method'] = 'source'
        system_information['stackstorm']['revision_hash'] = revision_hash
    else:
        package_list = get_package_list(name_startswith='st2')

        system_information['stackstorm']['installation_method'] = 'package'
        system_information['stackstorm']['packages'] = package_list

    # Mistral information
    # NOTE(review): assumes a source checkout at this fixed path - TODO
    # confirm behavior for package based Mistral installations
    repo_path = '/opt/openstack/mistral'
    revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
    system_information['mistral']['installation_method'] = 'source'
    system_information['mistral']['revision_hash'] = revision_hash

    return system_information
252
|
|
|
|
253
|
|
|
|
254
|
|
|
def format_output_filename(cmd):
    """
    Format the file name such as removing white spaces and special characters.

    :param cmd: shell command
    :type cmd: ``str``

    :return: formatted output file name
    :rtype: ``str``
    """
    # Single-pass filter instead of one full-string str.replace() scan per
    # special character (the original re-scanned the string O(n*m) times).
    special_characters = ' !@#$%^&*()[]{};:,./<>?\|`~=+"'
    return ''.join(char for char in cmd if char not in special_characters)
265
|
|
|
|
266
|
|
|
|
267
|
|
|
def get_commands_output():
    """
    Run each shell command listed in the debug config file and capture its
    stdout into a file under /tmp.

    :return: output file paths
    :rtype: ``list``
    """
    output_files_list = []

    for command in get_config_details('shell_commands'):
        output_file = os.path.join('/tmp', '%s.txt' % format_output_filename(command))
        _, stdout, _ = run_command(cmd=command, shell=True)

        with open(output_file, 'w') as fp:
            fp.write(stdout)

        output_files_list.append(output_file)

    return output_files_list
282
|
|
|
|
283
|
|
|
|
284
|
|
|
def create_archive(include_logs, include_configs, include_content, include_system_info,
                   include_shell_commands=False, user_info=None, debug=False, config_yaml=None):
    """
    Create an archive with debugging information.

    :param include_logs: True to include log files in the archive.
    :param include_configs: True to include the st2 / mistral config files.
    :param include_content: True to include the installed content packs.
    :param include_system_info: True to include a system_info.yaml summary.
    :param include_shell_commands: True to also capture the output of the
                                   shell commands listed in the config file
                                   (only honored together with config_yaml).
    :param user_info: Optional dict with user supplied contact info which is
                      stored as user_info.yaml.
    :param debug: Not referenced in this function body; accepted for
                  interface symmetry with the callers.
    :param config_yaml: Path to the optional --config YAML file; when set,
                        file locations are read from the loaded config
                        instead of the module level defaults.

    :return: Path to the generated archive.
    :rtype: ``str``
    """
    date = date_utils.get_datetime_utc_now().strftime('%Y-%m-%d-%H:%M:%S')
    values = {'hostname': socket.gethostname(), 'date': date}

    output_file_name = OUTPUT_FILENAME_TEMPLATE % values
    output_file_path = os.path.join('/tmp', output_file_name)

    # 1. Create temporary directory with the final directory structure where we will move files
    # which will be processed and included in the tarball
    temp_dir_path = tempfile.mkdtemp()

    output_paths = {
        'logs': os.path.join(temp_dir_path, 'logs/'),
        'configs': os.path.join(temp_dir_path, 'configs/'),
        'content': os.path.join(temp_dir_path, 'content/'),
        'commands': os.path.join(temp_dir_path, 'commands/'),
        'system_info': os.path.join(temp_dir_path, 'system_info.yaml'),
        'user_info': os.path.join(temp_dir_path, 'user_info.yaml')
    }

    for directory_name in DIRECTORY_STRUCTURE:
        full_path = os.path.join(temp_dir_path, directory_name)
        os.mkdir(full_path)

    # 2. Moves all the files to the temporary directory
    LOG.info('Collecting files...')

    # Resolve file locations either from the loaded config file or from the
    # module level defaults
    if config_yaml:
        st2_conf_file_name = os.path.split(get_config_details('conf_file_paths',
                                                              option_name='st2_config_file_path')
                                           )[1]
        mistral_conf_file_name = os.path.split(get_config_details('conf_file_paths',
                                               option_name='mistral_config_file_path'))[1]
        log_files_paths = get_config_details('log_file_paths')
        config_files_paths = get_config_details('conf_file_paths')
    else:
        st2_conf_file_name = ST2_CONFIG_FILE_NAME
        mistral_conf_file_name = MISTRAL_CONFIG_FILE_NAME
        log_files_paths = LOG_FILE_PATHS
        config_files_paths = CONFIG_FILE_PATHS

    # Logs
    if include_logs:
        LOG.debug('Including log files')
        for file_path_glob in log_files_paths:
            log_file_list = get_full_file_list(file_path_glob=file_path_glob)
            copy_files(file_paths=log_file_list, destination=output_paths['logs'])

    # Config files
    if include_configs:
        LOG.debug('Including config files')
        copy_files(file_paths=config_files_paths, destination=output_paths['configs'])

    # Content
    if include_content:
        LOG.debug('Including content')

        packs_base_paths = get_packs_base_paths()
        # One sub-directory (dir-1, dir-2, ...) per packs base path
        for index, packs_base_path in enumerate(packs_base_paths, 1):
            dst = os.path.join(output_paths['content'], 'dir-%s' % index)

            try:
                shutil.copytree(src=packs_base_path, dst=dst)
            except IOError:
                # Best-effort: skip pack directories which can't be copied
                continue

    # System information
    if include_system_info:
        LOG.debug('Including system info')

        system_information = get_system_information()
        system_information = yaml.dump(system_information, default_flow_style=False)

        with open(output_paths['system_info'], 'w') as fp:
            fp.write(system_information)

    if user_info:
        LOG.debug('Including user info')
        user_info = yaml.dump(user_info, default_flow_style=False)

        with open(output_paths['user_info'], 'w') as fp:
            fp.write(user_info)

    if include_shell_commands and config_yaml:
        LOG.debug('Including the required shell commands output files')
        shell_commands_output_paths = get_commands_output()
        copy_files(file_paths=shell_commands_output_paths, destination=output_paths['commands'])

    # 3. Process the copied files - scrub sensitive values from the configs
    # Configs
    st2_config_path = os.path.join(output_paths['configs'], st2_conf_file_name)
    process_st2_config(config_path=st2_config_path)

    mistral_config_path = os.path.join(output_paths['configs'], mistral_conf_file_name)
    process_mistral_config(config_path=mistral_config_path)

    # Content
    base_pack_dirs = get_dirs_in_path(file_path=output_paths['content'])

    for base_pack_dir in base_pack_dirs:
        pack_dirs = get_dirs_in_path(file_path=base_pack_dir)

        for pack_dir in pack_dirs:
            process_content_pack_dir(pack_dir=pack_dir)

    # 4. Create a tarball
    LOG.info('Creating tarball...')

    with tarfile.open(output_file_path, 'w:gz') as tar:
        for file_path in output_paths.values():
            file_path = os.path.normpath(file_path)
            source_dir = file_path

            # Skip entries which were not generated (e.g. excluded sections)
            if not os.path.exists(source_dir):
                continue

            # NOTE(review): both branches compute the same base name; the
            # '.' check presumably distinguishes files from directories -
            # confirm before simplifying
            if '.' in file_path:
                arcname = os.path.basename(file_path)
            else:
                arcname = os.path.split(file_path)[-1]

            tar.add(source_dir, arcname=arcname)

    return output_file_path
414
|
|
|
|
415
|
|
|
|
416
|
|
|
def encrypt_archive(archive_file_path, debug=False, key_fingerprint=GPG_KEY_FINGERPRINT,
                    key_gpg=GPG_KEY):
    """
    Encrypt the debug tarball with the support team's public key so only
    they can read its contents.

    :param archive_file_path: Path to the non-encrypted tarball file.
    :type archive_file_path: ``str``

    :return: Path to the encrypted archive.
    :rtype: ``str``
    """
    assert archive_file_path.endswith('.tar.gz')

    LOG.info('Encrypting tarball...')
    gpg_client = gnupg.GPG(verbose=debug)

    # Import our public key
    key_import_result = gpg_client.import_keys(key_gpg)
    # pylint: disable=no-member
    assert key_import_result.count == 1

    encrypted_file_path = '%s.asc' % archive_file_path

    with open(archive_file_path, 'rb') as fp:
        gpg_client.encrypt_file(fp, recipients=key_fingerprint, always_trust=True,
                                output=encrypted_file_path)

    return encrypted_file_path
444
|
|
|
|
445
|
|
|
|
446
|
|
|
def upload_archive(archive_file_path, bucket_url=S3_BUCKET_URL):
    """
    Upload the encrypted debug tarball to the S3 bucket via HTTP PUT.

    :param archive_file_path: Path to the encrypted (.asc) archive.
    :type archive_file_path: ``str``

    :param bucket_url: Base https:// URL of the destination bucket.
    :type bucket_url: ``str``
    """
    assert archive_file_path.endswith('.asc')

    LOG.debug('Uploading tarball...')
    file_name = os.path.split(archive_file_path)[1]
    url = bucket_url + file_name
    assert url.startswith('https://')

    # Context manager so the file descriptor is closed even when the upload
    # fails (the original left the opened file handle leaking).
    with open(archive_file_path, 'rb') as fp:
        response = requests.put(url=url, files={'file': fp})

    assert response.status_code == httplib.OK
457
|
|
|
|
458
|
|
|
|
459
|
|
|
def create_and_review_archive(include_logs, include_configs, include_content, include_system_info,
                              include_shell_commands=False, user_info=None, debug=False,
                              config_yaml=None):
    """
    Generate a debug tarball locally so the user can review its contents
    before deciding whether to upload anything.
    """
    try:
        output_path = create_archive(include_logs=include_logs,
                                     include_configs=include_configs,
                                     include_content=include_content,
                                     include_system_info=include_system_info,
                                     include_shell_commands=include_shell_commands,
                                     user_info=user_info,
                                     debug=debug,
                                     config_yaml=config_yaml)
    except Exception:
        LOG.exception('Failed to generate tarball', exc_info=True)
    else:
        LOG.info('Debug tarball successfully generated and can be reviewed at: %s' %
                 output_path)
476
|
|
|
|
477
|
|
|
|
478
|
|
|
def create_and_upload_archive(include_logs, include_configs, include_content,
                              include_system_info, include_shell_commands=False,
                              user_info=None, debug=False, config_yaml=None):
    """
    Create the debug tarball, encrypt it and upload it to the S3 bucket.

    Both the plain-text and the encrypted tarball are removed from /tmp
    afterwards, whether or not the upload succeeded.
    """
    # Resolve upload / encryption settings either from the loaded config
    # file or from the built-in constants
    if config_yaml:
        s3_bucket_url = get_config_details('s3_bucket', option_name='url')
        gpg_key_fingerprint = get_config_details('gpg', option_name='gpg_key_fingerprint')
        gpg_key = get_config_details('gpg', option_name='gpg_key')
        company_name = get_config_details('company_name', option_name='name')
    else:
        s3_bucket_url = S3_BUCKET_URL
        gpg_key_fingerprint = GPG_KEY_FINGERPRINT
        gpg_key = GPG_KEY
        company_name = COMPANY_NAME

    # Initialize up-front so the cleanup code in ``finally`` can always
    # reference both names, even when create_archive() raises before
    # assignment. Unlike resetting them to None inside ``except`` (the old
    # behavior), this also removes partially created tarballs - which
    # contain sensitive data - from /tmp on failure.
    plain_text_output_path = None
    encrypted_output_path = None

    try:
        plain_text_output_path = create_archive(include_logs=include_logs,
                                                include_configs=include_configs,
                                                include_content=include_content,
                                                include_system_info=include_system_info,
                                                include_shell_commands=include_shell_commands,
                                                user_info=user_info,
                                                debug=debug,
                                                config_yaml=config_yaml)
        encrypted_output_path = encrypt_archive(archive_file_path=plain_text_output_path,
                                                key_fingerprint=gpg_key_fingerprint,
                                                key_gpg=gpg_key)
        upload_archive(archive_file_path=encrypted_output_path, bucket_url=s3_bucket_url)
    except Exception:
        LOG.exception('Failed to upload tarball to %s' % company_name, exc_info=True)
    else:
        tarball_name = os.path.basename(encrypted_output_path)
        LOG.info('Debug tarball successfully uploaded to %s (name=%s)' %
                 (company_name, tarball_name))
        LOG.info('When communicating with support, please let them know the tarball name - %s' %
                 tarball_name)
    finally:
        # Remove tarballs
        if plain_text_output_path:
            assert plain_text_output_path.startswith('/tmp')
            remove_file(file_path=plain_text_output_path)
        if encrypted_output_path:
            assert encrypted_output_path.startswith('/tmp')
            remove_file(file_path=encrypted_output_path)
523
|
|
|
|
524
|
|
|
|
525
|
|
|
def main():
    """
    Entry point - parse CLI arguments, confirm with the user, then generate
    and (unless --review) encrypt and upload the debug tarball.
    """
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--exclude-logs', action='store_true', default=False,
                        help='Don\'t include logs in the generated tarball')
    parser.add_argument('--exclude-configs', action='store_true', default=False,
                        help='Don\'t include configs in the generated tarball')
    parser.add_argument('--exclude-content', action='store_true', default=False,
                        help='Don\'t include content packs in the generated tarball')
    parser.add_argument('--exclude-system-info', action='store_true', default=False,
                        help='Don\'t include system information in the generated tarball')
    parser.add_argument('--exclude-shell-commands', action='store_true', default=False,
                        help='Don\'t include shell commands output in the generated tarball')
    parser.add_argument('--yes', action='store_true', default=False,
                        help='Run in non-interactive mode and answer "yes" to all the questions')
    parser.add_argument('--review', action='store_true', default=False,
                        help='Generate the tarball, but don\'t encrypt and upload it')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Enable debug mode')
    parser.add_argument('--config', action='store', default=None,
                        help='Get required configurations from config file')
    args = parser.parse_args()

    # Copy the shared constant - appending to the list below would otherwise
    # permanently mutate ARG_NAMES for every other importer and any
    # subsequent invocation.
    arg_names = list(ARG_NAMES)

    if args.config:
        load_config_yaml_file(args.config)
        company_name = get_config_details('company_name', option_name='name')
        arg_names.append('exclude_shell_commands')
    else:
        company_name = COMPANY_NAME

    # Abort early when every single category has been excluded - the
    # resulting tarball would contain nothing.
    abort = True
    for arg_name in arg_names:
        value = getattr(args, arg_name, False)
        abort &= value

    if abort:
        print('Generated tarball would be empty. Aborting.')
        sys.exit(2)

    submited_content = [name.replace('exclude_', '') for name in arg_names if
                        not getattr(args, name, False)]
    submited_content = ', '.join(submited_content)

    if not args.yes and not args.review:
        # When not running in review mode, GPG needs to be installed and
        # available
        if not GPG_INSTALLED:
            msg = ('"gpg" binary not found, can\'t proceed. Make sure "gpg" is installed '
                   'and available in PATH.')
            raise ValueError(msg)
        print('This will submit the following information to %s: %s' % (company_name,
                                                                        submited_content))
        value = six.moves.input('Are you sure you want to proceed? [y/n] ')
        if value.strip().lower() not in ['y', 'yes']:
            print('Aborting')
            sys.exit(1)

    # Prompt user for optional additional context info
    user_info = {}
    if not args.yes:
        print('If you want us to get back to you via email, you can provide additional context '
              'such as your name, email and an optional comment')
        value = six.moves.input('Would you like to provide additional context? [y/n] ')
        if value.strip().lower() in ['y', 'yes']:
            user_info['name'] = six.moves.input('Name: ')
            user_info['email'] = six.moves.input('Email: ')
            user_info['comment'] = six.moves.input('Comment: ')

    setup_logging()

    if args.review:
        create_and_review_archive(include_logs=not args.exclude_logs,
                                  include_configs=not args.exclude_configs,
                                  include_content=not args.exclude_content,
                                  include_system_info=not args.exclude_system_info,
                                  include_shell_commands=not args.exclude_shell_commands,
                                  user_info=user_info,
                                  debug=args.debug,
                                  config_yaml=args.config)
    else:
        create_and_upload_archive(include_logs=not args.exclude_logs,
                                  include_configs=not args.exclude_configs,
                                  include_content=not args.exclude_content,
                                  include_system_info=not args.exclude_system_info,
                                  include_shell_commands=not args.exclude_shell_commands,
                                  user_info=user_info,
                                  debug=args.debug,
                                  config_yaml=args.config)
613
|
|
|
|