Passed
Pull Request — master (#133)
by Matěj
01:22
created

org_fedora_oscap.common.set_packages_data()   A

Complexity

Conditions 2

Size

Total Lines 13
Code Lines 7

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 7
dl 0
loc 13
rs 10
c 0
b 0
f 0
cc 2
nop 1
1
#
2
# Copyright (C) 2013  Red Hat, Inc.
3
#
4
# This copyrighted material is made available to anyone wishing to use,
5
# modify, copy, or redistribute it subject to the terms and conditions of
6
# the GNU General Public License v.2, or (at your option) any later version.
7
# This program is distributed in the hope that it will be useful, but WITHOUT
8
# ANY WARRANTY expressed or implied, including the implied warranties of
9
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
10
# Public License for more details.  You should have received a copy of the
11
# GNU General Public License along with this program; if not, write to the
12
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
13
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
14
# source code or documentation are not subject to the GNU General Public
15
# License and may only be used or replicated with the express permission of
16
# Red Hat, Inc.
17
#
18
# Red Hat Author(s): Vratislav Podzimek <[email protected]>
19
#
20
21
"""
22
Module with various classes and functions needed by the OSCAP addon that are
23
not specific to any installation mode (tui, gui, ks).
24
25
"""
26
27
import os
28
import tempfile
29
import subprocess
30
import zipfile
31
import tarfile
32
33
import cpioarchive
34
import re
35
import logging
36
37
from collections import namedtuple
38
import gettext
39
from functools import wraps
40
from dasbus.identifier import DBusServiceIdentifier
41
from pyanaconda.core import constants
42
from pyanaconda.core.dbus import DBus
43
from pyanaconda.core.constants import PAYLOAD_TYPE_DNF
44
from pyanaconda.modules.common.constants.namespaces import ADDONS_NAMESPACE
45
from pyanaconda.modules.common.constants.services import NETWORK, PAYLOADS
46
from pyanaconda.modules.common.structures.payload import PackagesConfigurationData
47
from pyanaconda.threading import threadMgr, AnacondaThread
48
from org_fedora_oscap import utils
49
from org_fedora_oscap.data_fetch import fetch_data
50
51
# Reuse Anaconda's own logger so addon messages end up in the installer logs.
log = logging.getLogger("anaconda")
52
53
54
# mimick pyanaconda/core/i18n.py
55
def _(string):
56
    if string:
57
        return gettext.translation("oscap-anaconda-addon", fallback=True).gettext(string)
58
    else:
59
        return ""
60
61
62
def N_(string):
    """Mark *string* for later translation without translating it now."""
    return string
63
64
65
# everything else should be private
__all__ = ["run_oscap_remediate", "get_fix_rules_pre",
           "wait_and_fetch_net_data", "extract_data", "strip_content_dir",
           "OSCAPaddonError", "get_payload_proxy", "get_packages_data",
           "set_packages_data"]

INSTALLATION_CONTENT_DIR = "/tmp/openscap_data/"
TARGET_CONTENT_DIR = "/root/openscap_data/"

SSG_DIR = "/usr/share/xml/scap/ssg/content/"

# Pick the SCAP Security Guide data stream matching the installed product.
SSG_CONTENT = "ssg-rhel7-ds.xml"
if constants.shortProductName != 'anaconda':
    if constants.shortProductName == 'fedora':
        SSG_CONTENT = "ssg-fedora-ds.xml"
    else:
        # Use only the major version (e.g. "8" from "8.1").  split(), not
        # strip(): strip(".")[0] would take just the first character and
        # break two-digit majors such as "10".
        SSG_CONTENT = "ssg-%s%s-ds.xml" % (constants.shortProductName,
                                           constants.productVersion.split(".")[0])

RESULTS_PATH = utils.join_paths(TARGET_CONTENT_DIR,
                                "eval_remediate_results.xml")
REPORT_PATH = utils.join_paths(TARGET_CONTENT_DIR,
                               "eval_remediate_report.html")

# 'system' attribute of XCCDF fix elements meant for the pre-install phase
PRE_INSTALL_FIX_SYSTEM_ATTR = "urn:redhat:anaconda:pre"

THREAD_FETCH_DATA = "AnaOSCAPdataFetchThread"

SUPPORTED_ARCHIVES = (".zip", ".tar", ".tar.gz", ".tar.bz2", )

SUPPORTED_CONTENT_TYPES = (
    "datastream", "rpm", "archive", "scap-security-guide",
)

SUPPORTED_URL_PREFIXES = (
    "http://", "https://", "ftp://",  # LABEL:?, hdaX:?,
)

# buffer size for reading and writing out data (in bytes)
IO_BUF_SIZE = 2 * 1024 * 1024

# DBus constants
KDUMP = DBusServiceIdentifier(
    namespace=ADDONS_NAMESPACE,
    basename="Kdump",
    message_bus=DBus
)
111
112
113
class OSCAPaddonError(Exception):
    """Base exception class for OSCAP addon related errors."""
117
118
119
class OSCAPaddonNetworkError(OSCAPaddonError):
    """Exception class for OSCAP addon related network errors."""
123
124
125
class ExtractionError(OSCAPaddonError):
    """Exception class for errors when extracting archives."""
129
130
131
# severity levels for messages produced by the rules evaluation
MESSAGE_TYPE_FATAL = 0
MESSAGE_TYPE_WARNING = 1
MESSAGE_TYPE_INFO = 2

# message returned from the rules evaluation:
#   origin -- class (inherited from RuleHandler) that generated the message
#   type   -- one of the MESSAGE_TYPE_* constants defined above
#   text   -- the actual message that should be displayed, logged, ...
RuleMessage = namedtuple("RuleMessage", "origin type text")
140
141
142
class SubprocessLauncher(object):
    """Run an external command, capture its output and collect oscap errors."""

    def __init__(self, args):
        # args -- argv list for the command to run
        self.args = args
        self.stdout = ""
        self.stderr = ""
        self.messages = []
        self.returncode = None

    def execute(self, **kwargs):
        """Run the command; kwargs are forwarded to subprocess.Popen."""
        try:
            proc = subprocess.Popen(self.args, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE, **kwargs)
        except OSError as oserr:
            raise OSCAPaddonError("Failed to run the oscap tool: %s" % oserr)

        stdout, stderr = proc.communicate()
        self.stdout = stdout.decode()
        self.stderr = stderr.decode(errors="replace")

        # scrape oscap error lines out of stderr
        self.messages = (re.findall(r'OpenSCAP Error:.*', self.stderr)
                         + re.findall(r'E: oscap:.*', self.stderr))

        self.returncode = proc.returncode

    def log_messages(self):
        """Log every collected error message as a warning."""
        for message in self.messages:
            log.warning("OSCAP addon: " + message)
169
170
171
def get_fix_rules_pre(profile, fpath, ds_id="", xccdf_id="", tailoring=""):
    """
    Get fix rules for the pre-installation environment for a given profile in
    a given datastream and checklist in a given file.

    :see: run_oscap_remediate
    :see: _run_oscap_gen_fix
    :return: fix rules for a given profile
    :rtype: str

    """

    return _run_oscap_gen_fix(profile, fpath,
                              template=PRE_INSTALL_FIX_SYSTEM_ATTR,
                              ds_id=ds_id,
                              xccdf_id=xccdf_id,
                              tailoring=tailoring)
186
187
188
def _run_oscap_gen_fix(profile, fpath, template, ds_id="", xccdf_id="",
                       tailoring=""):
    """
    Run the oscap tool on a given file to get the contents of fix elements
    with the 'system' attribute equal to a given template for a given
    datastream, checklist and profile.

    :see: run_oscap_remediate
    :param template: the value of the 'system' attribute of the fix elements
    :type template: str
    :return: oscap tool's stdout
    :rtype: str

    """

    if not profile:
        return ""

    args = ["oscap", "xccdf", "generate", "fix",
            "--template=%s" % template]

    # oscap uses the default profile by default
    if profile.lower() != "default":
        args.append("--profile=%s" % profile)

    # optional arguments, appended only when a value was given
    for option, value in (("--datastream-id=%s", ds_id),
                          ("--xccdf-id=%s", xccdf_id),
                          ("--tailoring-file=%s", tailoring)):
        if value:
            args.append(option % value)

    args.append(fpath)

    proc = SubprocessLauncher(args)
    proc.execute()
    proc.log_messages()

    if proc.returncode != 0:
        msg = "Failed to generate fix rules with the oscap tool: %s" % proc.stderr
        raise OSCAPaddonError(msg)

    return proc.stdout
229
230
231
def run_oscap_remediate(profile, fpath, ds_id="", xccdf_id="", tailoring="",
                        chroot=""):
    """
    Run the evaluation and remediation with the oscap tool on a given file,
    doing the remediation as defined in a given profile defined in a given
    checklist that is a part of a given datastream. If requested, run in
    chroot.

    :param profile: id of the profile that will drive the remediation
    :type profile: str
    :param fpath: path to a file with SCAP content
    :type fpath: str
    :param ds_id: ID of the datastream that contains the checklist defining
                  the profile
    :type ds_id: str
    :param xccdf_id: ID of the checklist that defines the profile
    :type xccdf_id: str
    :param tailoring: path to a tailoring file
    :type tailoring: str
    :param chroot: path to the root the oscap tool should be run in
    :type chroot: str
    :return: oscap tool's stdout (summary of the rules, checks and fixes)
    :rtype: str

    """

    if not profile:
        return ""

    def do_chroot():
        """Change root (and cwd) before exec'ing oscap, if requested."""
        if chroot and chroot != "/":
            os.chroot(chroot)
            os.chdir("/")

    # make sure the directory for the results exists
    results_dir = os.path.dirname(RESULTS_PATH)
    if chroot:
        results_dir = os.path.normpath(chroot + "/" + results_dir)
    utils.ensure_dir_exists(results_dir)

    args = ["oscap", "xccdf", "eval",
            "--remediate",
            "--results=%s" % RESULTS_PATH,
            "--report=%s" % REPORT_PATH]

    # oscap uses the default profile by default
    if profile.lower() != "default":
        args.append("--profile=%s" % profile)

    # optional arguments, appended only when a value was given
    for option, value in (("--datastream-id=%s", ds_id),
                          ("--xccdf-id=%s", xccdf_id),
                          ("--tailoring-file=%s", tailoring)):
        if value:
            args.append(option % value)

    args.append(fpath)

    proc = SubprocessLauncher(args)
    proc.execute(preexec_fn=do_chroot)
    proc.log_messages()

    if proc.returncode not in (0, 2):
        # 0 -- success; 2 -- no error, but checks/remediation failed
        msg = ("Content evaluation and remediation with the oscap tool "
               "failed: %s" % proc.stderr)
        raise OSCAPaddonError(msg)

    return proc.stdout
300
301
302
def wait_and_fetch_net_data(url, out_file, ca_certs=None):
    """
    Wait for a network connection and start a thread that fetches data over
    the network.

    :see: org_fedora_oscap.data_fetch.fetch_data
    :return: the name of the thread running fetch_data
    :rtype: str

    """

    # if NM is still trying to establish a connection, wait until it is done
    connecting_thread = threadMgr.get(constants.THREAD_WAIT_FOR_CONNECTING_NM)
    if connecting_thread:
        connecting_thread.join()

    if not NETWORK.get_proxy().Connected:
        raise OSCAPaddonNetworkError("Network connection needed to fetch data.")

    # register and run the fetching thread
    threadMgr.add(AnacondaThread(name=THREAD_FETCH_DATA,
                                 target=fetch_data,
                                 args=(url, out_file, ca_certs),
                                 fatal=False))

    return THREAD_FETCH_DATA
332
333
334
def extract_data(archive, out_dir, ensure_has_files=None):
    """
    Function that extracts the given archive to the given output directory. It
    tries to find out the archive type by the file name.

    :param archive: path to the archive file that should be extracted
    :type archive: str
    :param out_dir: output directory the archive should be extracted to
    :type out_dir: str
    :param ensure_has_files: relative paths to the files that must exist in the
                             archive
    :type ensure_has_files: iterable of strings or None
    :return: a list of files and directories extracted from the archive
    :rtype: [str]
    :raise ExtractionError: if the archive is invalid, a required file is
                            missing or the archive type is unsupported

    """

    # get rid of empty file paths; tolerate ensure_has_files=None (the
    # default) -- iterating None directly would raise TypeError
    ensure_has_files = [fpath for fpath in ensure_has_files or () if fpath]

    if archive.endswith(".zip"):
        # ZIP file
        try:
            zfile = zipfile.ZipFile(archive, "r")
        except zipfile.BadZipfile as err:
            raise ExtractionError(str(err))

        # close the archive even if one of the checks below raises
        with zfile:
            # paths of the regular files in the archive (dirs end with "/")
            files = set(info.filename for info in zfile.filelist
                        if not info.filename.endswith("/"))
            for fpath in ensure_has_files:
                if fpath not in files:
                    msg = "File '%s' not found in the archive '%s'" % (fpath,
                                                                       archive)
                    raise ExtractionError(msg)

            utils.ensure_dir_exists(out_dir)
            zfile.extractall(path=out_dir)
            return [utils.join_paths(out_dir, info.filename)
                    for info in zfile.filelist]
    elif archive.endswith(".tar"):
        # plain tarball
        return _extract_tarball(archive, out_dir, ensure_has_files, None)
    elif archive.endswith(".tar.gz"):
        # gzipped tarball
        return _extract_tarball(archive, out_dir, ensure_has_files, "gz")
    elif archive.endswith(".tar.bz2"):
        # bzipped tarball
        return _extract_tarball(archive, out_dir, ensure_has_files, "bz2")
    elif archive.endswith(".rpm"):
        # RPM
        return _extract_rpm(archive, out_dir, ensure_has_files)
    # elif other types of archives
    else:
        raise ExtractionError("Unsupported archive type")
391
392
393
def _extract_tarball(archive, out_dir, ensure_has_files, alg):
    """
    Extract the given TAR archive to the given output directory and make sure
    the given files exist in the archive.

    :see: extract_data
    :param alg: compression algorithm used for the tarball
    :type alg: str (one of "gz", "bz2") or None
    :return: a list of files and directories extracted from the archive
    :rtype: [str]
    :raise ExtractionError: if the tarball is invalid or a required file is
                            missing

    """

    if alg and alg not in ("gz", "bz2",):
        raise ExtractionError("Unsupported compression algorithm")

    mode = "r"
    if alg:
        mode += ":%s" % alg

    try:
        tfile = tarfile.TarFile.open(archive, mode)
    except tarfile.TarError as err:
        raise ExtractionError(str(err))

    # use a context manager so the archive is closed even when one of the
    # checks below raises (the original leaked the handle on that path)
    with tfile:
        # paths of the regular files found in the archive
        files = set(member.path for member in tfile.getmembers()
                    if member.isfile())

        for fpath in ensure_has_files or ():
            if fpath not in files:
                msg = "File '%s' not found in the archive '%s'" % (fpath, archive)
                raise ExtractionError(msg)

        utils.ensure_dir_exists(out_dir)
        tfile.extractall(path=out_dir)
        result = [utils.join_paths(out_dir, member.path)
                  for member in tfile.getmembers()]

    return result
433
434
435
def _extract_rpm(rpm_path, root="/", ensure_has_files=None):
    """
    Extract the given RPM into the directory tree given by the root argument
    and make sure the given file exists in the archive.

    :param rpm_path: path to the RPM file that should be extracted
    :type rpm_path: str
    :param root: root of the directory tree the RPM should be extracted into
    :type root: str
    :param ensure_has_files: relative paths to the files that must exist in the
                             RPM
    :type ensure_has_files: iterable of strings or None
    :return: a list of files and directories extracted from the archive
    :rtype: [str]

    """

    # run rpm2cpio and process the output with the cpioarchive module
    temp_fd, temp_path = tempfile.mkstemp(prefix="oscap_rpm")
    proc = subprocess.Popen(["rpm2cpio", rpm_path], stdout=temp_fd)
    proc.wait()
    if proc.returncode != 0:
        # NOTE(review): on this error path temp_fd stays open and temp_path is
        # never unlinked -- confirm whether the leak is acceptable here
        msg = "Failed to convert RPM '%s' to cpio archive" % rpm_path
        raise ExtractionError(msg)

    os.close(temp_fd)

    try:
        archive = cpioarchive.CpioArchive(temp_path)
    except cpioarchive.CpioError as err:
        raise ExtractionError(str(err))

    # get entries from the archive (supports only iteration over entries)
    entries = set(entry for entry in archive)

    # cpio entry names (paths) start with the dot
    # NOTE(review): lstrip(".") removes ALL leading dots, so a name like
    # "./.config" would lose the hidden-file dot as well -- verify intended
    entry_names = [entry.name.lstrip(".") for entry in entries]

    for fpath in ensure_has_files or ():
        # RPM->cpio entries have absolute paths
        if fpath not in entry_names and \
           os.path.join("/", fpath) not in entry_names:
            msg = "File '%s' not found in the archive '%s'" % (fpath, rpm_path)
            raise ExtractionError(msg)

    try:
        for entry in entries:
            # skip non-file entries (directories, hard links have size 0 here)
            if entry.size == 0:
                continue
            # recreate the entry's directory under the given root
            dirname = os.path.dirname(entry.name.lstrip("."))
            out_dir = os.path.normpath(root + dirname)
            utils.ensure_dir_exists(out_dir)

            # never overwrite an existing file
            out_fpath = os.path.normpath(root + entry.name.lstrip("."))
            if os.path.exists(out_fpath):
                continue
            # copy the entry out in IO_BUF_SIZE chunks
            with open(out_fpath, "wb") as out_file:
                buf = entry.read(IO_BUF_SIZE)
                while buf:
                    out_file.write(buf)
                    buf = entry.read(IO_BUF_SIZE)
    except (IOError, cpioarchive.CpioError) as e:
        raise ExtractionError(e)

    # cleanup
    archive.close()
    os.unlink(temp_path)

    return [os.path.normpath(root + name) for name in entry_names]
504
505
506
def strip_content_dir(fpaths, phase="preinst"):
    """
    Strip content directory prefix from the file paths for either
    pre-installation or post-installation phase.

    :param fpaths: iterable of file paths to strip content directory prefix
                   from
    :type fpaths: iterable of strings
    :param phase: specifies pre-installation or post-installation phase
    :type phase: "preinst" or "postinst"
    :return: the same iterable of file paths as given with the content
             directory prefix stripped
    :rtype: same type as fpaths

    """

    if phase == "preinst":
        prefix_len = len(INSTALLATION_CONTENT_DIR)
    else:
        prefix_len = len(TARGET_CONTENT_DIR)

    return utils.keep_type_map(lambda fpath: fpath[prefix_len:], fpaths)
528
529
530
def ssg_available(root="/"):
    """
    Tries to find the SCAP Security Guide under the given root.

    :return: True if SSG was found under the given root, False otherwise

    """

    content_path = utils.join_paths(root, SSG_DIR + SSG_CONTENT)
    return os.path.exists(content_path)
539
540
541
def get_content_name(data):
    """Return the base file name of the content referenced by *data*.

    :raise ValueError: for scap-security-guide content (no single file) or
                       for an unsupported URL
    """
    if data.content_type == "scap-security-guide":
        raise ValueError("Using scap-security-guide, no single content file")

    # strip the first supported URL prefix, if any; otherwise fall back to a
    # generic name (the leading "/" keeps the rsplit below well-formed)
    rest = "/anonymous_content"
    matching = [prefix for prefix in SUPPORTED_URL_PREFIXES
                if data.content_url.startswith(prefix)]
    if matching:
        rest = data.content_url[len(matching[0]):]

    parts = rest.rsplit("/", 1)
    if len(parts) != 2:
        raise ValueError("Unsupported url '%s'" % data.content_url)

    return parts[1]
556
557
558
def get_raw_preinst_content_path(data):
    """Path to the raw (unextracted, ...) pre-installation content file"""
    if data.content_type == "scap-security-guide":
        log.debug("Using scap-security-guide, no single content file")
        return None

    return utils.join_paths(INSTALLATION_CONTENT_DIR, get_content_name(data))
566
567
568
def get_preinst_content_path(data):
    """Path to the pre-installation content file"""
    if data.content_type == "scap-security-guide":
        # SSG is not copied to the standard place
        return data.content_path

    if data.content_type == "datastream":
        name = get_content_name(data)
    else:
        name = data.content_path

    return utils.join_paths(INSTALLATION_CONTENT_DIR, name)
584
585
586
def get_postinst_content_path(data):
    """Path to the post-installation content file"""
    if data.content_type in ("rpm", "scap-security-guide"):
        # no path magic in case of RPM (SSG is installed as an RPM)
        return data.content_path

    if data.content_type == "datastream":
        name = get_content_name(data)
    else:
        name = data.content_path

    return utils.join_paths(TARGET_CONTENT_DIR, name)
602
603
604
def get_preinst_tailoring_path(data):
    """Path to the pre-installation tailoring file (if any)"""
    tailoring = data.tailoring_path
    if not tailoring:
        return ""

    return utils.join_paths(INSTALLATION_CONTENT_DIR, tailoring)
613
614
615
def get_postinst_tailoring_path(data):
    """Path to the post-installation tailoring file (if any)"""
    tailoring = data.tailoring_path
    if not tailoring:
        return ""

    if data.content_type == "rpm":
        # no path magic in case of RPM
        return tailoring

    return utils.join_paths(TARGET_CONTENT_DIR, tailoring)
628
629
630
def get_payload_proxy():
    """Get the DBus proxy of the active payload.

    :return: a DBus proxy
    :raise ValueError: if no payload is active
    """
    active_path = PAYLOADS.get_proxy().ActivePayload
    if not active_path:
        raise ValueError("Active payload is not set.")

    return PAYLOADS.get_proxy(active_path)
642
643
644
def get_packages_data() -> PackagesConfigurationData:
    """Get the DBus data with the packages configuration.

    :return: a packages configuration
    """
    proxy = get_payload_proxy()

    # only the DNF payload supports package selection
    if proxy.Type != PAYLOAD_TYPE_DNF:
        return PackagesConfigurationData()

    return PackagesConfigurationData.from_structure(proxy.Packages)
657
658
659
def set_packages_data(data: PackagesConfigurationData):
    """Set the DBus data with the packages configuration.

    :param data: a packages configuration
    """
    proxy = get_payload_proxy()

    # only the DNF payload supports package selection
    if proxy.Type != PAYLOAD_TYPE_DNF:
        log.debug("The payload doesn't support packages.")
        return

    return proxy.SetPackages(PackagesConfigurationData.to_structure(data))
673