1
|
2 |
|
from __future__ import absolute_import |
2
|
2 |
|
from __future__ import print_function |
3
|
|
|
|
4
|
2 |
|
import os |
5
|
2 |
|
import os.path |
6
|
2 |
|
from collections import defaultdict |
7
|
2 |
|
from copy import deepcopy |
8
|
2 |
|
import datetime |
9
|
2 |
|
import re |
10
|
2 |
|
import sys |
11
|
2 |
|
from xml.sax.saxutils import escape |
12
|
|
|
|
13
|
2 |
|
import yaml |
14
|
|
|
|
15
|
2 |
|
from .constants import XCCDF_PLATFORM_TO_CPE |
16
|
2 |
|
from .constants import PRODUCT_TO_CPE_MAPPING |
17
|
2 |
|
from .constants import XCCDF_REFINABLE_PROPERTIES |
18
|
2 |
|
from .rules import get_rule_dir_id, get_rule_dir_yaml, is_rule_dir |
19
|
2 |
|
from .rule_yaml import parse_prodtype |
20
|
|
|
|
21
|
2 |
|
from .checks import is_cce_format_valid, is_cce_value_valid |
22
|
2 |
|
from .yaml import DocumentationNotComplete, open_and_expand, open_and_macro_expand |
23
|
2 |
|
from .utils import required_key, mkdir_p |
24
|
|
|
|
25
|
2 |
|
from .xml import ElementTree as ET |
26
|
2 |
|
from .shims import unicode_func |
27
|
|
|
|
28
|
|
|
|
29
|
2 |
|
def add_sub_element(parent, tag, data):
    """
    Creates a new child element under parent with tag tag, and sets
    data as the content under the tag. In particular, data is a string
    to be parsed as an XML tree, allowing sub-elements of children to be
    added.

    If data should not be parsed as an XML tree, either escape the contents
    before passing into this function, or use ElementTree.SubElement().

    Returns the newly created subelement of type tag.

    Raises RuntimeError when data cannot be parsed as XML.
    """
    # This is used because our YAML data contain XML and XHTML elements
    # ET.SubElement() escapes the < > characters by &lt; and &gt;
    # and therefore it does not add child elements
    # we need to do a hack instead
    # TODO: Remove this function after we move to Markdown everywhere in SSG
    ustr = unicode_func("<{0}>{1}</{0}>").format(tag, data)

    try:
        # Parse the wrapped string so any markup inside `data` becomes
        # real child elements instead of escaped text.
        element = ET.fromstring(ustr.encode("utf-8"))
    except Exception:
        msg = ("Error adding subelement to an element '{0}' from string: '{1}'"
               .format(parent.tag, ustr))
        raise RuntimeError(msg)

    parent.append(element)
    return element
57
|
|
|
|
58
|
|
|
|
59
|
2 |
|
def reorder_according_to_ordering(unordered, ordering, regex=None):
    """Reorder items so that priority items come first, rest afterwards.

    Items matching `regex` are emitted first, grouped by the first entry
    of `ordering` whose text occurs in the item; all remaining items are
    appended afterwards.

    :param unordered: iterable of strings to reorder
    :param ordering: list of priority substrings, highest priority first;
        also used to build the default matching pattern
    :param regex: optional regex pattern string selecting the items that
        are subject to priority ordering; when None, a pattern matching
        any of the `ordering` entries is built
    :return: new list with prioritized items first, remainder afterwards
    """
    ordered = []
    if regex is None:
        regex = "|".join(["({0})".format(item) for item in ordering])
    regex = re.compile(regex)

    items_to_order = list(filter(regex.match, unordered))

    # Bug fix: the remainder used to be collected via `set(unordered)` and
    # appended by set iteration, whose order is not deterministic across
    # interpreter runs (string hash randomization). Deduplicate while
    # preserving the original input order so output is stable.
    seen = set()
    remaining = []
    for item in unordered:
        if item not in seen:
            seen.add(item)
            remaining.append(item)

    for priority_type in ordering:
        for item in items_to_order:
            if priority_type in item and item in seen:
                ordered.append(item)
                seen.remove(item)
                remaining.remove(item)
    ordered.extend(remaining)
    return ordered
75
|
|
|
|
76
|
|
|
|
77
|
2 |
|
def add_warning_elements(element, warnings):
    """Append one <warning> child element to `element` per warnings entry.

    `warnings` is a list of single-entry dicts ([{dict}, {dict}, ...])
    rather than one mapping, because multiple warnings may legitimately
    share the same category, which is valid in SCAP and in our content:

        warnings:
            - general: Some general warning
            - general: Some other general warning
            - general: |-
                Some really long multiline general warning

    Each dict is expected to contain exactly one key/value pair; the key
    becomes the warning's "category" attribute and the value its content.
    """
    for warning_dict in warnings:
        category = list(warning_dict.keys())[0]
        text = list(warning_dict.values())[0]
        warning_element = add_sub_element(element, "warning", text)
        warning_element.set("category", category)
92
|
|
|
|
93
|
|
|
|
94
|
2 |
|
def add_nondata_subelements(element, subelement, attribute, attr_data):
    """Append one empty <subelement> child per item of `attr_data`,
    each carrying the item as the value of `attribute` and no text content.

    For example, <requires id="my_required_id"/>
    """
    for attr_value in attr_data:
        child = ET.SubElement(element, subelement)
        child.set(attribute, attr_value)
100
|
|
|
|
101
|
|
|
|
102
|
2 |
|
class Profile(object):
    """Represents XCCDF profile

    Holds the profile's metadata plus its rule selections, variable
    selectors and rule refinements, and can round-trip between the
    project's .profile YAML format and shorthand XML.
    """

    def __init__(self, id_):
        self.id_ = id_
        self.title = ""
        self.description = ""
        # id of the profile this one extends, or None
        self.extends = None
        # rule ids selected by this profile
        self.selected = []
        # rule ids explicitly deselected ("!rule_id" entries)
        self.unselected = []
        # maps value (variable) id -> selector chosen by this profile
        self.variables = dict()
        # maps rule id -> list of (property, value) refinement tuples
        self.refine_rules = defaultdict(list)
        self.metadata = None
        self.reference = None

    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None):
        """Build a Profile from a .profile YAML file.

        The profile id is the file basename without its extension.
        Returns None when open_and_expand yields no contents.
        Raises RuntimeError when the file contains unrecognized keys.
        """
        yaml_contents = open_and_expand(yaml_file, env_yaml)
        if yaml_contents is None:
            return None

        basename, _ = os.path.splitext(os.path.basename(yaml_file))

        profile = cls(basename)
        profile.title = required_key(yaml_contents, "title")
        del yaml_contents["title"]
        profile.description = required_key(yaml_contents, "description")
        del yaml_contents["description"]
        profile.extends = yaml_contents.pop("extends", None)
        selection_entries = required_key(yaml_contents, "selections")
        if selection_entries:
            profile._parse_selections(selection_entries)
        del yaml_contents["selections"]

        profile.reference = yaml_contents.pop("reference", None)

        # At the moment, metadata is not used to build content
        if "metadata" in yaml_contents:
            del yaml_contents["metadata"]

        # Anything left in yaml_contents is a key we don't know about.
        if yaml_contents:
            raise RuntimeError("Unparsed YAML data in '%s'.\n\n%s"
                               % (yaml_file, yaml_contents))

        return profile

    def dump_yaml(self, file_name, documentation_complete=True):
        """Write this profile back out as a .profile-style YAML file.

        Selections, deselections ("!rule"), variable selectors
        ("var=value") and refinements ("rule.prop=value") are all
        serialized into the single "selections" list.
        """
        to_dump = {}
        to_dump["documentation_complete"] = documentation_complete
        to_dump["title"] = self.title
        to_dump["description"] = self.description
        to_dump["reference"] = self.reference
        if self.metadata is not None:
            to_dump["metadata"] = self.metadata

        if self.extends is not None:
            to_dump["extends"] = self.extends

        selections = []
        for item in self.selected:
            selections.append(item)
        for item in self.unselected:
            selections.append("!"+item)
        for varname in self.variables.keys():
            selections.append(varname+"="+self.variables.get(varname))
        for rule, refinements in self.refine_rules.items():
            for prop, val in refinements:
                selections.append("{rule}.{property}={value}"
                                  .format(rule=rule, property=prop, value=val))
        to_dump["selections"] = selections
        with open(file_name, "w+") as f:
            yaml.dump(to_dump, f, indent=4)

    def _parse_selections(self, entries):
        """Dispatch each selection entry by its syntax.

        Entry forms (checked in this order):
          - "rule_id.property=value" -> rule refinement
          - "varname=value"          -> variable selector
          - "!rule_id"               -> deselect the rule
          - "rule_id"                -> select the rule

        Raises ValueError for a refinement of a non-refinable property.
        """
        for item in entries:
            if "." in item:
                rule, refinement = item.split(".", 1)
                property_, value = refinement.split("=", 1)
                if property_ not in XCCDF_REFINABLE_PROPERTIES:
                    msg = ("Property '{property_}' cannot be refined. "
                           "Rule properties that can be refined are {refinables}. "
                           "Fix refinement '{rule_id}.{property_}={value}' in profile '{profile}'."
                           .format(property_=property_, refinables=XCCDF_REFINABLE_PROPERTIES,
                                   rule_id=rule, value=value, profile=self.id_)
                           )
                    raise ValueError(msg)
                self.refine_rules[rule].append((property_, value))
            elif "=" in item:
                varname, value = item.split("=", 1)
                self.variables[varname] = value
            elif item.startswith("!"):
                self.unselected.append(item[1:])
            else:
                self.selected.append(item)

    def to_xml_element(self):
        """Serialize this profile into an XCCDF <Profile> XML element."""
        element = ET.Element('Profile')
        element.set("id", self.id_)
        if self.extends:
            element.set("extends", self.extends)
        title = add_sub_element(element, "title", self.title)
        title.set("override", "true")
        desc = add_sub_element(element, "description", self.description)
        desc.set("override", "true")

        if self.reference:
            # reference is free text, so escape it before XML parsing
            add_sub_element(element, "reference", escape(self.reference))

        for selection in self.selected:
            select = ET.Element("select")
            select.set("idref", selection)
            select.set("selected", "true")
            element.append(select)

        for selection in self.unselected:
            unselect = ET.Element("select")
            unselect.set("idref", selection)
            unselect.set("selected", "false")
            element.append(unselect)

        for value_id, selector in self.variables.items():
            refine_value = ET.Element("refine-value")
            refine_value.set("idref", value_id)
            refine_value.set("selector", selector)
            element.append(refine_value)

        for refined_rule, refinement_list in self.refine_rules.items():
            refine_rule = ET.Element("refine-rule")
            refine_rule.set("idref", refined_rule)
            for refinement in refinement_list:
                refine_rule.set(refinement[0], refinement[1])
            element.append(refine_rule)

        return element

    def get_rule_selectors(self):
        """Return ids of all rules this profile selects or deselects."""
        return list(self.selected + self.unselected)

    def get_variable_selectors(self):
        """Return the mapping of value id -> selector used by this profile."""
        return self.variables

    def validate_refine_rules(self, rules):
        """Raise ValueError when a refinement targets a rule that does not
        exist in `rules` or that this profile does not select/deselect."""
        existing_rule_ids = [r.id_ for r in rules]
        for refine_rule, refinement_list in self.refine_rules.items():
            # Take first refinement to illustrate where the error is
            # all refinements in list are invalid, so it doesn't really matter
            a_refinement = refinement_list[0]

            if refine_rule not in existing_rule_ids:
                msg = (
                    "You are trying to refine a rule that doesn't exist. "
                    "Rule '{rule_id}' was not found in the benchmark. "
                    "Please check all rule refinements for rule: '{rule_id}', for example: "
                    "- {rule_id}.{property_}={value}' in profile {profile_id}."
                    .format(rule_id=refine_rule, profile_id=self.id_,
                            property_=a_refinement[0], value=a_refinement[1])
                )
                raise ValueError(msg)

            if refine_rule not in self.get_rule_selectors():
                msg = ("- {rule_id}.{property_}={value}' in profile '{profile_id}' is refining "
                       "a rule that is not selected by it. The refinement will not have any "
                       "noticeable effect. Either select the rule or remove the rule refinement."
                       .format(rule_id=refine_rule, property_=a_refinement[0],
                               value=a_refinement[1], profile_id=self.id_)
                       )
                raise ValueError(msg)

    def validate_variables(self, variables):
        """Raise ValueError when this profile refers to an unknown value
        or uses a selector the value does not define."""
        variables_by_id = dict()
        for var in variables:
            variables_by_id[var.id_] = var

        for var_id, our_val in self.variables.items():
            if var_id not in variables_by_id:
                all_vars_list = [" - %s" % v for v in variables_by_id.keys()]
                msg = (
                    "Value '{var_id}' in profile '{profile_name}' is not known. "
                    "We know only variables:\n{var_names}"
                    .format(
                        var_id=var_id, profile_name=self.id_,
                        var_names="\n".join(sorted(all_vars_list)))
                )
                raise ValueError(msg)

            allowed_selectors = [str(s) for s in variables_by_id[var_id].options.keys()]
            if our_val not in allowed_selectors:
                msg = (
                    "Value '{var_id}' in profile '{profile_name}' "
                    "uses the selector '{our_val}'. "
                    "This is not possible, as only selectors {all_selectors} are available. "
                    "Either change the selector used in the profile, or "
                    "add the selector-value pair to the variable definition."
                    .format(
                        var_id=var_id, profile_name=self.id_, our_val=our_val,
                        all_selectors=allowed_selectors,
                    )
                )
                raise ValueError(msg)

    def validate_rules(self, rules, groups):
        """Raise ValueError when a selector in this profile names a group
        (groups may not be selected) or a rule missing from the benchmark."""
        existing_rule_ids = [r.id_ for r in rules]
        rule_selectors = self.get_rule_selectors()
        for id_ in rule_selectors:
            if id_ in groups:
                msg = (
                    "You have selected a group '{group_id}' instead of a "
                    "rule. Groups have no effect in the profile and are not "
                    "allowed to be selected. Please remove '{group_id}' "
                    "from profile '{profile_id}' before proceeding."
                    .format(group_id=id_, profile_id=self.id_)
                )
                raise ValueError(msg)
            if id_ not in existing_rule_ids:
                msg = (
                    "Rule '{rule_id}' was not found in the benchmark. Please "
                    "remove rule '{rule_id}' from profile '{profile_id}' "
                    "before proceeding."
                    .format(rule_id=id_, profile_id=self.id_)
                )
                raise ValueError(msg)

    def __sub__(self, other):
        """Return a new Profile that is the difference self - other:
        selections and variables present in self but not in other
        (or with a different value in other)."""
        profile = Profile(self.id_)
        profile.title = self.title
        profile.description = self.description
        profile.extends = self.extends
        profile.selected = list(set(self.selected) - set(other.selected))
        profile.selected.sort()
        profile.unselected = list(set(self.unselected) - set(other.unselected))
        profile.variables = dict((k, v) for (k, v) in self.variables.items()
                                 if k not in other.variables or v != other.variables[k])
        return profile
336
|
|
|
|
337
|
|
|
|
338
|
2 |
|
class ResolvableProfile(Profile):
    """A Profile that can merge in the content of the profile it extends.

    After resolve() runs, the profile stands alone: `extends` is cleared,
    selections from the whole inheritance chain are merged and sorted,
    and `resolved` is True.
    """
    def __init__(self, * args, ** kwargs):
        super(ResolvableProfile, self).__init__(* args, ** kwargs)
        # set to True once resolve() has merged the extended profile in
        self.resolved = False

    def resolve(self, all_profiles):
        """Merge the extended profile (resolved recursively) into this one.

        :param all_profiles: dict mapping profile id -> profile, used to
            look up the profile named by self.extends
        :raises RuntimeError: when self.extends names an unknown profile
        """
        if self.resolved:
            return

        resolved_selections = set(self.selected)
        if self.extends:
            if self.extends not in all_profiles:
                msg = (
                    "Profile {name} extends profile {extended}, but "
                    "only profiles {profiles} are available for resolution."
                    .format(name=self.id_, extended=self.extends,
                            profiles=list(all_profiles.keys())))
                raise RuntimeError(msg)
            # Resolve the parent first so we inherit its fully merged state.
            extended_profile = all_profiles[self.extends]
            extended_profile.resolve(all_profiles)

            extended_selects = set(extended_profile.selected)
            resolved_selections.update(extended_selects)

            # Our own variable selectors win over inherited ones.
            updated_variables = dict(extended_profile.variables)
            updated_variables.update(self.variables)
            self.variables = updated_variables

            # Deep copy so removals below don't mutate the parent profile.
            extended_refinements = deepcopy(extended_profile.refine_rules)
            updated_refinements = self._subtract_refinements(extended_refinements)
            updated_refinements.update(self.refine_rules)
            self.refine_rules = updated_refinements

        # Deselections apply after inheritance, so "!rule" can undo an
        # inherited selection.
        for uns in self.unselected:
            resolved_selections.discard(uns)

        self.unselected = []
        self.extends = None

        self.selected = sorted(resolved_selections)

        self.resolved = True

    def _subtract_refinements(self, extended_refinements):
        """
        Given a dict of rule refinements from the extended profile,
        "undo" every refinement prefixed with '!' in this profile.
        """
        for rule, refinements in list(self.refine_rules.items()):
            if rule.startswith("!"):
                for prop, val in refinements:
                    extended_refinements[rule[1:]].remove((prop, val))
                # The "!" entry is consumed here and must not be serialized.
                del self.refine_rules[rule]
        return extended_refinements
392
|
|
|
|
393
|
|
|
|
394
|
2 |
|
class Value(object):
    """Represents XCCDF Value

    A tunable variable with a set of selector -> value options, loaded
    from the project's .var YAML format.
    """

    def __init__(self, id_):
        self.id_ = id_
        self.title = ""
        self.description = ""
        self.type_ = "string"
        self.operator = "equals"
        self.interactive = False
        # maps selector -> concrete value; "default" selector is special
        self.options = {}
        # list of single-entry {category: text} dicts
        self.warnings = []

    @staticmethod
    def from_yaml(yaml_file, env_yaml=None):
        """Build a Value from a .var YAML file.

        The value id is the file basename without its extension.
        Returns None when open_and_macro_expand yields no contents.
        Raises ValueError for an unknown operator or a malformed warning,
        RuntimeError for unrecognized keys.
        """
        yaml_contents = open_and_macro_expand(yaml_file, env_yaml)
        if yaml_contents is None:
            return None

        value_id, _ = os.path.splitext(os.path.basename(yaml_file))
        value = Value(value_id)
        value.title = required_key(yaml_contents, "title")
        del yaml_contents["title"]
        value.description = required_key(yaml_contents, "description")
        del yaml_contents["description"]
        value.type_ = required_key(yaml_contents, "type")
        del yaml_contents["type"]
        value.operator = yaml_contents.pop("operator", "equals")
        possible_operators = ["equals", "not equal", "greater than",
                              "less than", "greater than or equal",
                              "less than or equal", "pattern match"]

        if value.operator not in possible_operators:
            raise ValueError(
                "Found an invalid operator value '%s' in '%s'. "
                "Expected one of: %s"
                % (value.operator, yaml_file, ", ".join(possible_operators))
            )

        # YAML value is a string; only the literal "true" (any case)
        # turns interactivity on.
        value.interactive = \
            yaml_contents.pop("interactive", "false").lower() == "true"

        value.options = required_key(yaml_contents, "options")
        del yaml_contents["options"]
        value.warnings = yaml_contents.pop("warnings", [])

        for warning_list in value.warnings:
            if len(warning_list) != 1:
                raise ValueError("Only one key/value pair should exist for each dictionary")

        if yaml_contents:
            raise RuntimeError("Unparsed YAML data in '%s'.\n\n%s"
                               % (yaml_file, yaml_contents))

        return value

    def to_xml_element(self):
        """Serialize this Value into an XCCDF <Value> XML element."""
        value = ET.Element('Value')
        value.set('id', self.id_)
        value.set('type', self.type_)
        if self.operator != "equals":  # equals is the default
            value.set('operator', self.operator)
        if self.interactive:  # False is the default
            value.set('interactive', 'true')
        title = ET.SubElement(value, 'title')
        title.text = self.title
        add_sub_element(value, 'description', self.description)
        add_warning_elements(value, self.warnings)

        for selector, option in self.options.items():
            # do not confuse Value with big V with value with small v
            # value is child element of Value
            value_small = ET.SubElement(value, 'value')
            # by XCCDF spec, default value is value without selector
            if selector != "default":
                value_small.set('selector', str(selector))
            value_small.text = str(option)

        return value

    def to_file(self, file_name):
        """Write this Value's XML serialization to file_name."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)
479
|
|
|
|
480
|
|
|
|
481
|
2 |
|
class Benchmark(object):
    """Represents XCCDF Benchmark

    Top-level container aggregating profiles, values, groups and rules,
    loaded from the project's benchmark YAML and serialized to shorthand
    XCCDF XML.
    """
    def __init__(self, id_):
        self.id_ = id_
        self.title = ""
        self.status = ""
        self.description = ""
        self.notice_id = ""
        self.notice_description = ""
        self.front_matter = ""
        self.rear_matter = ""
        # CPE idrefs emitted as <platform> elements
        self.cpes = []
        self.version = "0.1"
        self.profiles = []
        # maps value id -> Value
        self.values = {}
        self.bash_remediation_fns_group = None
        # maps group id -> group
        self.groups = {}
        # maps rule id -> rule
        self.rules = {}

        # This is required for OCIL clauses
        conditional_clause = Value("conditional_clause")
        conditional_clause.title = "A conditional clause for check statements."
        conditional_clause.description = conditional_clause.title
        conditional_clause.type_ = "string"
        conditional_clause.options = {"": "This is a placeholder"}

        self.add_value(conditional_clause)

    @classmethod
    def from_yaml(cls, yaml_file, id_, product_yaml=None):
        """Build a Benchmark with id `id_` from a benchmark YAML file.

        Returns None when open_and_macro_expand yields no contents.
        Raises RuntimeError when the file contains unrecognized keys.
        """
        yaml_contents = open_and_macro_expand(yaml_file, product_yaml)
        if yaml_contents is None:
            return None

        benchmark = cls(id_)
        benchmark.title = required_key(yaml_contents, "title")
        del yaml_contents["title"]
        benchmark.status = required_key(yaml_contents, "status")
        del yaml_contents["status"]
        benchmark.description = required_key(yaml_contents, "description")
        del yaml_contents["description"]
        notice_contents = required_key(yaml_contents, "notice")
        benchmark.notice_id = required_key(notice_contents, "id")
        del notice_contents["id"]
        benchmark.notice_description = required_key(notice_contents,
                                                    "description")
        del notice_contents["description"]
        # Only drop "notice" if fully consumed; leftovers trigger the
        # unparsed-data error below.
        if not notice_contents:
            del yaml_contents["notice"]

        benchmark.front_matter = required_key(yaml_contents,
                                              "front-matter")
        del yaml_contents["front-matter"]
        benchmark.rear_matter = required_key(yaml_contents,
                                             "rear-matter")
        del yaml_contents["rear-matter"]
        benchmark.version = str(required_key(yaml_contents, "version"))
        del yaml_contents["version"]

        if yaml_contents:
            raise RuntimeError("Unparsed YAML data in '%s'.\n\n%s"
                               % (yaml_file, yaml_contents))

        if product_yaml:
            benchmark.cpes = PRODUCT_TO_CPE_MAPPING[product_yaml["product"]]

        return benchmark

    def add_profiles_from_dir(self, dir_, env_yaml):
        """Load every .profile file in dir_ and append the resulting
        Profiles; files with other extensions are skipped with a warning,
        incomplete-documentation profiles are skipped silently."""
        for dir_item in os.listdir(dir_):
            dir_item_path = os.path.join(dir_, dir_item)
            if not os.path.isfile(dir_item_path):
                continue

            _, ext = os.path.splitext(os.path.basename(dir_item_path))
            if ext != '.profile':
                sys.stderr.write(
                    "Encountered file '%s' while looking for profiles, "
                    "extension '%s' is unknown. Skipping..\n"
                    % (dir_item, ext)
                )
                continue

            try:
                new_profile = Profile.from_yaml(dir_item_path, env_yaml)
            except DocumentationNotComplete:
                continue
            except Exception as exc:
                msg = ("Error building profile from '{fname}': '{error}'"
                       .format(fname=dir_item_path, error=str(exc)))
                raise RuntimeError(msg)
            if new_profile is None:
                continue

            self.profiles.append(new_profile)

    def add_bash_remediation_fns_from_file(self, file_):
        """Parse the bash remediation functions XML file and keep its
        root element for inclusion into the benchmark output."""
        if not file_:
            # bash-remediation-functions.xml doesn't exist
            return

        tree = ET.parse(file_)
        self.bash_remediation_fns_group = tree.getroot()

    def to_xml_element(self):
        """Serialize the whole benchmark into an XCCDF <Benchmark>
        element, including profiles, values, groups and rules."""
        root = ET.Element('Benchmark')
        root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
        root.set('xmlns:xhtml', 'http://www.w3.org/1999/xhtml')
        root.set('xmlns:dc', 'http://purl.org/dc/elements/1.1/')
        root.set('id', 'product-name')
        root.set('xsi:schemaLocation',
                 'http://checklists.nist.gov/xccdf/1.1 xccdf-1.1.4.xsd')
        root.set('style', 'SCAP_1.1')
        root.set('resolved', 'false')
        root.set('xml:lang', 'en-US')
        status = ET.SubElement(root, 'status')
        status.set('date', datetime.date.today().strftime("%Y-%m-%d"))
        status.text = self.status
        add_sub_element(root, "title", self.title)
        add_sub_element(root, "description", self.description)
        notice = add_sub_element(root, "notice", self.notice_description)
        notice.set('id', self.notice_id)
        add_sub_element(root, "front-matter", self.front_matter)
        add_sub_element(root, "rear-matter", self.rear_matter)

        for idref in self.cpes:
            plat = ET.SubElement(root, "platform")
            plat.set("idref", idref)

        version = ET.SubElement(root, 'version')
        version.text = self.version
        ET.SubElement(root, "metadata")

        for profile in self.profiles:
            root.append(profile.to_xml_element())

        for value in self.values.values():
            root.append(value.to_xml_element())
        if self.bash_remediation_fns_group is not None:
            root.append(self.bash_remediation_fns_group)

        groups_in_bench = list(self.groups.keys())
        priority_order = ["system", "services"]
        groups_in_bench = reorder_according_to_ordering(groups_in_bench, priority_order)

        # Make system group the first, followed by services group
        for group_id in groups_in_bench:
            group = self.groups.get(group_id)
            # Products using application benchmark don't have system or services group
            if group is not None:
                root.append(group.to_xml_element())

        for rule in self.rules.values():
            root.append(rule.to_xml_element())

        return root

    def to_file(self, file_name):
        """Write the benchmark's XML serialization to file_name."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)

    def add_value(self, value):
        """Register a Value under its id; None is ignored."""
        if value is None:
            return
        self.values[value.id_] = value

    def add_group(self, group):
        """Register a group under its id; None is ignored."""
        if group is None:
            return
        self.groups[group.id_] = group

    def add_rule(self, rule):
        """Register a rule under its id; None is ignored."""
        if rule is None:
            return
        self.rules[rule.id_] = rule

    def to_xccdf(self):
        """We can easily extend this script to generate a valid XCCDF instead
        of SSG SHORTHAND.
        """
        raise NotImplementedError

    def __str__(self):
        return self.id_
667
|
|
|
|
668
|
|
|
|
669
|
2 |
|
class Group(object): |
670
|
|
|
"""Represents XCCDF Group |
671
|
|
|
""" |
672
|
2 |
|
ATTRIBUTES_TO_PASS_ON = ( |
673
|
|
|
"platform", |
674
|
|
|
) |
675
|
|
|
|
676
|
2 |
|
def __init__(self, id_): |
677
|
|
|
self.id_ = id_ |
678
|
|
|
self.prodtype = "all" |
679
|
|
|
self.title = "" |
680
|
|
|
self.description = "" |
681
|
|
|
self.warnings = [] |
682
|
|
|
self.requires = [] |
683
|
|
|
self.conflicts = [] |
684
|
|
|
self.values = {} |
685
|
|
|
self.groups = {} |
686
|
|
|
self.rules = {} |
687
|
|
|
self.platform = None |
688
|
|
|
|
689
|
2 |
|
@classmethod |
690
|
2 |
|
def from_yaml(cls, yaml_file, env_yaml=None): |
691
|
|
|
yaml_contents = open_and_macro_expand(yaml_file, env_yaml) |
692
|
|
|
if yaml_contents is None: |
693
|
|
|
return None |
694
|
|
|
|
695
|
|
|
group_id = os.path.basename(os.path.dirname(yaml_file)) |
696
|
|
|
group = cls(group_id) |
697
|
|
|
group.prodtype = yaml_contents.pop("prodtype", "all") |
698
|
|
|
group.title = required_key(yaml_contents, "title") |
699
|
|
|
del yaml_contents["title"] |
700
|
|
|
group.description = required_key(yaml_contents, "description") |
701
|
|
|
del yaml_contents["description"] |
702
|
|
|
group.warnings = yaml_contents.pop("warnings", []) |
703
|
|
|
group.conflicts = yaml_contents.pop("conflicts", []) |
704
|
|
|
group.requires = yaml_contents.pop("requires", []) |
705
|
|
|
group.platform = yaml_contents.pop("platform", None) |
706
|
|
|
|
707
|
|
|
for warning_list in group.warnings: |
708
|
|
|
if len(warning_list) != 1: |
709
|
|
|
raise ValueError("Only one key/value pair should exist for each dictionary") |
710
|
|
|
|
711
|
|
|
if yaml_contents: |
712
|
|
|
raise RuntimeError("Unparsed YAML data in '%s'.\n\n%s" |
713
|
|
|
% (yaml_file, yaml_contents)) |
714
|
|
|
group.validate_prodtype(yaml_file) |
715
|
|
|
return group |
716
|
|
|
|
717
|
2 |
|
def validate_prodtype(self, yaml_file): |
718
|
|
|
for ptype in self.prodtype.split(","): |
719
|
|
|
if ptype.strip() != ptype: |
720
|
|
|
msg = ( |
721
|
|
|
"Comma-separated '{prodtype}' prodtype " |
722
|
|
|
"in {yaml_file} contains whitespace." |
723
|
|
|
.format(prodtype=self.prodtype, yaml_file=yaml_file)) |
724
|
|
|
raise ValueError(msg) |
725
|
|
|
|
726
|
2 |
|
    def to_xml_element(self):
        """Build and return the <Group> XCCDF XML element for this group.

        Child elements are emitted in a deliberate order: title/description
        metadata, platform, values, then rules (reordered so that package/
        service state rules come first), then sub-groups (reordered by a
        fixed priority list).  The ordering affects remediation order, so
        do not change it casually.

        Raises ValueError when self.platform has no CPE mapping.
        """
        group = ET.Element('Group')
        group.set('id', self.id_)
        # "all" is the implicit default, so it is omitted from the output.
        if self.prodtype != "all":
            group.set("prodtype", self.prodtype)
        title = ET.SubElement(group, 'title')
        title.text = self.title
        # description may itself contain XML/XHTML markup, hence the helper.
        add_sub_element(group, 'description', self.description)
        add_warning_elements(group, self.warnings)
        add_nondata_subelements(group, "requires", "id", self.requires)
        add_nondata_subelements(group, "conflicts", "id", self.conflicts)

        if self.platform:
            platform_el = ET.SubElement(group, "platform")
            try:
                platform_cpe = XCCDF_PLATFORM_TO_CPE[self.platform]
            except KeyError:
                raise ValueError("Unsupported platform '%s' in rule '%s'." % (self.platform, self.id_))
            platform_el.set("idref", platform_cpe)

        for _value in self.values.values():
            group.append(_value.to_xml_element())

        # Rules that install or remove packages affect remediation
        # of other rules.
        # When packages installed/removed rules come first:
        # The Rules are ordered in a more logical way, and the
        # remediation order is natural: first the package is installed, then configured.
        rules_in_group = list(self.rules.keys())
        regex = r'(package_.*_(installed|removed))|(service_.*_(enabled|disabled))$'
        priority_order = ["installed", "removed", "enabled", "disabled"]
        rules_in_group = reorder_according_to_ordering(rules_in_group, priority_order, regex)

        # Add rules in priority order, first all packages installed, then removed,
        # followed by services enabled, then disabled
        for rule_id in rules_in_group:
            group.append(self.rules.get(rule_id).to_xml_element())

        # Add the sub groups after any current level group rules.
        # As package installed/removed and service enabled/disabled rules are usually in the
        # top level group, this ensures groups that further configure a package or service
        # are after rules that install or remove it.
        groups_in_group = list(self.groups.keys())
        # The accounts group has to precede the auditing group because
        # the rule package_screen_installed is desired to be executed before the rule
        # audit_rules_privileged_commands; otherwise the latter rule
        # does not catch the newly installed screen binary during remediation
        # and reports a failure.
        # The software group should come before the
        # bootloader-grub2 group because of a conflict between the
        # rules rpm_verify_permissions and file_permissions_grub2_cfg:
        # specific rules concerning permissions should
        # be applied after the general rpm_verify_permissions.
        # The FIPS group should come before Crypto - if we want to set a different (stricter) Crypto Policy than FIPS.
        # The firewalld_activation group must come before ruleset_modifications; otherwise the
        # remediations for ruleset_modifications won't work.
        # Rules from group disabling_ipv6 must precede rules from configuring_ipv6,
        # otherwise the remediation prints an error although it is successful.
        priority_order = [
            "accounts", "auditing",
            "software", "bootloader-grub2",
            "fips", "crypto",
            "firewalld_activation", "ruleset_modifications",
            "disabling_ipv6", "configuring_ipv6"
        ]
        groups_in_group = reorder_according_to_ordering(groups_in_group, priority_order)
        for group_id in groups_in_group:
            _group = self.groups[group_id]
            group.append(_group.to_xml_element())

        return group
797
|
|
|
|
798
|
2 |
|
def to_file(self, file_name): |
799
|
|
|
root = self.to_xml_element() |
800
|
|
|
tree = ET.ElementTree(root) |
801
|
|
|
tree.write(file_name) |
802
|
|
|
|
803
|
2 |
|
def add_value(self, value): |
804
|
|
|
if value is None: |
805
|
|
|
return |
806
|
|
|
self.values[value.id_] = value |
807
|
|
|
|
808
|
2 |
|
def add_group(self, group): |
809
|
|
|
if group is None: |
810
|
|
|
return |
811
|
|
|
if self.platform and not group.platform: |
812
|
|
|
group.platform = self.platform |
813
|
|
|
self.groups[group.id_] = group |
814
|
|
|
self._pass_our_properties_on_to(group) |
815
|
|
|
|
816
|
2 |
|
def _pass_our_properties_on_to(self, obj): |
817
|
|
|
for attr in self.ATTRIBUTES_TO_PASS_ON: |
818
|
|
|
if hasattr(obj, attr) and getattr(obj, attr) is None: |
819
|
|
|
setattr(obj, attr, getattr(self, attr)) |
820
|
|
|
|
821
|
2 |
|
def add_rule(self, rule): |
822
|
|
|
if rule is None: |
823
|
|
|
return |
824
|
|
|
if self.platform and not rule.platform: |
825
|
|
|
rule.platform = self.platform |
826
|
|
|
self.rules[rule.id_] = rule |
827
|
|
|
self._pass_our_properties_on_to(rule) |
828
|
|
|
|
829
|
2 |
|
    def __str__(self):
        # A group's string form is simply its unique ID.
        return self.id_
831
|
|
|
|
832
|
|
|
|
833
|
2 |
|
class Rule(object):
    """Represents an XCCDF Rule loaded from a rule YAML file."""

    # Maps YAML keys to zero-argument callables producing their defaults.
    # A callable that returns an Exception instance marks the key as
    # mandatory: the exception is raised when the key is missing
    # (see _set_attributes_from_dict).
    YAML_KEYS_DEFAULTS = {
        "prodtype": lambda: "all",
        "title": lambda: RuntimeError("Missing key 'title'"),
        "description": lambda: RuntimeError("Missing key 'description'"),
        "rationale": lambda: RuntimeError("Missing key 'rationale'"),
        "severity": lambda: RuntimeError("Missing key 'severity'"),
        "references": lambda: dict(),
        "identifiers": lambda: dict(),
        "ocil_clause": lambda: None,
        "ocil": lambda: None,
        "oval_external_content": lambda: None,
        "warnings": lambda: list(),
        "conflicts": lambda: list(),
        "requires": lambda: list(),
        "platform": lambda: None,
        "inherited_platforms": lambda: list(),
        "template": lambda: None,
    }

    def __init__(self, id_):
        self.id_ = id_
        self.prodtype = "all"
        self.title = ""
        self.description = ""
        self.rationale = ""
        self.severity = "unknown"
        self.references = {}
        self.identifiers = {}
        self.ocil_clause = None
        self.ocil = None
        self.oval_external_content = None
        self.warnings = []
        self.requires = []
        self.conflicts = []
        self.platform = None
        self.inherited_platforms = []  # platforms inherited from the group
        self.template = None

    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None):
        """Load a rule from a YAML file after macro expansion.

        Returns None when the expanded YAML yields no contents.
        Raises RuntimeError on missing mandatory keys or unparsed
        leftovers, ValueError on malformed warnings/identifiers/references.
        """
        yaml_file = os.path.normpath(yaml_file)

        yaml_contents = open_and_macro_expand(yaml_file, env_yaml)
        if yaml_contents is None:
            return None

        rule_id, ext = os.path.splitext(os.path.basename(yaml_file))
        # A rule stored as <rule_dir>/rule.yml takes its ID from the
        # directory structure instead of the file name.
        if rule_id == "rule" and ext == ".yml":
            rule_id = get_rule_dir_id(yaml_file)

        rule = cls(rule_id)

        try:
            rule._set_attributes_from_dict(yaml_contents)
        except RuntimeError as exc:
            msg = ("Error processing '{fname}': {err}"
                   .format(fname=yaml_file, err=str(exc)))
            raise RuntimeError(msg)

        # Each warning entry must be a single-key mapping.
        for warning_list in rule.warnings:
            if len(warning_list) != 1:
                raise ValueError("Only one key/value pair should exist for each dictionary")

        # Anything left over was not recognized - fail loudly.
        if yaml_contents:
            raise RuntimeError("Unparsed YAML data in '%s'.\n\n%s"
                               % (yaml_file, yaml_contents))

        rule.validate_prodtype(yaml_file)
        rule.validate_identifiers(yaml_file)
        rule.validate_references(yaml_file)
        return rule

    def _verify_stigid_format(self, product):
        """Ensure the 'stigid' reference holds at most a single STIG ID.

        `product` is currently unused; it is kept for interface parity
        with the other per-product verification hooks.
        """
        stig_id = self.references.get("stigid", None)
        if not stig_id:
            return
        if "," in stig_id:
            raise ValueError("Rules can not have multiple STIG IDs.")

    def _verify_disa_cci_format(self):
        """Validate that every 'disa' reference matches CCI-XXXXXX."""
        cci_id = self.references.get("disa", None)
        if not cci_id:
            return
        cci_ex = re.compile(r'^CCI-[0-9]{6}$')
        for cci in cci_id.split(","):
            if not cci_ex.match(cci):
                raise ValueError("CCI '{}' is in the wrong format! "
                                 "Format should be similar to: "
                                 "CCI-XXXXXX".format(cci))
        self.references["disa"] = cci_id

    def normalize(self, product):
        """Resolve all product-qualified data for the given product.

        Raises RuntimeError wrapping any underlying failure so the
        offending rule ID appears in the message.
        """
        try:
            self.make_refs_and_identifiers_product_specific(product)
            self.make_template_product_specific(product)
        except Exception as exc:
            msg = (
                "Error normalizing '{rule}': {msg}"
                .format(rule=self.id_, msg=str(exc))
            )
            raise RuntimeError(msg)

    def _get_product_only_references(self):
        """Return the subset of references that are product-only
        (currently just 'stigid' and its 'stigid@<product>' variants)."""
        PRODUCT_REFERENCES = ("stigid",)

        product_references = dict()

        for ref in PRODUCT_REFERENCES:
            start = "{0}@".format(ref)
            for gref, gval in self.references.items():
                if ref == gref or gref.startswith(start):
                    product_references[gref] = gval
        return product_references

    def make_template_product_specific(self, product):
        """Resolve '@<product>' qualified template vars and backends."""
        product_suffix = "@{0}".format(product)

        if not self.template:
            return

        not_specific_vars = self.template.get("vars", dict())
        specific_vars = self._make_items_product_specific(
            not_specific_vars, product_suffix, True)
        self.template["vars"] = specific_vars

        not_specific_backends = self.template.get("backends", dict())
        specific_backends = self._make_items_product_specific(
            not_specific_backends, product_suffix, True)
        self.template["backends"] = specific_backends

    def make_refs_and_identifiers_product_specific(self, product):
        """Resolve '@<product>' qualified identifiers and references.

        Product-only references (stigid) are processed separately from
        the general ones so the CCI format check only sees the latter.
        """
        product_suffix = "@{0}".format(product)

        product_references = self._get_product_only_references()
        general_references = self.references.copy()
        for todel in product_references:
            general_references.pop(todel)

        # The bool flag says whether a product-qualified item may
        # silently overwrite an unqualified one with a different value.
        to_set = dict(
            identifiers=(self.identifiers, False),
            general_references=(general_references, True),
            product_references=(product_references, False),
        )
        for name, (dic, allow_overwrites) in to_set.items():
            try:
                new_items = self._make_items_product_specific(
                    dic, product_suffix, allow_overwrites)
            except ValueError as exc:
                msg = (
                    "Error processing {what} for rule '{rid}': {msg}"
                    .format(what=name, rid=self.id_, msg=str(exc))
                )
                raise ValueError(msg)
            dic.clear()
            dic.update(new_items)

        self.references = general_references
        self._verify_disa_cci_format()
        self.references.update(product_references)

        self._verify_stigid_format(product)

    def _make_items_product_specific(self, items_dict, product_suffix, allow_overwrites=False):
        """Collapse 'key@product' entries onto plain 'key' entries.

        Entries qualified for other products are dropped.  When
        allow_overwrites is False, a qualified entry conflicting with an
        unqualified one of a different value raises ValueError.
        """
        new_items = dict()
        for full_label, value in items_dict.items():
            if "@" not in full_label and full_label not in new_items:
                new_items[full_label] = value
                continue

            if not full_label.endswith(product_suffix):
                continue

            label = full_label.split("@")[0]
            if label in items_dict and not allow_overwrites and value != items_dict[label]:
                msg = (
                    "There is a product-qualified '{item_q}' item, "
                    "but also an unqualified '{item_u}' item "
                    "and those two differ in value - "
                    "'{value_q}' vs '{value_u}' respectively."
                    .format(item_q=full_label, item_u=label,
                            value_q=value, value_u=items_dict[label])
                )
                raise ValueError(msg)
            new_items[label] = value
        return new_items

    def _set_attributes_from_dict(self, yaml_contents):
        """Pop every known key out of yaml_contents onto self, applying
        defaults (or raising for mandatory keys) from YAML_KEYS_DEFAULTS."""
        for key, default_getter in self.YAML_KEYS_DEFAULTS.items():
            if key not in yaml_contents:
                value = default_getter()
                if isinstance(value, Exception):
                    raise value
            else:
                value = yaml_contents.pop(key)

            setattr(self, key, value)

    def to_contents_dict(self):
        """
        Returns a dictionary that is the same schema as the dict obtained when loading rule YAML.
        """

        yaml_contents = dict()
        for key in Rule.YAML_KEYS_DEFAULTS:
            yaml_contents[key] = getattr(self, key)

        return yaml_contents

    def validate_identifiers(self, yaml_file):
        """Check identifiers are non-empty strings and CCE entries are valid."""
        if self.identifiers is None:
            raise ValueError("Empty identifier section in file %s" % yaml_file)

        # Validate all identifiers are non-empty:
        for ident_type, ident_val in self.identifiers.items():
            if not isinstance(ident_type, str) or not isinstance(ident_val, str):
                raise ValueError("Identifiers and values must be strings: %s in file %s"
                                 % (ident_type, yaml_file))
            if ident_val.strip() == "":
                raise ValueError("Identifiers must not be empty: %s in file %s"
                                 % (ident_type, yaml_file))
            if ident_type[0:3] == 'cce':
                # Fixed: the messages below previously read "CEE" instead
                # of "CCE".
                if not is_cce_format_valid(ident_val):
                    raise ValueError("CCE Identifier format must be valid: invalid format '%s' for CCE '%s'"
                                     " in file '%s'" % (ident_val, ident_type, yaml_file))
                if not is_cce_value_valid("CCE-" + ident_val):
                    raise ValueError("CCE Identifier value is not a valid checksum: invalid value '%s' for CCE '%s'"
                                     " in file '%s'" % (ident_val, ident_type, yaml_file))

    def validate_references(self, yaml_file):
        """Check references are non-empty strings without stray whitespace."""
        if self.references is None:
            raise ValueError("Empty references section in file %s" % yaml_file)

        for ref_type, ref_val in self.references.items():
            if not isinstance(ref_type, str) or not isinstance(ref_val, str):
                raise ValueError("References and values must be strings: %s in file %s"
                                 % (ref_type, yaml_file))
            if ref_val.strip() == "":
                raise ValueError("References must not be empty: %s in file %s"
                                 % (ref_type, yaml_file))

        for ref_type, ref_val in self.references.items():
            for ref in ref_val.split(","):
                if ref.strip() != ref:
                    msg = (
                        "Comma-separated '{ref_type}' reference "
                        "in {yaml_file} contains whitespace."
                        .format(ref_type=ref_type, yaml_file=yaml_file))
                    raise ValueError(msg)

    def validate_prodtype(self, yaml_file):
        """Reject comma-separated prodtype entries that carry whitespace."""
        for ptype in self.prodtype.split(","):
            if ptype.strip() != ptype:
                msg = (
                    "Comma-separated '{prodtype}' prodtype "
                    "in {yaml_file} contains whitespace."
                    .format(prodtype=self.prodtype, yaml_file=yaml_file))
                raise ValueError(msg)

    def to_xml_element(self):
        """Build and return the <Rule> XCCDF XML element.

        Raises ValueError when self.platform has no CPE mapping.
        """
        rule = ET.Element('Rule')
        rule.set('id', self.id_)
        # "all" is the implicit default, so it is omitted from the output.
        if self.prodtype != "all":
            rule.set("prodtype", self.prodtype)
        rule.set('severity', self.severity)
        add_sub_element(rule, 'title', self.title)
        add_sub_element(rule, 'description', self.description)
        add_sub_element(rule, 'rationale', self.rationale)

        main_ident = ET.Element('ident')
        for ident_type, ident_val in self.identifiers.items():
            # This is not true if items were normalized
            if '@' in ident_type:
                # the ident is applicable only on some product
                # format : 'policy@product', eg. 'stigid@product'
                # for them, we create a separate <ident> element
                policy, product = ident_type.split('@')
                ident = ET.SubElement(rule, 'ident')
                ident.set(policy, ident_val)
                ident.set('prodtype', product)
            else:
                main_ident.set(ident_type, ident_val)

        if main_ident.attrib:
            rule.append(main_ident)

        main_ref = ET.Element('ref')
        for ref_type, ref_val in self.references.items():
            # This is not true if items were normalized
            if '@' in ref_type:
                # the reference is applicable only on some product
                # format : 'policy@product', eg. 'stigid@product'
                # for them, we create a separate <ref> element
                policy, product = ref_type.split('@')
                ref = ET.SubElement(rule, 'ref')
                ref.set(policy, ref_val)
                ref.set('prodtype', product)
            else:
                main_ref.set(ref_type, ref_val)

        if main_ref.attrib:
            rule.append(main_ref)

        if self.oval_external_content:
            check = ET.SubElement(rule, 'check')
            check.set("system", "http://oval.mitre.org/XMLSchema/oval-definitions-5")
            external_content = ET.SubElement(check, "check-content-ref")
            external_content.set("href", self.oval_external_content)
        else:
            # TODO: This is pretty much a hack, oval ID will be the same as rule ID
            # and we don't want the developers to have to keep them in sync.
            # Therefore let's just add an OVAL ref of that ID.
            oval_ref = ET.SubElement(rule, "oval")
            oval_ref.set("id", self.id_)

        if self.ocil or self.ocil_clause:
            ocil = add_sub_element(rule, 'ocil', self.ocil if self.ocil else "")
            if self.ocil_clause:
                ocil.set("clause", self.ocil_clause)

        add_warning_elements(rule, self.warnings)
        add_nondata_subelements(rule, "requires", "id", self.requires)
        add_nondata_subelements(rule, "conflicts", "id", self.conflicts)

        if self.platform:
            platform_el = ET.SubElement(rule, "platform")
            try:
                platform_cpe = XCCDF_PLATFORM_TO_CPE[self.platform]
            except KeyError:
                raise ValueError("Unsupported platform '%s' in rule '%s'." % (self.platform, self.id_))
            platform_el.set("idref", platform_cpe)

        return rule

    def to_file(self, file_name):
        """Serialize this rule as an XML document into file_name."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)
1173
|
|
|
|
1174
|
|
|
|
1175
|
2 |
|
class DirectoryLoader(object):
    """Walks a guide directory tree, collecting benchmark/group/rule/value
    files and aggregating results from subdirectories.

    Subclasses implement _get_new_loader, _process_values and
    _process_rules to decide what is actually built from the collected
    files.
    """

    def __init__(self, profiles_dir, bash_remediation_fns, env_yaml):
        # Items discovered in the directory currently being processed.
        self.benchmark_file = None
        self.group_file = None
        self.loaded_group = None
        self.rule_files = []
        self.value_files = []
        self.subdirectories = []

        # Aggregates accumulated from this directory and all subdirectories.
        self.all_values = set()
        self.all_rules = set()
        self.all_groups = set()

        self.profiles_dir = profiles_dir
        self.bash_remediation_fns = bash_remediation_fns
        self.env_yaml = env_yaml
        self.product = env_yaml["product"]

        # Set by the parent loader before recursion; None at the tree root.
        self.parent_group = None

    def _collect_items_to_load(self, guide_directory):
        """Sort the entries of guide_directory into value files,
        benchmark/group files, rule files and subdirectories to recurse into.

        Raises ValueError when a directory contains more than one
        benchmark.yml or more than one group.yml.
        """
        for dir_item in os.listdir(guide_directory):
            dir_item_path = os.path.join(guide_directory, dir_item)
            _, extension = os.path.splitext(dir_item)

            if extension == '.var':
                self.value_files.append(dir_item_path)
            elif dir_item == "benchmark.yml":
                if self.benchmark_file:
                    raise ValueError("Multiple benchmarks in one directory")
                self.benchmark_file = dir_item_path
            elif dir_item == "group.yml":
                if self.group_file:
                    raise ValueError("Multiple groups in one directory")
                self.group_file = dir_item_path
            elif extension == '.rule':
                self.rule_files.append(dir_item_path)
            elif is_rule_dir(dir_item_path):
                self.rule_files.append(get_rule_dir_yaml(dir_item_path))
            elif dir_item != "tests":
                # "tests" directories hold rule test scenarios, not content.
                if os.path.isdir(dir_item_path):
                    self.subdirectories.append(dir_item_path)
                else:
                    sys.stderr.write(
                        "Encountered file '%s' while recursing, extension '%s' "
                        "is unknown. Skipping..\n"
                        % (dir_item, extension)
                    )

    def load_benchmark_or_group(self, guide_directory):
        """
        Loads a given benchmark or group from the specified benchmark_file or
        group_file, in the context of guide_directory, profiles_dir,
        env_yaml, and bash_remediation_fns.

        Returns the loaded group or benchmark.
        """
        group = None
        if self.group_file and self.benchmark_file:
            # Fixed message: the collected files are benchmark.yml and
            # group.yml, not ".benchmark"/".group" files.
            raise ValueError("A benchmark.yml file and a group.yml file were found in "
                             "the same directory '%s'" % (guide_directory))

        # we treat benchmark as a special form of group in the following code
        if self.benchmark_file:
            group = Benchmark.from_yaml(
                self.benchmark_file, 'product-name', self.env_yaml
            )
            if self.profiles_dir:
                group.add_profiles_from_dir(self.profiles_dir, self.env_yaml)
            group.add_bash_remediation_fns_from_file(self.bash_remediation_fns)

        if self.group_file:
            group = Group.from_yaml(self.group_file, self.env_yaml)
            self.all_groups.add(group.id_)

        return group

    def _load_group_process_and_recurse(self, guide_directory):
        """Load this directory's group, attach it to the parent, then
        process values, recurse into subdirectories and process rules."""
        self.loaded_group = self.load_benchmark_or_group(guide_directory)

        if self.loaded_group:
            if self.parent_group:
                self.parent_group.add_group(self.loaded_group)

            self._process_values()
            self._recurse_into_subdirs()
            self._process_rules()

    def process_directory_tree(self, start_dir, extra_group_dirs=None):
        """Entry point: collect items under start_dir (plus any
        extra_group_dirs) and process the whole tree."""
        self._collect_items_to_load(start_dir)
        if extra_group_dirs is not None:
            self.subdirectories += extra_group_dirs
        self._load_group_process_and_recurse(start_dir)

    def _recurse_into_subdirs(self):
        """Process each subdirectory with a fresh child loader and merge
        its aggregates into ours."""
        for subdir in self.subdirectories:
            loader = self._get_new_loader()
            loader.parent_group = self.loaded_group
            loader.process_directory_tree(subdir)
            self.all_values.update(loader.all_values)
            self.all_rules.update(loader.all_rules)
            self.all_groups.update(loader.all_groups)

    def _get_new_loader(self):
        # Subclasses must return a fresh loader of their own type.
        raise NotImplementedError()

    def _process_values(self):
        # Subclasses must build values from self.value_files.
        raise NotImplementedError()

    def _process_rules(self):
        # Subclasses must build rules from self.rule_files.
        raise NotImplementedError()
1286
|
|
|
|
1287
|
|
|
|
1288
|
2 |
|
class BuildLoader(DirectoryLoader):
    """DirectoryLoader that builds Value and Rule objects and attaches
    them to the loaded group, optionally dumping resolved rule YAML."""

    def __init__(self, profiles_dir, bash_remediation_fns, env_yaml, resolved_rules_dir=None):
        super(BuildLoader, self).__init__(profiles_dir, bash_remediation_fns, env_yaml)

        # Optional directory where fully resolved (product-normalized)
        # rule YAML files are dumped for inspection.
        self.resolved_rules_dir = resolved_rules_dir
        if resolved_rules_dir:
            # mkdir_p also creates missing parent directories and is a
            # no-op when the directory already exists; the previous
            # os.mkdir failed when the parent was missing.
            mkdir_p(resolved_rules_dir)

    def _process_values(self):
        """Build a Value from each .var file and attach it to the group."""
        for value_yaml in self.value_files:
            value = Value.from_yaml(value_yaml, self.env_yaml)
            self.all_values.add(value)
            self.loaded_group.add_value(value)

    def _process_rules(self):
        """Build a Rule from each rule file, filter by prodtype, attach it
        to the group and optionally dump its resolved YAML."""
        for rule_yaml in self.rule_files:
            try:
                rule = Rule.from_yaml(rule_yaml, self.env_yaml)
            except DocumentationNotComplete:
                # Happens on non-debug build when a rule is "documentation-incomplete"
                continue
            prodtypes = parse_prodtype(rule.prodtype)
            if "all" not in prodtypes and self.product not in prodtypes:
                continue
            self.all_rules.add(rule)
            self.loaded_group.add_rule(rule)

            # NOTE(review): this appends the group platform even when it
            # is None - confirm downstream consumers filter None entries.
            rule.inherited_platforms.append(self.loaded_group.platform)

            if self.resolved_rules_dir:
                output_for_rule = os.path.join(
                    self.resolved_rules_dir, "{id_}.yml".format(id_=rule.id_))
                # Defensive: __init__ already created the directory, but
                # recreate it in case it was removed mid-build.
                mkdir_p(self.resolved_rules_dir)
                with open(output_for_rule, "w") as f:
                    rule.normalize(self.env_yaml["product"])
                    yaml.dump(rule.to_contents_dict(), f)

    def _get_new_loader(self):
        """Child loaders share our configuration, including the resolved
        rules output directory."""
        return BuildLoader(
            self.profiles_dir, self.bash_remediation_fns, self.env_yaml, self.resolved_rules_dir)

    def export_group_to_file(self, filename):
        """Serialize the loaded (root) group to an XML file."""
        return self.loaded_group.to_file(filename)
1331
|
|
|
|