1
|
|
|
#!/usr/bin/python3 |
2
|
|
|
# -*- coding: utf-8 -*- |
3
|
|
|
|
4
|
|
|
from __future__ import print_function |
5
|
|
|
|
6
|
|
|
import argparse
import collections
import functools
import getpass
import io
import os
import os.path
import re
import shutil
import sys
from tempfile import mkdtemp

import yaml
17
|
|
|
|
18
|
|
|
try: |
19
|
|
|
from github import Github, InputGitAuthor, UnknownObjectException |
20
|
|
|
except ImportError: |
21
|
|
|
sys.stderr.write("Please install PyGithub, on Fedora it's in the " |
22
|
|
|
"python-PyGithub package.\n") |
23
|
|
|
sys.exit(1) |
24
|
|
|
|
25
|
|
|
|
26
|
|
|
import ssg.ansible |
27
|
|
|
import ssg.yaml |
28
|
|
|
from ssg.utils import mkdir_p |
29
|
|
|
|
30
|
|
|
|
31
|
|
|
def memoize(f):
    """Cache results of the single-argument callable *f*.

    Used below together with ``@property`` to turn expensive parsing work
    into a one-shot lookup; the argument (``self`` for the decorated
    methods) must be hashable.
    """
    memo = {}

    # functools.wraps preserves f's __name__/__doc__ on the wrapper,
    # which the original hand-rolled version lost.
    @functools.wraps(f)
    def helper(x):
        if x not in memo:
            memo[x] = f(x)
        return memo[x]
    return helper
39
|
|
|
|
40
|
|
|
# The following code preserves ansible yaml order by round-tripping YAML
# mappings through collections.OrderedDict instead of plain dict.
# code from arcaduf's gist
# https://gist.github.com/arcaduf/8edbe5900372f0dd30aa037272dfe826
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    # Dump an OrderedDict exactly like a plain mapping, keeping key order.
    return dumper.represent_mapping(_mapping_tag, data.items())


def dict_constructor(loader, node):
    # Load every YAML mapping as an OrderedDict so key order survives.
    return collections.OrderedDict(loader.construct_pairs(node))


# Module-level side effect: registers the handlers globally on the yaml
# module, so all subsequent yaml.dump()/yaml.load() calls are affected.
yaml.add_representer(collections.OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
# End arcaduf gist
57
|
|
|
|
58
|
|
|
# Products whose generated playbooks are eligible for upload.
# Idiom fix: set literal instead of set([...]) (avoids building a throwaway list).
PRODUCT_ALLOWLIST = {
    "rhel7",
    "rhel8",
}
62
|
|
|
|
63
|
|
|
# Profiles whose generated playbooks are eligible for upload.
# Idiom fix: set literal instead of set([...]) (avoids building a throwaway list).
PROFILE_ALLOWLIST = {
    "anssi_nt28_enhanced",
    "anssi_nt28_high",
    "anssi_nt28_intermediary",
    "anssi_nt28_minimal",
    "anssi_bp28_enhanced",
    "anssi_bp28_high",
    "anssi_bp28_intermediary",
    "anssi_bp28_minimal",
    "C2S",
    "cis",
    "cjis",
    "hipaa",
    "cui",
    "ospp",
    "pci-dss",
    "rht-ccp",
    "stig",
    "rhvh-stig",
    "rhvh-vpp",
}
84
|
|
|
|
85
|
|
|
|
86
|
|
|
# GitHub organization that hosts the generated role repositories.
ORGANIZATION_NAME = "RedHatOfficial"
# Identity recorded on every commit this script creates.
GIT_COMMIT_AUTHOR_NAME = "ComplianceAsCode development team"
GIT_COMMIT_AUTHOR_EMAIL = "[email protected]"

# Both role-file templates live next to this script.
_TEMPLATE_DIR = os.path.dirname(os.path.abspath(__file__))
META_TEMPLATE_PATH = os.path.join(_TEMPLATE_DIR, "ansible_galaxy_meta_template.yml")
README_TEMPLATE_PATH = os.path.join(_TEMPLATE_DIR, "ansible_galaxy_readme_template.md")
97
|
|
|
|
98
|
|
|
|
99
|
|
|
def create_empty_repositories(github_new_repos, github_org):
    """Create one bare repository in *github_org* for each name given.

    Every repository gets the same settings: public, no issues/wiki/downloads.
    """
    shared_settings = dict(
        description="Role generated from ComplianceAsCode Project",
        homepage="https://github.com/ComplianceAsCode/content/",
        private=False,
        has_issues=False,
        has_wiki=False,
        has_downloads=False,
    )
    for repo_name in github_new_repos:
        print("Creating new Github repository: %s" % repo_name)
        github_org.create_repo(repo_name, **shared_settings)
110
|
|
|
|
111
|
|
|
|
112
|
|
|
def clone_and_init_repository(parent_dir, organization, repo):
    """Clone <organization>/<repo>, seed it with an ansible-galaxy role
    skeleton, and push the initial commit to its master branch.

    The clone is created under the *current working directory* (the caller
    chdirs into a scratch dir first).

    NOTE(review): *parent_dir* is currently unused -- confirm with callers
    before removing it from the signature.
    """
    # `git clone` creates ./<repo> relative to the current directory.
    os.system(
        "git clone [email protected]:%s/%s" % (organization, repo))
    # --force: the directory already exists from the clone above.
    os.system("ansible-galaxy init " + repo + " --force")
    os.chdir(repo)
    try:
        os.system('git add .')
        os.system('git commit -a -m "Initial commit" --author "%s <%s>"'
                  % (GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_AUTHOR_EMAIL))
        os.system('git push origin master')
    finally:
        # Always return to the parent directory, even if a git step fails.
        os.chdir("..")
124
|
|
|
|
125
|
|
|
|
126
|
|
|
def update_repo_release(github, repo):
    """Tag and publish the next patch release on *repo*.

    When the repo already has tags, bump the patch number of its newest
    tag; otherwise seed the version from the newest ComplianceAsCode
    upstream tag (ignoring the historical "v0.5.0-InitialDraft" one).
    """
    existing_tags = list(repo.get_tags())
    if existing_tags:
        majv, minv, patch = existing_tags[0].name.split(".")
        patch = int(patch) + 1
    else:
        upstream = github.get_repo("ComplianceAsCode/content")
        usable_tags = [t for t in upstream.get_tags() if t.name != "v0.5.0-InitialDraft"]
        majv, minv, patch = usable_tags[0].name.strip("v").split(".")

    new_tag = "%s.%s.%s" % (majv, minv, patch)
    latest_commit = repo.get_commits()[0]
    print("Tagging new release '%s' for repo '%s'" % (new_tag, repo.name))
    repo.create_git_tag_and_release(new_tag, '', '', '', latest_commit.sha, 'commit')
140
|
|
|
|
141
|
|
|
|
142
|
|
|
class PlaybookToRoleConverter():
    """Convert a generated Ansible Playbook into an Ansible Galaxy role.

    The playbook produced by the ComplianceAsCode build is split into the
    canonical role layout (tasks/, vars/, defaults/, meta/, README.md).
    Instances are lazy: the playbook file is read and parsed only when one
    of the memoized properties is first accessed.
    """

    # Relative paths of every file this converter renders.
    PRODUCED_FILES = ('defaults/main.yml', 'meta/main.yml', 'tasks/main.yml', 'vars/main.yml',
                      'README.md')

    def __init__(self, local_playbook_filename):
        self._local_playbook_filename = local_playbook_filename

        # ansible language doesn't allow pre_tasks for roles, if the only pre task
        # is the ansible version check we can ignore it because the minimal version
        # is in role metadata
        if "pre_tasks" in self._playbook[0]:
            pre_tasks_data = self._playbook[0]["pre_tasks"]
            if len(pre_tasks_data) == 1 and \
                    pre_tasks_data[0]["name"] == \
                    ssg.ansible.ansible_version_requirement_pre_task_name:
                pass
            else:
                # Fix: the original message had a %s placeholder but never
                # supplied an argument, so a literal "%s" was printed.
                sys.stderr.write(
                    "%s contains pre_tasks other than the version check. "
                    "pre_tasks are not supported for ansible roles and "
                    "will be skipped!.\n" % self._local_playbook_filename)

    @property
    @memoize
    def name(self):
        """Role name, e.g. "rhel7-playbook-stig.yml" -> "rhel7_stig"."""
        root, _ = os.path.splitext(os.path.basename(self._local_playbook_filename))
        # Playbook filenames follow the product-playbook-profile.yml scheme.
        product, _, profile = root.split("-", 2)
        return "%s_%s" % (product, profile.replace("-", "_").lower())

    @property
    @memoize
    def product(self):
        # Returns the first part [product] of name.
        # ex: rhel7_stig
        # returns: rhel7
        return self.name.split("_")[0]

    @property
    @memoize
    def profile(self):
        # Returns the second part [profile] of name.
        # ex: rhel7_anssi_nt28_enhanced
        # returns: anssi_nt28_enhanced
        return self.name.split("_", 1)[1]

    @property
    @memoize
    def tasks_data(self):
        """The playbook's task list (empty list when absent)."""
        return self._playbook[0]["tasks"] if "tasks" in self._playbook[0] else []

    @property
    @memoize
    def tasks_local_content(self):
        """tasks/main.yml content: YAML dump with a blank line between tasks."""
        return yaml.dump(self.tasks_data, width=120, default_flow_style=False) \
            .replace('\n- ', '\n\n- ')

    @property
    @memoize
    def default_vars_data(self):
        """The playbook's "vars" section, used for defaults/main.yml."""
        return self._playbook[0]["vars"] if "vars" in self._playbook[0] else []

    @property
    @memoize
    def added_variables(self):
        """Derive per-rule toggle variables from task tags.

        Side effect: each tagged task's "when" condition is prefixed with
        "<variable> | bool" guards so individual rules can be disabled via
        role variables.  Returns the set of derived variable names.
        """
        variables = set()
        for task in self.tasks_data:
            if "tags" not in task:
                # Fix: this was a bare ``next`` expression (a no-op), so
                # tag-less tasks fell through and crashed with KeyError on
                # task["tags"] below.  ``continue`` is what was intended.
                continue
            if "when" not in task:
                task["when"] = []
            elif isinstance(task["when"], str):
                # Normalize a scalar condition to a list so we can prepend.
                task["when"] = [task["when"]]

            variables_to_add = {self._sanitize_tag(tag)
                                for tag in task["tags"] if self._tag_is_valid_variable(tag)}
            task["when"] = ["{varname} | bool".format(
                varname=v) for v in sorted(variables_to_add)] + task["when"]
            variables.update(variables_to_add)

            # Don't leave an empty "when:" key behind.
            if not task["when"]:
                del task["when"]
        return variables

    @property
    def vars_data(self):
        """vars/main.yml data -- intentionally empty for generated roles."""
        return []

    @property
    @memoize
    def title(self):
        """Human-readable title parsed from the playbook header comment."""
        try:
            title = re.search(r'Profile Title:\s+(.+)$', self._description, re.MULTILINE).group(1)
            return '"' + title + '"'
        except AttributeError:
            # No "Profile Title:" line; fall back to the playbook banner.
            return re.search(r'Ansible Playbook for\s+(.+)$', self._description, re.MULTILINE) \
                .group(1)

    @property
    @memoize
    def description_md(self):
        # This is for a role and not a playbook
        description = re.sub(r'Playbook', "Role", self._description)

        # Fix the description format for markdown so that it looks pretty
        return description.replace('\n', ' \n')

    @property
    @memoize
    def _playbook(self):
        """Playbook parsed with order-preserving YAML loading."""
        return ssg.yaml.ordered_load(self._raw_playbook)

    @property
    @memoize
    def _raw_playbook(self):
        """Raw playbook text, read once and memoized."""
        with io.open(self._local_playbook_filename, 'r', encoding="utf-8") as f:
            return f.read()

    @property
    @memoize
    def platform_version(self):
        """Platform version string substituted into the Galaxy metadata."""
        platform = self.product
        # Check to see if this is RHEL product
        if platform in PRODUCT_ALLOWLIST:
            # For RHEL, the version is the last character of the product id.
            if 'rhel' in platform:
                return platform[-1]
            return "7\n - 8"
        return "TBD"

    @property
    @memoize
    def _description(self):
        """Description text between the first two '#'*79 separator lines."""
        separator = "#" * 79
        offset_from_separator = 3
        first_separator_pos = self._raw_playbook.find(separator)
        second_separator_pos = self._raw_playbook.find(separator,
                                                       first_separator_pos + len(separator))
        description_start = first_separator_pos + len(separator) + offset_from_separator
        description_stop = second_separator_pos - offset_from_separator
        description = self._raw_playbook[description_start:description_stop]
        # Strip the leading comment markers from every line.
        description = description.replace('# ', '')
        description = description.replace('#', '')

        desc = ""
        # Remove SCAP and Playbook examples from description as they don't belong in roles.
        for line in description.split("\n"):
            if line.startswith("Profile ID:"):
                break
            desc += (line + "\n")
        return desc.strip("\n\n")

    @property
    def _update_galaxy_tags(self):
        """Build the galaxy_tags mapping used in meta/main.yml."""
        galaxy_tags = {}
        # These are the default tags that all roles share
        tags = [
            "system",
            "hardening",
            "openscap",
            "ssg",
            "scap",
            "security",
            "compliance",
            "complianceascode",
            "redhatofficial",
            "redhat",
        ]
        prod = self.product
        prof = self.profile

        tags.append(prod)
        # Galaxy tag names cannot contain underscores.
        tags.append(prof.replace("_", ""))

        if prof == 'stig':
            tags.append("disa")

        if 'anssi' in prof:
            tags.append("anssi")

        galaxy_tags['galaxy_tags'] = tags
        return galaxy_tags

    def _tag_is_valid_variable(self, tag):
        """Return True when *tag* may be turned into an Ansible variable."""
        if "DISA-STIG" in tag:
            return True

        # rules of kind package_* and service_* can have hyphen in their rule IDs
        pattern = re.compile('(package_.*_(installed|removed))|(service_.*_(enabled|disabled))')
        if pattern.match(tag):
            return True

        return '-' not in tag and tag != 'always'

    def _sanitize_tag(self, tag):
        """Make *tag* usable as a variable name: hyphens -> underscores."""
        return tag.replace("-", "_")

    def file(self, filepath):
        """Return the rendered content of *filepath* (one of PRODUCED_FILES).

        NOTE: the method name shadows the Python 2 builtin; kept for API
        compatibility with existing callers.
        """
        if filepath == 'tasks/main.yml':
            return self.tasks_local_content
        elif filepath == 'vars/main.yml':
            if len(self.vars_data) < 1:
                return "---\n# defaults file for {role_name}\n".format(role_name=self.name)
            else:
                return yaml.dump(self.vars_data, width=120, indent=4, default_flow_style=False)
        elif filepath == 'README.md':
            return self._generate_readme_content()
        elif filepath == 'defaults/main.yml':
            return self._generate_defaults_content()
        elif filepath == 'meta/main.yml':
            return self._generate_meta_content()
        # Fix: the original silently returned None here, which crashed much
        # later in save_to_disk with an unhelpful AttributeError.
        raise ValueError("Unknown role file: %s" % filepath)

    def _generate_readme_content(self):
        """Render README.md from the template shipped next to this script."""
        with io.open(README_TEMPLATE_PATH, 'r', encoding="utf-8") as f:
            readme_template = f.read()

        local_readme_content = readme_template.replace(
            "@DESCRIPTION@", self.description_md)
        local_readme_content = local_readme_content.replace(
            "@TITLE@", self.title)
        local_readme_content = local_readme_content.replace(
            "@MIN_ANSIBLE_VERSION@", ssg.ansible.min_ansible_version)
        local_readme_content = local_readme_content.replace(
            "@ROLE_NAME@", self.name)
        return local_readme_content

    def _generate_meta_content(self):
        """Render meta/main.yml from its template."""
        # Consistency fix: read as UTF-8 like the README template (the
        # original relied on the locale-dependent default encoding).
        with io.open(META_TEMPLATE_PATH, 'r', encoding="utf-8") as f:
            meta_template = f.read()
        local_meta_content = meta_template.replace(
            "@ROLE_NAME@", self.name)
        local_meta_content = local_meta_content.replace(
            "@DESCRIPTION@", self.title)
        local_meta_content = local_meta_content.replace(
            "@PLATFORM_VERSION@", self.platform_version)
        local_meta_content = local_meta_content.replace(
            "@GALAXY_TAGS@", yaml.dump(self._update_galaxy_tags).replace("- ", " - "))
        return local_meta_content.replace(
            "@MIN_ANSIBLE_VERSION@", ssg.ansible.min_ansible_version)

    def _generate_defaults_content(self):
        """Render defaults/main.yml: playbook vars plus per-rule toggles."""
        default_vars_to_add = sorted(self.added_variables)
        default_vars_local_content = yaml.dump(self.default_vars_data, width=120, indent=4,
                                               default_flow_style=False)
        header = [
            "---", "# defaults file for {role_name}\n".format(role_name=self.name),
        ]
        # Every derived rule variable defaults to enabled.
        lines = ["{var_name}: true".format(var_name=var_name) for var_name in default_vars_to_add]
        lines.append("")

        return ("%s%s%s" % ("\n".join(header), default_vars_local_content, "\n".join(lines)))

    def save_to_disk(self, directory):
        """Write every produced role file under <directory>/<role name>/."""
        print("Converting Ansible Playbook {} to Ansible Role {}".format(
            self._local_playbook_filename, os.path.join(directory, self.name)))
        for filename in self.PRODUCED_FILES:
            abs_path = os.path.join(directory, self.name, filename)
            mkdir_p(os.path.dirname(abs_path))
            # Fix: close the handle deterministically; the original leaked
            # it via open(...).write(...).
            with open(abs_path, 'wb') as out:
                out.write(self.file(filename).encode("utf-8"))
400
|
|
|
|
401
|
|
|
|
402
|
|
|
class RoleGithubUpdater(object):
    """Push the rendered role files for one playbook into its GitHub repo."""

    def __init__(self, repo, local_playbook_filename):
        # PyGithub Repository object for the role's remote repository.
        self.remote_repo = repo
        # Converter that renders the role files from the local playbook.
        self.role = PlaybookToRoleConverter(local_playbook_filename)

    def _local_content(self, filepath):
        # Freshly rendered local content for one of the role files.
        new_content = self.role.file(filepath)
        return new_content

    def _get_blob_content(self, branch, path_name):
        """
        Fetch file content through the low-level git blob API; this works
        for files over the 1MB limit of the contents API.  Returns a
        (decoded content, sha) tuple, or None when the path is absent.
        see:
        https://github.com/PyGithub/PyGithub/issues/661
        """
        ref = self.remote_repo.get_git_ref(f'heads/{branch}')
        # A recursive tree listing is only needed when the path contains
        # directory separators.
        tree = self.remote_repo.get_git_tree(ref.object.sha, recursive='/' in path_name).tree
        sha = [x.sha for x in tree if x.path == path_name]
        if not sha:
            return None
        blob = self.remote_repo.get_git_blob(sha[0])
        import base64
        # Blob contents arrive base64-encoded from the GitHub API.
        b64 = base64.b64decode(blob.content)
        return (b64.decode("utf8"), sha[0])

    def _get_contents(self, path_name, branch='master'):
        """
        First try the regular GitHub contents API; since it cannot fetch
        files larger than 1MB, fall back to the git blob API when the
        content comes back empty.
        """
        content = self.remote_repo.get_contents(path_name, ref=branch)
        if content.content:
            return (content.decoded_content.decode("utf-8"), content.sha)

        blob = self._get_blob_content(branch, path_name)
        if blob is None:
            # NOTE(review): PyGithub usually constructs this exception with
            # (status, data, headers) -- confirm this single-string form
            # renders as intended.
            raise UnknownObjectException(
                'unable to locate file: ' + path_name + ' in branch: ' + branch)
        return blob

    def _remote_content(self, filepath):
        # We want the raw string to compare against _local_content
        content, sha = self._get_contents(filepath)
        return content, sha

    def _update_content_if_needed(self, filepath):
        remote_content, sha = self._remote_content(filepath)

        # Only push a commit when the rendered content actually changed.
        if self._local_content(filepath) != remote_content:
            self.remote_repo.update_file(
                filepath,
                "Updated " + filepath,
                self._local_content(filepath),
                sha,
                author=InputGitAuthor(
                    GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_AUTHOR_EMAIL)
            )
            print("Updating %s in %s" % (filepath, self.remote_repo.name))

    def update_repository(self):
        """Sync all produced role files and refresh the repo metadata."""
        print("Processing %s..." % self.remote_repo.name)

        for path in PlaybookToRoleConverter.PRODUCED_FILES:
            self._update_content_if_needed(path)

        repo_description = (
            "{title} - Ansible role generated from ComplianceAsCode Project"
            .format(title=self.role.title))
        self.remote_repo.edit(
            self.remote_repo.name,
            description=repo_description,
            homepage="https://github.com/complianceascode/content",
        )
474
|
|
|
|
475
|
|
|
|
476
|
|
|
def parse_args():
    """Define and parse the command line interface of this script."""
    cli = argparse.ArgumentParser(
        description='Generates Ansible Roles and pushes them to Github')
    cli.add_argument(
        "--build-playbooks-dir", dest="build_playbooks_dir", required=True,
        help="Path to directory containing the generated Ansible Playbooks. "
             "Most likely this is going to be ./build/ansible")
    cli.add_argument(
        "--dry-run", "-d", dest="dry_run",
        help="Do not push Ansible Roles to the Github, store them only to local directory")
    cli.add_argument(
        "--organization", "-o", default=ORGANIZATION_NAME,
        help="Name of the Github organization")
    cli.add_argument(
        "--profile", "-p", action="append", default=[],
        metavar="PROFILE", choices=PROFILE_ALLOWLIST,
        help="What profiles to upload, if not specified, upload all that are applicable.")
    cli.add_argument(
        "--product", "-r", action="append", default=[],
        metavar="PRODUCT", choices=PRODUCT_ALLOWLIST,
        help="What products to upload, if not specified, upload all that are applicable.")
    cli.add_argument(
        "--tag-release", "-n", action="store_true", default=False,
        help="Tag a new release in GitHub")
    cli.add_argument(
        "--token", "-t", dest="token",
        help="GitHub token used for organization authorization")
    return cli.parse_args()
506
|
|
|
|
507
|
|
|
|
508
|
|
|
def locally_clone_and_init_repositories(organization, repo_list):
    """Clone each listed repository into a throwaway directory, seed it
    with an ansible-galaxy skeleton, and remove the directory afterwards."""
    scratch_dir = mkdtemp()
    original_cwd = os.getcwd()
    os.chdir(scratch_dir)
    try:
        for repository in repo_list:
            clone_and_init_repository(scratch_dir, organization, repository)
    finally:
        # Restore the caller's working directory before deleting the
        # scratch area we are standing in.
        os.chdir(original_cwd)
        shutil.rmtree(scratch_dir)
518
|
|
|
|
519
|
|
|
|
520
|
|
|
def select_roles_to_upload(product_allowlist, profile_allowlist,
                           build_playbooks_dir):
    """Map role repository names to (product, profile) pairs for every
    playbook in *build_playbooks_dir* matching both allowlists."""
    chosen = {}
    for filename in sorted(os.listdir(build_playbooks_dir)):
        stem, extension = os.path.splitext(filename)
        if extension != ".yml":
            continue
        # the format is product-playbook-profile.yml
        product, _, profile = stem.split("-", 2)
        if product not in product_allowlist or profile not in profile_allowlist:
            continue
        chosen["ansible-role-%s-%s" % (product, profile)] = (product, profile)
    return chosen
532
|
|
|
|
533
|
|
|
|
534
|
|
|
def main():
    """Entry point: render roles from built playbooks and either store
    them locally (--dry-run) or sync them to the GitHub organization."""
    args = parse_args()

    product_allowlist = set(PRODUCT_ALLOWLIST)
    profile_allowlist = set(PROFILE_ALLOWLIST)

    # Every repository name this script could ever manage; used below to
    # warn about unexpected repositories in the organization.
    potential_roles = {
        ("ansible-role-%s-%s" % (product, profile))
        for product in product_allowlist for profile in profile_allowlist
    }

    # --product/--profile narrow the allowlists (argparse "choices" already
    # rejected anything outside them).
    if args.product:
        product_allowlist &= set(args.product)
    if args.profile:
        profile_allowlist &= set(args.profile)

    selected_roles = select_roles_to_upload(
        product_allowlist, profile_allowlist, args.build_playbooks_dir
    )

    if args.dry_run:
        # --dry-run carries the output directory: render locally, no GitHub.
        for product_profile in selected_roles.values():
            playbook_filename = "%s-playbook-%s.yml" % product_profile
            playbook_full_path = os.path.join(
                args.build_playbooks_dir, playbook_filename)
            PlaybookToRoleConverter(playbook_full_path).save_to_disk(args.dry_run)
    else:
        if not args.token:
            print("Input your GitHub credentials:")
            username = input("username or token: ")
            password = getpass.getpass("password (or empty for token): ")
        else:
            # A token is passed as the username with an empty password.
            username = args.token
            password = ""

        github = Github(username, password)
        github_org = github.get_organization(args.organization)
        github_repositories = [repo.name for repo in github_org.get_repos()]

        # Create empty repositories
        # (GitHub repository names are case-insensitive, hence the lowercasing.)
        github_new_repos = sorted(list(set(map(str.lower, selected_roles.keys())) - set(map(str.lower, github_repositories))))
        if github_new_repos:
            create_empty_repositories(github_new_repos, github_org)

            locally_clone_and_init_repositories(args.organization, github_new_repos)

        # Update repositories
        for repo in sorted(github_org.get_repos(), key=lambda repo: repo.name):
            if repo.name in selected_roles:
                playbook_filename = "%s-playbook-%s.yml" % selected_roles[repo.name]
                playbook_full_path = os.path.join(
                    args.build_playbooks_dir, playbook_filename)
                RoleGithubUpdater(repo, playbook_full_path).update_repository()
                if args.tag_release:
                    update_repo_release(github, repo)
            elif repo.name not in potential_roles:
                print("Repo '%s' is not managed by this script. "
                      "It may need to be deleted, please verify and do that "
                      "manually!" % repo.name)
593
|
|
|
|
594
|
|
|
|
595
|
|
|
# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
597
|
|
|
|