#!/usr/bin/env python2
# -*- coding: utf-8 -*-

from __future__ import print_function

from tempfile import mkdtemp
import io
import os
import os.path
import sys
import shutil
import re
import argparse
import getpass
import yaml
import collections

try:
    from github import Github, InputGitAuthor
except ImportError:
    sys.stderr.write("Please install PyGithub, on Fedora it's in the "
                     "python-PyGithub package.\n")
    sys.exit(1)


import ssg.ansible
import ssg.utils  # provides mkdir_p(), used by PlaybookToRoleConverter.save_to_disk()
import ssg.yaml


def memoize(f):
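    """Cache the result of a one-argument function, keyed by that argument."""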
    memo = {}

    def helper(x):
        if x not in memo:
            memo[x] = f(x)
        return memo[x]
    return helper

# The following code preserves ansible yaml order
# code from arcaduf's gist
# https://gist.github.com/arcaduf/8edbe5900372f0dd30aa037272dfe826
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def dict_representer(dumper, data):
    return dumper.represent_mapping(_mapping_tag, data.items())


def dict_constructor(loader, node):
    return collections.OrderedDict(loader.construct_pairs(node))


yaml.add_representer(collections.OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
# End arcaduf gist

PRODUCT_WHITELIST = set([
    "rhel7",
    "rhel8",
    "rhv4",
])

PROFILE_WHITELIST = set([
    "C2S",
    "cjis",
    "hipaa",
    "cui",
    "ospp",
    "pci-dss",
    "rht-ccp",
    "stig",
    "rhvh-stig",
    "rhvh-vpp",
])


ORGANIZATION_NAME = "RedHatOfficial"
GIT_COMMIT_AUTHOR_NAME = "ComplianceAsCode development team"
GIT_COMMIT_AUTHOR_EMAIL = "[email protected]"
META_TEMPLATE_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    "ansible_galaxy_meta_template.yml"
)
README_TEMPLATE_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    "ansible_galaxy_readme_template.md"
)


def create_empty_repositories(github_new_repos, github_org):
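    """Create a public repository in the GitHub organization for each new role name."""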
    for github_new_repo in github_new_repos:
        print("Creating new Github repository: %s" % github_new_repo)
        github_org.create_repo(
            github_new_repo,
            description="Role generated from ComplianceAsCode Project",
            homepage="https://github.com/ComplianceAsCode/content/",
            private=False,
            has_issues=False,
            has_wiki=False,
            has_downloads=False)


def clone_and_init_repository(parent_dir, organization, repo):
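    """Clone the repository, scaffold it with 'ansible-galaxy init' and push the initial commit."""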
    os.system(
        "git clone https://github.com/%s/%s" % (organization, repo))
    os.system("ansible-galaxy init " + repo + " --force")
    os.chdir(repo)
    try:
        os.system('git add .')
        os.system('git commit -a -m "Initial commit" --author "%s <%s>"'
                  % (GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_AUTHOR_EMAIL))
        os.system('git push origin master')
    finally:
        os.chdir("..")


def update_repo_release(github, repo):
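    """Tag a new release on the role repository: bump the last component of its latest
    tag, or seed the version from the latest ComplianceAsCode/content tag when the
    repository has no tags yet.
    """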
    repo_tags = [tag for tag in repo.get_tags()]
    try:
        (majv, minv, rel) = repo_tags[0].name.split(".")
        rel = int(rel) + 1
    except IndexError:
        cac = github.get_repo("ComplianceAsCode/content")
        cac_tags = [tag for tag in cac.get_tags() if tag.name != "v0.5.0-InitialDraft"]
        (majv, minv, rel) = cac_tags[0].name.strip("v").split(".")

    new_tag = ("%s.%s.%s" % (majv, minv, rel))
    commits = repo.get_commits()
    print("Tagging new release '%s' for repo '%s'" % (new_tag, repo.name))
    repo.create_git_tag_and_release(new_tag, '', '', '', commits[0].sha, 'commit')


class PlaybookToRoleConverter():
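    """Convert a generated Ansible Playbook into the files that make up an Ansible Role
    (defaults, meta, tasks, vars and README).
    """
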
    PRODUCED_FILES = ('defaults/main.yml', 'meta/main.yml', 'tasks/main.yml', 'vars/main.yml',
                      'README.md')

    def __init__(self, local_playbook_filename):
        self._local_playbook_filename = local_playbook_filename

        # The Ansible language doesn't allow pre_tasks for roles. If the only pre_task
        # is the Ansible version check, we can ignore it, because the minimal version
        # is already recorded in the role metadata.
        if "pre_tasks" in self._playbook[0]:
            pre_tasks_data = self._playbook[0]["pre_tasks"]
            if len(pre_tasks_data) == 1 and \
                    pre_tasks_data[0]["name"] == \
                    ssg.ansible.ansible_version_requirement_pre_task_name:
                pass
            else:
                sys.stderr.write(
                    "%s contains pre_tasks other than the version check. "
                    "pre_tasks are not supported for ansible roles and "
                    "will be skipped!\n" % self._local_playbook_filename)

    @property
    @memoize
    def name(self):
        root, _ = os.path.splitext(os.path.basename(self._local_playbook_filename))
        product, _, profile = root.split("-", 2)
        return "%s_%s" % (product, profile.replace("-", "_"))

    @property
    @memoize
    def tasks_data(self):
        return self._playbook[0]["tasks"] if "tasks" in self._playbook[0] else []

    @property
    @memoize
    def tasks_local_content(self):
        return yaml.dump(self.tasks_data, width=120, default_flow_style=False) \
            .replace('\n- ', '\n\n- ')

    @property
    @memoize
    def default_vars_data(self):
        return self._playbook[0]["vars"] if "vars" in self._playbook[0] else []

    @property
    @memoize
    def added_variables(self):
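        """Turn valid task tags into role variables: each affected task gets a
        '<tag> | bool' condition prepended to its 'when' clause. Returns the set of
        variable names that were introduced.
        """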
        variables = set()
        for task in self.tasks_data:
            if "tags" not in task:
                continue
            if "when" not in task:
                task["when"] = []
            elif isinstance(task["when"], str):
                task["when"] = [task["when"]]

            variables_to_add = {tag for tag in task["tags"] if self._tag_is_valid_variable(tag)}
            task["when"] = ["{varname} | bool".format(varname=v) for v in variables_to_add] + task["when"]
            variables.update(variables_to_add)

            if not task["when"]:
                del task["when"]
        return variables

    @property
    def vars_data(self):
        return []

    @property
    @memoize
    def title(self):
        try:
            title = re.search(r'Profile Title:\s+(.+)$', self._description, re.MULTILINE).group(1)
            return '"' + title + '"'
        except AttributeError:
            return re.search(r'Ansible Playbook for\s+(.+)$', self._description, re.MULTILINE) \
                .group(1)

    @property
    @memoize
    def description_md(self):
        # This is for a role and not a playbook
        description = re.sub(r'Playbook', "Role", self._description)

        # Fix the description format for markdown so that it looks pretty
        return description.replace('\n', ' \n')

    @property
    @memoize
    def _playbook(self):
        return ssg.yaml.ordered_load(self._raw_playbook)

    @property
    @memoize
    def _raw_playbook(self):
        with io.open(self._local_playbook_filename, 'r', encoding="utf-8") as f:
            return f.read()

    @property
    @memoize
    def _description(self):
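        """Extract the description block between the first two '#'-rule separators of the
        playbook header, stripping comment markers and everything from 'Profile ID:' on.
        """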
        separator = "#" * 79
        offset_from_separator = 3
        first_separator_pos = self._raw_playbook.find(separator)
        second_separator_pos = self._raw_playbook.find(separator,
                                                       first_separator_pos + len(separator))
        description_start = first_separator_pos + len(separator) + offset_from_separator
        description_stop = second_separator_pos - offset_from_separator
        description = self._raw_playbook[description_start:description_stop]
        description = description.replace('# ', '')
        description = description.replace('#', '')

        desc = ""
        # Remove SCAP and Playbook examples from description as they don't belong in roles.
        for line in description.split("\n"):
            if line.startswith("Profile ID:"):
                break
            else:
                desc += (line + "\n")
        return desc.strip("\n\n")

    def _tag_is_valid_variable(self, tag):
        return '-' not in tag and tag != 'always'

    def file(self, filepath):
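        """Return the generated content for one of PRODUCED_FILES."""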
        if filepath == 'tasks/main.yml':
            return self.tasks_local_content
        elif filepath == 'vars/main.yml':
            if len(self.vars_data) < 1:
                return "---\n# defaults file for {role_name}\n".format(role_name=self.name)
            else:
                return yaml.dump(self.vars_data, width=120, indent=4, default_flow_style=False)
        elif filepath == 'README.md':
            return self._generate_readme_content()
        elif filepath == 'defaults/main.yml':
            return self._generate_defaults_content()
        elif filepath == 'meta/main.yml':
            return self._generate_meta_content()

    def _generate_readme_content(self):
        with io.open(README_TEMPLATE_PATH, 'r', encoding="utf-8") as f:
            readme_template = f.read()

        local_readme_content = readme_template.replace(
            "@DESCRIPTION@", self.description_md)
        local_readme_content = local_readme_content.replace(
            "@TITLE@", self.title)
        local_readme_content = local_readme_content.replace(
            "@MIN_ANSIBLE_VERSION@", ssg.ansible.min_ansible_version)
        local_readme_content = local_readme_content.replace(
            "@ROLE_NAME@", self.name)
        return local_readme_content

    def _generate_meta_content(self):
        with open(META_TEMPLATE_PATH, 'r') as f:
            meta_template = f.read()
        local_meta_content = meta_template.replace("@ROLE_NAME@",
                                                   self.name)
        local_meta_content = local_meta_content.replace("@DESCRIPTION@", self.title)
        return local_meta_content.replace("@MIN_ANSIBLE_VERSION@", ssg.ansible.min_ansible_version)

    def _generate_defaults_content(self):
        default_vars_to_add = sorted(self.added_variables)
        default_vars_local_content = yaml.dump(self.default_vars_data, width=120, indent=4,
                                               default_flow_style=False)
        header = [
            "---", "# defaults file for {role_name}\n".format(role_name=self.name),
        ]
        lines = ["{var_name}: true".format(var_name=var_name) for var_name in default_vars_to_add]
        lines.append("")

        return ("%s%s%s" % ("\n".join(header), default_vars_local_content, "\n".join(lines)))

    def save_to_disk(self, directory):
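        """Write all produced role files under <directory>/<role name>/."""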
        print("Converting Ansible Playbook {} to Ansible Role {}".format(
            self._local_playbook_filename, os.path.join(directory, self.name)))
        for filename in self.PRODUCED_FILES:
            abs_path = os.path.join(directory, self.name, filename)
            ssg.utils.mkdir_p(os.path.dirname(abs_path))
            open(abs_path, 'wb').write(self.file(filename).encode("utf-8"))


class RoleGithubUpdater(object):
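    """Push the locally generated content of a single role to its GitHub repository,
    committing only the files that differ from what is already on the remote.
    """
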
    def __init__(self, repo, local_playbook_filename):
        self.remote_repo = repo
        self.role = PlaybookToRoleConverter(local_playbook_filename)

    def _local_content(self, filepath):
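        """Return the content to push for filepath. For README.md and meta/main.yml the
        remote copy is used as the base and only the generated fields (role name, Ansible
        version, description, ...) are refreshed; other files come straight from the
        converter.
        """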
        new_content = self.role.file(filepath)

        if filepath == 'README.md':
            remote_readme_file, _ = self._remote_content("README.md")
            if not remote_readme_file:
                return new_content

            local_readme_content = re.sub(r'Ansible version (\d*\.\d+|\d+)',
                                          "Ansible version %s" % ssg.ansible.min_ansible_version,
                                          remote_readme_file)
            return re.sub(r'%s\.[a-zA-Z0-9\-_]+' % ORGANIZATION_NAME,
                          "%s.%s" % (ORGANIZATION_NAME, self.role.name),
                          local_readme_content)
        elif filepath == 'meta/main.yml':
            remote_meta_file, _ = self._remote_content(filepath)
            if not remote_meta_file:
                return new_content

            with open(META_TEMPLATE_PATH, 'r') as f:
                meta_template = f.read()
            author = re.search(r'author:.*', meta_template).group(0)
            description = re.search(r'description:.*', meta_template).group(0)
            issue_tracker_url = re.search(r'issue_tracker_url:.*', meta_template).group(0)
            local_meta_content = remote_meta_file
            local_meta_content = re.sub(r'role_name:.*',
                                        "role_name: %s" % self.role.name,
                                        local_meta_content)
            local_meta_content = re.sub(r'author:.*',
                                        author,
                                        local_meta_content)
            local_meta_content = re.sub(r'min_ansible_version: (\d*\.\d+|\d+)',
                                        "min_ansible_version: %s" % ssg.ansible.min_ansible_version,
                                        local_meta_content)
            local_meta_content = re.sub(r'description:.*',
                                        "description: %s" % self.role.title,
                                        local_meta_content)
            return re.sub(r'issue_tracker_url:.*',
                          issue_tracker_url,
                          local_meta_content)
        return new_content

    def _remote_content(self, filepath):
        remote = self.remote_repo.get_contents(filepath)
        content = remote.decoded_content
        if filepath == 'README.md':
            content = content.decode("utf-8")
        return content, remote.sha

    def _update_content_if_needed(self, filepath):
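        """Compare local and remote content of filepath and push a commit only when they differ."""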
        remote_content, sha = self._remote_content(filepath)

        if self._local_content(filepath) != remote_content:
            self.remote_repo.update_file(
                "/" + filepath,
                "Updates " + filepath,
                self._local_content(filepath),
                sha,
                author=InputGitAuthor(
                    GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_AUTHOR_EMAIL)
            )
            print("Updating %s in %s" % (filepath, self.remote_repo.name))

    def update_repository(self):
        print("Processing %s..." % self.remote_repo.name)

        for path in PlaybookToRoleConverter.PRODUCED_FILES:
            self._update_content_if_needed(path)

        repo_description = (
            "{title} - Ansible role generated from ComplianceAsCode Project"
            .format(title=self.role.title))
        self.remote_repo.edit(
            self.remote_repo.name,
            description=repo_description,
            homepage="https://github.com/complianceascode/content",
        )


def parse_args():
    parser = argparse.ArgumentParser(
        description='Generates Ansible Roles and pushes them to Github')
    parser.add_argument(
        "--build-playbooks-dir", required=True,
        help="Path to directory containing the generated Ansible Playbooks. "
             "Most likely this is going to be ./build/ansible",
        dest="build_playbooks_dir")
    parser.add_argument(
        "--dry-run", "-d", dest="dry_run",
        help="Do not push the Ansible Roles to GitHub, only store them "
             "in the given local directory"
    )
    parser.add_argument(
        "--organization", "-o", default=ORGANIZATION_NAME,
        help="Name of the Github organization")
    parser.add_argument(
        "--profile", "-p", default=[], action="append",
        metavar="PROFILE", choices=PROFILE_WHITELIST,
        help="What profiles to upload, if not specified, upload all that are applicable.")
    parser.add_argument(
        "--product", "-r", default=[], action="append",
        metavar="PRODUCT", choices=PRODUCT_WHITELIST,
        help="What products to upload, if not specified, upload all that are applicable.")
    parser.add_argument(
        "--tag-release", "-n", default=False, action="store_true",
        help="Tag a new release in GitHub")
    parser.add_argument(
        "--token", "-t", dest="token",
        help="GitHub token used for organization authorization")
    return parser.parse_args()


def locally_clone_and_init_repositories(organization, repo_list):
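    """Clone and scaffold each newly created repository inside a temporary directory."""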
    temp_dir = mkdtemp()
    current_dir = os.getcwd()
    os.chdir(temp_dir)
    try:
        for repo in repo_list:
            clone_and_init_repository(temp_dir, organization, repo)
    finally:
        os.chdir(current_dir)
        shutil.rmtree(temp_dir)


def select_roles_to_upload(product_whitelist, profile_whitelist,
                           build_playbooks_dir):
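    """Map role names to (product, profile) pairs for every whitelisted playbook
    found in build_playbooks_dir.
    """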
    selected_roles = dict()
    for filename in os.listdir(build_playbooks_dir):
        root, ext = os.path.splitext(filename)
        if ext == ".yml":
            # the format is product-playbook-profile.yml
            product, _, profile = root.split("-", 2)
            if product in product_whitelist and profile in profile_whitelist:
                role_name = "ansible-role-%s-%s" % (product, profile)
                selected_roles[role_name] = (product, profile)
    return selected_roles


def main():
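    """Convert the whitelisted playbooks to roles and either store them locally
    (--dry-run) or push them to the GitHub organization.
    """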
    args = parse_args()

    product_whitelist = set(PRODUCT_WHITELIST)
    profile_whitelist = set(PROFILE_WHITELIST)

    potential_roles = {
        ("ansible-role-%s-%s" % (product, profile))
        for product in product_whitelist for profile in profile_whitelist
    }

    if args.product:
        product_whitelist &= set(args.product)
    if args.profile:
        profile_whitelist &= set(args.profile)

    selected_roles = select_roles_to_upload(
        product_whitelist, profile_whitelist, args.build_playbooks_dir
    )

    if args.dry_run:
        for product_profile in selected_roles.values():
            playbook_filename = "%s-playbook-%s.yml" % product_profile
            playbook_full_path = os.path.join(
                args.build_playbooks_dir, playbook_filename)
            PlaybookToRoleConverter(playbook_full_path).save_to_disk(args.dry_run)
    else:
        if not args.token:
            print("Input your GitHub credentials:")
            username = raw_input("username or token: ")
            password = getpass.getpass("password (or empty for token): ")
        else:
            username = args.token
            password = ""

        github = Github(username, password)
        github_org = github.get_organization(args.organization)
        github_repositories = [repo.name for repo in github_org.get_repos()]

        # Create empty repositories
        github_new_repos = sorted(list(
            set(map(str.lower, selected_roles.keys())) -
            set(map(unicode.lower, github_repositories))))
        if github_new_repos:
            create_empty_repositories(github_new_repos, github_org)

            locally_clone_and_init_repositories(args.organization, github_new_repos)

        # Update repositories
        for repo in sorted(github_org.get_repos(), key=lambda repo: repo.name):
            if repo.name in selected_roles:
                playbook_filename = "%s-playbook-%s.yml" % selected_roles[repo.name]
                playbook_full_path = os.path.join(
                    args.build_playbooks_dir, playbook_filename)
                RoleGithubUpdater(repo, playbook_full_path).update_repository()
                if args.tag_release:
                    update_repo_release(github, repo)
            elif repo.name not in potential_roles:
                print("Repo '%s' is not managed by this script. "
                      "It may need to be deleted, please verify and do that "
                      "manually!" % repo.name)


if __name__ == "__main__":
    main()