1
|
|
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more |
2
|
|
|
# contributor license agreements. See the NOTICE file distributed with |
3
|
|
|
# this work for additional information regarding copyright ownership. |
4
|
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0 |
5
|
|
|
# (the "License"); you may not use this file except in compliance with |
6
|
|
|
# the License. You may obtain a copy of the License at |
7
|
|
|
# |
8
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
9
|
|
|
# |
10
|
|
|
# Unless required by applicable law or agreed to in writing, software |
11
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
12
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13
|
|
|
# See the License for the specific language governing permissions and |
14
|
|
|
# limitations under the License. |
15
|
|
|
|
16
|
|
|
import itertools |
17
|
|
|
import json |
18
|
|
|
|
19
|
|
|
import requests |
20
|
|
|
import six |
21
|
|
|
from oslo_config import cfg |
22
|
|
|
|
23
|
|
|
from st2common import log as logging |
24
|
|
|
from st2common.models.api.pack import PackAPI |
25
|
|
|
from st2common.persistence.pack import Pack |
26
|
|
|
|
27
|
|
|
__all__ = [
    'get_pack_by_ref',
    'fetch_pack_index',
    'get_pack_from_index',
    'search_pack_index',
    'check_index_health'
]

# Pack metadata fields that are never matched against a search query
# (see search_pack_index).
EXCLUDE_FIELDS = [
    "repo_url",
    "email"
]

# Fields whose matches rank first in search results, in descending priority
# order; matches on any other field are appended last (see search_pack_index).
SEARCH_PRIORITY = [
    "name",
    "keywords"
]

LOG = logging.getLogger(__name__)
46
|
|
|
|
47
|
|
|
|
48
|
|
|
def _build_index_list(index_url):
    """
    Normalize the ``index_url`` argument into a list of index URLs.

    :param index_url: ``None`` (use the configured indexes), a single URL
                      string, or an iterable of URL strings.
    :return: List of index URLs in descending (left-to-right) priority order.
    :raises TypeError: If ``index_url`` is neither a string nor an iterable.
    """
    if not index_url:
        # Reversing the indexes list from config so that the indexes have
        # descending (left-to-right) priority.
        # When multiple indexes have a pack with a given name, the index
        # that comes first in the list will be used.
        index_urls = cfg.CONF.content.index_url[::-1]
    elif isinstance(index_url, six.string_types):
        # six.string_types also covers `unicode` under Python 2; a bare `str`
        # check would reject unicode URLs there.
        index_urls = [index_url]
    elif hasattr(index_url, '__iter__'):
        index_urls = index_url
    else:
        raise TypeError('"index_url" should either be a string or an iterable object.')
    return index_urls
62
|
|
|
|
63
|
|
|
|
64
|
|
|
def _fetch_and_compile_index(index_urls, logger=None): |
65
|
|
|
""" |
66
|
|
|
Go through the index list and compile results into a single object. |
67
|
|
|
""" |
68
|
|
|
status = [] |
69
|
|
|
index = {} |
70
|
|
|
|
71
|
|
|
for index_url in index_urls: |
72
|
|
|
|
73
|
|
|
index_status = { |
74
|
|
|
'url': index_url, |
75
|
|
|
'packs': 0, |
76
|
|
|
'message': None, |
77
|
|
|
'error': None, |
78
|
|
|
} |
79
|
|
|
index_json = None |
80
|
|
|
|
81
|
|
|
try: |
82
|
|
|
request = requests.get(index_url) |
83
|
|
|
request.raise_for_status() |
84
|
|
|
index_json = request.json() |
85
|
|
|
except ValueError as e: |
86
|
|
|
index_status['error'] = 'malformed' |
87
|
|
|
index_status['message'] = repr(e) |
88
|
|
|
except requests.exceptions.RequestException as e: |
89
|
|
|
index_status['error'] = 'unresponsive' |
90
|
|
|
index_status['message'] = repr(e) |
91
|
|
|
|
92
|
|
|
if index_json == {}: |
93
|
|
|
index_status['error'] = 'empty' |
94
|
|
|
index_status['message'] = 'The index URL returned an empty object.' |
95
|
|
|
elif type(index_json) is list: |
96
|
|
|
index_status['error'] = 'malformed' |
97
|
|
|
index_status['message'] = 'Expected an index object, got a list instead.' |
98
|
|
|
|
99
|
|
|
if index_status['error']: |
100
|
|
|
logger.error("Index parsing error: %s" % json.dumps(index_status, indent=4)) |
101
|
|
|
else: |
102
|
|
|
index_status['message'] = 'Success.' |
103
|
|
|
index_status['packs'] = len(index_json) |
104
|
|
|
index.update(index_json) |
105
|
|
|
|
106
|
|
|
status.append(index_status) |
107
|
|
|
|
108
|
|
|
return index, status |
109
|
|
|
|
110
|
|
|
|
111
|
|
|
def get_pack_by_ref(pack_ref):
    """
    Look up the PackDB model for the given pack reference.

    :param pack_ref: Reference of the pack to retrieve.
    """
    return Pack.get_by_ref(pack_ref)
117
|
|
|
|
118
|
|
|
|
119
|
|
|
def fetch_pack_index(index_url=None, logger=None):
    """
    Fetch the pack indexes (either from the config or provided as an argument)
    and return the compiled index object.

    :param index_url: Optional index URL (string) or list of URLs; falls back
                      to the configured indexes when not given.
    :param logger: Logger used for per-index error reporting (defaults to
                   the module-level ``LOG``).
    :raises ValueError: When none of the listed indexes yielded any results.
    """
    logger = logger or LOG

    index_urls = _build_index_list(index_url)
    index, status = _fetch_and_compile_index(index_urls, logger)

    # A single unresponsive index on the list is not fatal by itself: we only
    # raise when every listed index failed to produce results. This allows
    # mirrors / backups and tolerates connection or network issues affecting
    # one of the indexes.
    if not index:
        index_noun = "index" if len(index_urls) == 1 else "indexes"
        tried_urls = ", ".join(index_urls)
        status_dump = json.dumps(status, indent=4)
        raise ValueError("No results from the %s: tried %s.\nStatus: %s" % (
            index_noun,
            tried_urls,
            status_dump
        ))
    return index
141
|
|
|
|
142
|
|
|
|
143
|
|
|
def check_index_health(index_url=None, status=None, logger=None):
    """
    Check if all listed indexes are healthy: they should be reachable,
    return valid JSON objects, and yield more than one result.

    :param index_url: Optional index URL (string) or list of URLs; ignored
                      when a pre-computed ``status`` is supplied.
    :param status: Optional list of per-index status dicts (as produced by
                   ``_fetch_and_compile_index``); fetched on demand if absent.
    :param logger: Logger for per-index error reporting (defaults to ``LOG``).
    :return: Health summary dict with "indexes" and "packs" sections.
    """
    logger = logger or LOG

    if not status:
        index_urls = _build_index_list(index_url)
        _, status = _fetch_and_compile_index(index_urls, logger)

    indexes_summary = {
        "count": len(status),
        "valid": 0,
        "invalid": 0,
        "errors": {},
        "status": status,
    }
    packs_summary = {
        "count": 0,
    }

    for entry in status:
        error_type = entry['error']
        if error_type:
            indexes_summary['invalid'] += 1
            indexes_summary['errors'][error_type] = (
                indexes_summary['errors'].get(error_type, 0) + 1)
        else:
            # Pack totals only include indexes that responded correctly.
            indexes_summary['valid'] += 1
            packs_summary['count'] += entry['packs']

    return {
        "indexes": indexes_summary,
        "packs": packs_summary,
    }
177
|
|
|
|
178
|
|
|
|
179
|
|
|
def get_pack_from_index(pack):
    """
    Search index by pack name.

    :param pack: Name of the pack to look up.
    :return: ``PackAPI`` object for the matching pack, or ``None`` when the
             pack is not present in any index.
    :raises ValueError: If ``pack`` is empty.
    """
    if not pack:
        raise ValueError("Pack name must be specified.")

    index = fetch_pack_index()

    pack_meta = index.get(pack)
    # Previously a missing pack produced PackAPI(**None) -> TypeError;
    # return None explicitly so callers can distinguish "not found".
    if pack_meta is None:
        return None

    return PackAPI(**pack_meta)
190
|
|
|
|
191
|
|
|
|
192
|
|
|
def search_pack_index(query, exclude=None, priority=None):
    """
    Search the pack index by query.
    Returns a list of matches for a query.

    :param query: Substring to search for in the pack metadata fields.
    :param exclude: Field names to skip when matching (defaults to
                    ``EXCLUDE_FIELDS``).
    :param priority: Field names whose matches rank first, in descending
                     priority order (defaults to ``SEARCH_PRIORITY``).
    :raises ValueError: If ``query`` is empty.
    """
    if not query:
        raise ValueError("Query must be specified.")

    exclude = exclude or EXCLUDE_FIELDS
    priority = priority or SEARCH_PRIORITY

    index = fetch_pack_index()

    # One bucket per priority field, plus a trailing bucket for matches on
    # any other field; buckets are concatenated in order at the end.
    matches = [[] for _ in range(len(priority) + 1)]
    for pack_meta in six.itervalues(index):
        pack = PackAPI(**pack_meta)

        for field, field_value in six.iteritems(vars(pack)):
            if not hasattr(field_value, '__contains__'):
                # Coerce scalars (ints, bools, ...) so `in` works on them.
                field_value = str(field_value)

            if field in exclude or query not in field_value:
                continue

            if field in priority:
                matches[priority.index(field)].append(pack)
            else:
                matches[-1].append(pack)
            # Each pack is listed at most once, under its first matching field.
            break

    return list(itertools.chain.from_iterable(matches))
223
|
|
|
|