Completed
Pull Request — master (#2842)
Created by Edward at 05:45

search_pack_index()   F

Complexity
    Conditions: 11

Size
    Total Lines: 31

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 5
    Bugs: 0
    Features: 0
Metric                        Value
cc (cyclomatic complexity)    11
c (changes)                   5
b (bugs)                      0
f (features)                  0
dl (duplicated lines)         0
loc (lines of code)           31
rs                            3.1764

How to fix: Complexity

Complex functions like search_pack_index() often do a lot of different things. To break such a function down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for statements and variables that share the same prefixes or suffixes, or that operate on the same data.

Once you have determined the statements that belong together, you can apply the Extract Method refactoring and pull them out into a well-named helper. If the extracted component carries its own state, Extract Class is also a candidate.
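
For illustration only (this sketch is not part of the pull request, and the helper name _match_rank is made up), the per-pack matching logic inside search_pack_index() could be pulled out into a small helper, leaving the top-level function with a single flat loop. The sketch assumes the module's existing EXCLUDE_FIELDS, SEARCH_PRIORITY, fetch_pack_index() and PackAPI shown below:

# Illustrative sketch only: extract the per-pack matching into a helper so the
# top-level function keeps one flat loop (the helper name is hypothetical).
def _match_rank(pack, query, exclude, priority):
    """Return the priority bucket the pack matches in, or None if no match."""
    for key, value in six.iteritems(vars(pack)):
        if not hasattr(value, '__contains__'):
            value = str(value)
        if key not in exclude and query in value:
            # Prioritized keys (e.g. "name") sort before everything else.
            return priority.index(key) if key in priority else len(priority)
    return None


def search_pack_index(query, exclude=None, priority=None):
    if not query:
        raise ValueError("Query must be specified.")

    exclude = exclude or EXCLUDE_FIELDS
    priority = priority or SEARCH_PRIORITY
    index = fetch_pack_index()

    matches = [[] for _ in range(len(priority) + 1)]
    for pack_dict in six.itervalues(index):
        pack = PackAPI(**pack_dict)
        rank = _match_rank(pack, query, exclude, priority)
        if rank is not None:
            matches[rank].append(pack)

    return list(itertools.chain.from_iterable(matches))

Most of the branching moves into the helper, which should bring the conditions count of search_pack_index() itself well below the reported 11.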

# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import itertools
import json

import requests
import six
from oslo_config import cfg

from st2common import log as logging
from st2common.models.api.pack import PackAPI
from st2common.persistence.pack import Pack

__all__ = [
    'get_pack_by_ref',
    'fetch_pack_index',
    'get_pack_from_index',
    'search_pack_index',
    'check_index_health'
]

EXCLUDE_FIELDS = [
    "repo_url",
    "email"
]

SEARCH_PRIORITY = [
    "name",
    "keywords"
]

LOG = logging.getLogger(__name__)


def _build_index_list(index_url):
    if not index_url:
        # Reversing the indexes list from config so that the indexes have
        # descending (left-to-right) priority.
        # When multiple indexes have a pack with a given name, the index
        # that comes first in the list will be used.
        index_urls = cfg.CONF.content.index_url[::-1]
    elif isinstance(index_url, str):
        index_urls = [index_url]
    elif hasattr(index_url, '__iter__'):
        index_urls = index_url
    else:
        raise TypeError('"index_url" should either be a string or an iterable object.')
    return index_urls


def _fetch_and_compile_index(index_urls, logger=None):
    """
    Go through the index list and compile results into a single object.
    """
    status = []
    index = {}

    for index_url in index_urls:

        index_status = {
            'url': index_url,
            'packs': 0,
            'message': None,
            'error': None,
        }
        index_json = None

        try:
            request = requests.get(index_url)
            request.raise_for_status()
            index_json = request.json()
        except ValueError as e:
            # Reachable, but the response body is not valid JSON.
            index_status['error'] = 'malformed'
            index_status['message'] = repr(e)
        except requests.exceptions.RequestException as e:
            # Connection, timeout or HTTP error.
            index_status['error'] = 'unresponsive'
            index_status['message'] = repr(e)

        if not index_status['error']:
            if not index_json:
                index_status['error'] = 'empty'
                index_status['message'] = 'The index URL returned an empty object.'
            elif type(index_json) is list:
                index_status['error'] = 'malformed'
                index_status['message'] = 'Expected an index object, got a list instead.'

        if index_status['error']:
            logger.error("Index parsing error: %s" % json.dumps(index_status, indent=4))
        else:
            index_status['message'] = 'Success.'
            index_status['packs'] = len(index_json)
            index.update(index_json)

        status.append(index_status)

    return index, status


def get_pack_by_ref(pack_ref):
    """
    Retrieve PackDB by the provided reference.
    """
    pack_db = Pack.get_by_ref(pack_ref)
    return pack_db


def fetch_pack_index(index_url=None, logger=None):
    """
    Fetch the pack indexes (either from the config or provided as an argument)
    and return the object.
    """
    logger = logger or LOG

    index_urls = _build_index_list(index_url)
    index, status = _fetch_and_compile_index(index_urls, logger)

    # If one of the indexes on the list is unresponsive, we do not throw
    # immediately. The only case where an exception is raised is when no
    # results could be obtained from all listed indexes.
    # This behavior allows for mirrors / backups and handling connection
    # or network issues in one of the indexes.
    if not index:
        raise ValueError("No results from the %s: tried %s.\nStatus: %s" % (
            ("index" if len(index_urls) == 1 else "indexes"),
            ", ".join(index_urls),
            json.dumps(status, indent=4)
        ))
    return index


def check_index_health(index_url=None, status=None, logger=None):
    """
    Check if all listed indexes are healthy: they should be reachable,
    return valid JSON objects, and yield more than one result.
    """
    logger = logger or LOG

    if not status:
        index_urls = _build_index_list(index_url)
        _, status = _fetch_and_compile_index(index_urls, logger)

    health = {
        "indexes": {
            "count": len(status),
            "valid": 0,
            "invalid": 0,
            "errors": {},
            "status": status,
        },
        "packs": {
            "count": 0,
        },
    }

    for index in status:
        if index['error']:
            # Per-error-type tallies live under the "indexes" section.
            health['indexes']['invalid'] += 1
            health['indexes']['errors'][index['error']] = \
                health['indexes']['errors'].get(index['error'], 0) + 1
        else:
            health['indexes']['valid'] += 1
        health['packs']['count'] += index['packs']

    return health


def get_pack_from_index(pack):
    """
    Search index by pack name.
    Returns a pack.
    """
    if not pack:
        raise ValueError("Pack name must be specified.")

    index = fetch_pack_index()

    return PackAPI(**index.get(pack))


def search_pack_index(query, exclude=None, priority=None):
    """
    Search the pack index by query.
    Returns a list of matches for a query.
    """
    if not query:
        raise ValueError("Query must be specified.")

    if not exclude:
        exclude = EXCLUDE_FIELDS
    if not priority:
        priority = SEARCH_PRIORITY

    index = fetch_pack_index()

    # One bucket per prioritized field plus a catch-all bucket at the end,
    # so that matches on e.g. "name" are returned before other matches.
    matches = [[] for _ in range(len(priority) + 1)]
    for pack_dict in six.itervalues(index):
        pack = PackAPI(**pack_dict)

        for key, value in six.iteritems(vars(pack)):
            if not hasattr(value, '__contains__'):
                # Coerce non-container values to strings so the substring
                # check below works uniformly.
                value = str(value)

            if key not in exclude and query in value:
                if key in priority:
                    matches[priority.index(key)].append(pack)
                else:
                    matches[-1].append(pack)
                # A pack is only counted once, on its first matching field.
                break

    return list(itertools.chain.from_iterable(matches))