bika.lims.jsonapi.read.read()   F

Complexity

Conditions 22

Size

Total Lines 111
Code Lines 84

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric   Value
eloc     84
dl       0
loc      111
rs       0
c        0
b        0
f        0
cc       22
nop      2

How to fix

Long Method

Small methods make your code easier to understand, particularly when combined with a good name. And if a method is small, finding a good name for it is usually much easier.

For example, if you find yourself adding comments to a method's body, that is usually a sign that the commented part should be extracted into a new method, with the comment as a starting point for naming it.

Commonly applied refactorings include Extract Method, Replace Temp with Query, and Decompose Conditional; a sketch of Extract Method applied to this function follows below.
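
For illustration, the query-building part of read() in the listing below (the loop over the catalog indexes plus the sort handling) could be pulled out into a helper of its own. The sketch below is simplified (the UID and review_state special cases are left out), and the name build_content_filter is invented here; it is not part of bika.lims:

from Products.CMFPlone.utils import safe_unicode


def build_content_filter(request, indexes):
    """Build the catalog query dict from the request.

    Hypothetical helper, extracted from read() purely to illustrate
    the Extract Method refactoring.
    """
    content_filter = {}
    for index in indexes:
        # plain parameter, e.g. ?portal_type=Client
        if index in request:
            content_filter[index] = safe_unicode(request[index])
        # list parameter, e.g. ?portal_type[]=Client&portal_type[]=Contact
        if "%s[]" % index in request:
            value = request["%s[]" % index]
            if isinstance(value, (list, tuple)):
                content_filter[index] = [safe_unicode(v) for v in value]
            else:
                content_filter[index] = value
    content_filter["sort_on"] = request.get("sort_on", "id")
    content_filter["sort_order"] = request.get("sort_order", "") or "ascending"
    return content_filter

With such a helper, read() shrinks to building the filter, querying the catalog and serialising one page of results, and each piece is easier to name and test.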

Complexity

Complex code like bika.lims.jsonapi.read.read() often does a lot of different things. To break it down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields or methods that share the same prefixes or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
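
read() below is a function rather than a class, but the same idea applies: its paging variables (page_nr, page_size, first_item_nr) form one cohesive group. A minimal sketch of extracting them, assuming a hypothetical Batch helper that is not part of bika.lims:

class Batch(object):
    """Hypothetical paging helper grouping the batching logic of read()."""

    def __init__(self, request, total):
        self.total = total
        try:
            self.page_size = int(request.get("page_size", 10))
        except ValueError:
            self.page_size = 10
        # page_size == 0 means "show all", as in read()
        if self.page_size == 0:
            self.page_size = total
        self.page_nr = int(request.get("page_nr", 0))
        self.start = self.page_size * self.page_nr
        if self.start > total:
            self.start = 0
        self.end = self.start + self.page_size

    def slice(self, items):
        # return only the items that belong to the requested page
        return items[self.start:self.end]

read() would then ask the helper for the page slice and the first/last object numbers instead of carrying that arithmetic inline, which also removes several of the 22 counted conditions.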

# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2025 by it's authors.
# Some rights reserved, see README and LICENSE.

import re

import App
from bika.lims import logger
from bika.lims.interfaces import IJSONReadExtender
from bika.lims.jsonapi import get_include_fields
from bika.lims.jsonapi import get_include_methods
from bika.lims.jsonapi import load_brain_metadata
from bika.lims.jsonapi import load_field_values
from bika.lims.jsonapi import load_method_values
from plone.jsonapi.core import router
from plone.jsonapi.core.interfaces import IRouteProvider
from plone.protect.authenticator import AuthenticatorView
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import safe_unicode
from zope import interface
from zope.component import getAdapters

UID_CATALOG = "uid_catalog"


def read(context, request):
    tag = AuthenticatorView(context, request).authenticator()
    pattern = r'<input .*name="(\w+)".*value="(\w+)"'
    _authenticator = re.match(pattern, tag).groups()[1]

    ret = {
        "url": router.url_for("read", force_external=True),
        "success": True,
        "error": False,
        "objects": [],
        "_authenticator": _authenticator,
    }
    debug_mode = App.config.getConfiguration().debug_mode
    catalog_name = request.get("catalog_name", UID_CATALOG)
    if not catalog_name:
        raise ValueError("bad or missing catalog_name: " + catalog_name)
    catalog = getToolByName(context, catalog_name)
    indexes = catalog.indexes()

    contentFilter = {}
    for index in indexes:
        if index in request:
            if index == 'UID' and safe_unicode(request[index]) == "":
                msg = 'Request with no UID for %s catalog. Dismissing UID ' \
                      'while filtering' % catalog_name
                logger.warning(msg)
                # dismiss the empty UID so it is not used as a filter value
                continue
            if index == 'review_state' and "{" in request[index]:
                continue
            contentFilter[index] = safe_unicode(request[index])
        if "%s[]"%index in request:
            value = request["%s[]"%index]
            if type(value) in (list, tuple):
                contentFilter[index] = [safe_unicode(v) for v in value]
            else:
                contentFilter[index] = value

    if 'limit' in request:
        try:
            contentFilter['sort_limit'] = int(request["limit"])
        except ValueError:
            pass
    sort_on = request.get('sort_on', 'id')
    contentFilter['sort_on'] = sort_on
    # sort order
    sort_order = request.get('sort_order', '')
    if sort_order:
        contentFilter['sort_order'] = sort_order
    else:
        contentFilter['sort_order'] = 'ascending'

    include_fields = get_include_fields(request)

    include_methods = get_include_methods(request)

    # Get matching objects from catalog
    proxies = catalog(**contentFilter)

    if debug_mode:
        if len(proxies) == 0:
            logger.info("contentFilter {} returned zero objects"
                        .format(contentFilter))
        elif len(proxies) == 1:
            logger.info("contentFilter {} returned {} ({})".format(
                contentFilter, proxies[0].portal_type, proxies[0].UID))
        else:
            types = ','.join(set([p.portal_type for p in proxies]))
            logger.info("contentFilter {} returned {} items (types: {})"
                        .format(contentFilter, len(proxies), types))

    # batching items
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    # page_size == 0: show all
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    page_proxies = proxies[first_item_nr:first_item_nr + page_size]
    for proxy in page_proxies:
        obj_data = {}

        # Place all proxy attributes into the result.
        obj_data.update(load_brain_metadata(proxy, include_fields))

        # Place all schema fields into the result.
        obj = proxy.getObject()
        obj_data.update(load_field_values(obj, include_fields))
        # Add methods results
        obj_data.update(load_method_values(obj, include_methods))

        obj_data['path'] = "/".join(obj.getPhysicalPath())

        # call any adapters that care to modify this data.
        adapters = getAdapters((obj, ), IJSONReadExtender)
        for name, adapter in adapters:
            adapter(request, obj_data)

        ret['objects'].append(obj_data)

    ret['total_objects'] = len(proxies)
    ret['first_object_nr'] = first_item_nr
    last_object_nr = first_item_nr + len(page_proxies)
    if last_object_nr > ret['total_objects']:
        last_object_nr = ret['total_objects']
    ret['last_object_nr'] = last_object_nr

    return ret


class Read(object):
    interface.implements(IRouteProvider)

    def initialize(self, context, request):
        pass

    @property
    def routes(self):
        return (
            ("/read", "read", self.read, dict(methods=['GET', 'POST'])),
        )

    def read(self, context, request):
        """/@@API/read: Search the catalog and return data for all objects found

        Optional parameters:

            - catalog_name: uses portal_catalog if unspecified
            - limit  default=1
            - All catalog indexes are searched for in the request.

        {
            runtime: Function running time.
            error: true or string(message) if error. false if no error.
            success: true or string(message) if success. false if no success.
            objects: list of dictionaries, containing catalog metadata
        }
        """

        return read(context, request)
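
For reference, a call against the route registered above might look like the following. This is a sketch under assumptions: the host, site id and credentials are placeholders, the /@@API prefix is taken from the docstring, and requests is used only as a convenient HTTP client.

import requests

# Hypothetical client call; host, site id and credentials are placeholders.
resp = requests.get(
    "http://localhost:8080/senaite/@@API/read",
    params={
        "catalog_name": "portal_catalog",  # read() falls back to uid_catalog
        "portal_type": "Client",           # any catalog index can act as a filter
        "sort_on": "id",
        "page_size": 5,
    },
    auth=("admin", "admin"),
)
data = resp.json()
print(data["total_objects"])
print([obj["path"] for obj in data["objects"]])

The keys in the response (success, error, objects, total_objects, first_object_nr, last_object_nr) correspond to the dictionary assembled by read() above.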