| Metric | Value |
| --- | --- |
| Conditions | 22 |
| Total Lines | 111 |
| Code Lines | 84 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, especially when combined with a good name. Moreover, when a method is small, finding a good name for it is usually much easier.
For example, if you find yourself adding comments to a method's body, that is usually a good sign that the commented part should be extracted into a new method, with the comment as a starting point for its name.
Commonly applied refactorings include Extract Method (see the sketch below). If many parameters or temporary variables are present, Replace Temp with Query and Introduce Parameter Object are also worth considering.
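As a minimal illustration of the Extract Method step described above, here is a before/after sketch. All names in it are made up for the example and are not taken from bika.lims:

```python
class ReportBuilder:
    """Illustrative only: a commented block becomes a named method."""

    # Before: the comment marks a concept hidden inside the method body.
    def count_verified_before(self, samples):
        # keep only verified samples
        verified = [s for s in samples if s.get("state") == "verified"]
        return len(verified)

    # After: Extract Method turns the comment into a method name.
    def count_verified_after(self, samples):
        return len(self._verified(samples))

    def _verified(self, samples):
        """The former comment now lives on as a descriptive name."""
        return [s for s in samples if s.get("state") == "verified"]
```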
Complex functions like bika.lims.jsonapi.read.read() often do a lot of different things. To break such a function down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields or methods that share the same prefixes or suffixes.
Once you have determined which fields belong together, you can apply the Extract Class refactoring (a brief sketch follows). If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
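A small sketch of Extract Class, again with hypothetical names: the page_* fields travel together, so they are pulled out into their own object.

```python
# Before: the page_* fields form a cohesive group inside ResultListBefore.
class ResultListBefore:
    def __init__(self, items, page_nr=0, page_size=10):
        self.items = items
        self.page_nr = page_nr
        self.page_size = page_size

    def current_page(self):
        start = self.page_nr * self.page_size
        return self.items[start:start + self.page_size]


# After: Extract Class moves the cohesive component into its own type.
class Page:
    def __init__(self, page_nr=0, page_size=10):
        self.page_nr = page_nr
        self.page_size = page_size

    def slice(self, items):
        start = self.page_nr * self.page_size
        return items[start:start + self.page_size]


class ResultListAfter:
    def __init__(self, items, page=None):
        self.items = items
        self.page = page or Page()

    def current_page(self):
        return self.page.slice(self.items)
```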
The flagged function, bika.lims.jsonapi.read.read(), as shown in the report:

```python
# -*- coding: utf-8 -*-

# ... module imports elided in this excerpt ...

def read(context, request):
    tag = AuthenticatorView(context, request).authenticator()
    pattern = r'<input .*name="(\w+)".*value="(\w+)"'
    _authenticator = re.match(pattern, tag).groups()[1]

    ret = {
        "url": router.url_for("read", force_external=True),
        "success": True,
        "error": False,
        "objects": [],
        "_authenticator": _authenticator,
    }
    debug_mode = App.config.getConfiguration().debug_mode
    catalog_name = request.get("catalog_name", UID_CATALOG)
    if not catalog_name:
        raise ValueError("bad or missing catalog_name: " + catalog_name)
    catalog = getToolByName(context, catalog_name)
    indexes = catalog.indexes()

    # Build the catalog query from request parameters that match index names.
    contentFilter = {}
    for index in indexes:
        if index in request:
            if index == 'UID' and safe_unicode(request[index]) == "":
                msg = 'Request with no UID for %s catalog. Dismissing UID ' \
                      'while filtering' % catalog_name
                logger.warning(msg)
            if index == 'review_state' and "{" in request[index]:
                continue
            contentFilter[index] = safe_unicode(request[index])
        if "%s[]" % index in request:
            value = request["%s[]" % index]
            if type(value) in (list, tuple):
                contentFilter[index] = [safe_unicode(v) for v in value]
            else:
                contentFilter[index] = value

    if 'limit' in request:
        try:
            contentFilter['sort_limit'] = int(request["limit"])
        except ValueError:
            pass
    sort_on = request.get('sort_on', 'id')
    contentFilter['sort_on'] = sort_on
    # sort order
    sort_order = request.get('sort_order', '')
    if sort_order:
        contentFilter['sort_order'] = sort_order
    else:
        contentFilter['sort_order'] = 'ascending'

    include_fields = get_include_fields(request)

    include_methods = get_include_methods(request)

    # Get matching objects from catalog
    proxies = catalog(**contentFilter)

    if debug_mode:
        if len(proxies) == 0:
            logger.info("contentFilter {} returned zero objects"
                        .format(contentFilter))
        elif len(proxies) == 1:
            logger.info("contentFilter {} returned {} ({})".format(
                contentFilter, proxies[0].portal_type, proxies[0].UID))
        else:
            types = ','.join(set([p.portal_type for p in proxies]))
            logger.info("contentFilter {} returned {} items (types: {})"
                        .format(contentFilter, len(proxies), types))

    # batching items
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    # page_size == 0: show all
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    page_proxies = proxies[first_item_nr:first_item_nr + page_size]
    for proxy in page_proxies:
        obj_data = {}

        # Place all proxy attributes into the result.
        obj_data.update(load_brain_metadata(proxy, include_fields))

        # Place all schema fields into the result.
        obj = proxy.getObject()
        obj_data.update(load_field_values(obj, include_fields))
        # Add methods results
        obj_data.update(load_method_values(obj, include_methods))

        obj_data['path'] = "/".join(obj.getPhysicalPath())

        # call any adapters that care to modify this data.
        adapters = getAdapters((obj, ), IJSONReadExtender)
        for name, adapter in adapters:
            adapter(request, obj_data)

        ret['objects'].append(obj_data)

    ret['total_objects'] = len(proxies)
    ret['first_object_nr'] = first_item_nr
    last_object_nr = first_item_nr + len(page_proxies)
    if last_object_nr > ret['total_objects']:
        last_object_nr = ret['total_objects']
    ret['last_object_nr'] = last_object_nr

    return ret
```
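As one concrete Extract Method candidate from the listing above, the batching block could move into a helper. This is only a sketch; get_batch() is a hypothetical name and not part of bika.lims:

```python
def get_batch(proxies, request):
    """Return (first_item_nr, page_proxies) for the requested page.

    Mirrors the inline batching logic of read(): page_size == 0 means
    "show all", and an out-of-range page falls back to the first item.
    """
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    return first_item_nr, proxies[first_item_nr:first_item_nr + page_size]


if __name__ == "__main__":
    # Plain lists stand in for catalog brains in this standalone sketch.
    first, page = get_batch(list(range(25)), {"page_nr": "1", "page_size": "10"})
    assert (first, page) == (10, list(range(10, 20)))
```

Inside read(), the whole block would then shrink to a single call such as `first_item_nr, page_proxies = get_batch(proxies, request)`, and the `# batching items` comment disappears into the helper's name.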