import uuid
import json
import xmlrpclib
import math
import logging

from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.mail import mail_managers
from django.http import HttpResponse
from django.conf import settings

from .graph import Graph
from .node import Node
from .configuration import Configuration
from .node_configuration import NodeConfiguration
from .result import Result
from ore.models import xml_backend
from ore.middleware import HttpResponseServerErrorAnswer
from .xml_configurations import FeatureChoice, InclusionChoice, RedundancyChoice
from .xml_backend import AnalysisResult, MincutResult, SimulationResult


logger = logging.getLogger('ore')


class NativeXmlField(models.Field):
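    '''
    A field mapped straight onto the database's native XML column type.
    Note (assumption, not stated in the original code): only some backends,
    notably PostgreSQL, offer such a native 'xml' type.
    '''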

    def db_type(self, connection):
        return 'xml'


def gen_uuid():
    return str(uuid.uuid4())


class Job(models.Model):

    class Meta:
        app_label = 'ore'

    MINCUT_JOB = 'mincut'
    TOP_EVENT_JOB = 'topevent'
    SIMULATION_JOB = 'simulation'
    EPS_RENDERING_JOB = 'eps'
    PDF_RENDERING_JOB = 'pdf'

    JOB_TYPES = (
        (MINCUT_JOB, 'Cutset computation'),
        (TOP_EVENT_JOB, 'Top event calculation (analytical)'),
        (SIMULATION_JOB, 'Top event calculation (simulation)'),
        (EPS_RENDERING_JOB, 'EPS rendering job'),
        (PDF_RENDERING_JOB, 'PDF rendering job')
    )

    graph = models.ForeignKey(Graph, null=True, related_name='jobs')
    # Detect graph changes during job execution
    graph_modified = models.DateTimeField()
    secret = models.CharField(
        max_length=64,
        default=gen_uuid)  # Unique secret for this job
    kind = models.CharField(max_length=127, choices=JOB_TYPES)
    created = models.DateTimeField(auto_now_add=True, editable=False)
    # Exit code for this job, NULL if pending
    exit_code = models.IntegerField(null=True)

    def input_data(self):
        ''' Used by the API to get the input data needed for the particular job type.'''
        if self.kind in (
                Job.MINCUT_JOB, Job.TOP_EVENT_JOB, Job.SIMULATION_JOB):
            return self.graph.to_xml(), 'application/xml'
        elif self.kind in (Job.EPS_RENDERING_JOB, Job.PDF_RENDERING_JOB):
            return self.graph.to_tikz(), 'application/text'
        assert (False)
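    # Illustrative use (hypothetical API-side caller, not part of this module):
    #   data, mime_type = job.input_data()
    # hands the backend the graph XML (or the TikZ source for the rendering
    # jobs) together with its content type.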

    def done(self):
        return self.exit_code is not None

    @property
    def requires_download(self):
        """
        Indicates whether the result should be delivered directly to the frontend
        as a file, or if it must be preprocessed with self.result_rendering().
        """
        return self.kind in [Job.EPS_RENDERING_JOB, Job.PDF_RENDERING_JOB]

    @property
    def result_titles(self):
        '''
        The result class knows what the titles should look like.
        '''
        if self.kind == self.TOP_EVENT_JOB:
            return Result.titles(Result.ANALYSIS_RESULT, self.graph.kind)
        elif self.kind == self.SIMULATION_JOB:
            return Result.titles(Result.SIMULATION_RESULT, self.graph.kind)
        elif self.kind == self.MINCUT_JOB:
            return Result.titles(Result.MINCUT_RESULT, self.graph.kind)

    def static_info(self):
        '''
        Provides a static info string for the result that is independent of frontend parsing.
        This is mainly a debugging vehicle.
        '''
        # raw_results = [str(result.to_dict()) for result in self.results.all()]
        # return "Raw result information:<br/>"+"<br/>".join(raw_results)

    def axis_titles(self):
        '''
        Computes labeling and axis scales for the analysis results menu.
        Descriptions of the configuration values for 'xAxis' and 'yAxis' can be
        taken from the official Highcharts API.
        '''
        axis_titles = {
            'xAxis': {
                'min': -0.05,
                'max': 1.05,
                'title': {
                    'text': None,  # 'x title',
                    'style': {
                        'fontSize': '9px'
                    }
                },
                'tickInterval': 0.2
            },
            'yAxis': {
                'min': 0,
                'max': 1.0,
                'title': {
                    'text': None,  # 'y title',
                    'style': {
                        'fontSize': '9px'
                    }
                },
                'tickInterval': 1.0,
                'minorTickInterval': 1.0 / 10
            }
        }

        return axis_titles
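    # Assumed front-end usage (not part of this module): the dict above is
    # serialized with json.dumps() and merged by the client into the options
    # object of the Highcharts chart constructor, so the keys mirror the
    # Highcharts 'xAxis' / 'yAxis' configuration structure.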

    @classmethod
    def exists_with_result(cls, graph, kind):
        '''
        Return an existing job object for that graph and job kind, but only
        if it was computed on the same graph data and has existing results.

        In theory, there is only one cached job left, since each new creation
        leads to the deletion of old versions. We nevertheless prepare for the
        case of having multiple cached old results by just using the youngest one.
        '''
        return None
        # TODO: The cached job fetching seems to fail in the API part, test again heavily and re-enable then
        # try:
        #     return Job.objects.filter(
        #         graph=graph, kind=kind, graph_modified=graph.modified, exit_code=0).order_by('-created')[0]
        # except:
        #     return None

    def result_download(self):
        """
        Returns an HttpResponse as a direct file download of the result data.
        """
        response = HttpResponse()
        response['Content-Disposition'] = 'attachment; filename=graph%u.%s' % (
            self.graph.pk, self.kind)
        response.content = Result.objects.exclude(
            kind=Result.GRAPH_ISSUES).get(job=self).binary_value
        response['Content-Type'] = 'application/pdf' if self.kind == 'pdf' else 'application/postscript'
        return response
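    # Illustrative view-side use (hypothetical endpoint): once job.done() and
    # job.requires_download both hold, a download view can simply do
    #   return job.result_download()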

    def interpret_issues(self, xml_issues):
        """
        Interpret the incoming list of issues and convert it to feasible JSON for storage.
        """
        errors = []
        warnings = []
        for issue in xml_issues:
            json_issue = {'message': issue.message,
                          'issueId': issue.issueId,
                          'elementId': issue.elementId}
            if issue.isFatal:
                errors.append(json_issue)
            else:
                warnings.append(json_issue)
        return {'errors': errors, 'warnings': warnings}
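    # Example output (illustrative values): one fatal and one non-fatal issue
    # come back as
    #   {'errors':   [{'message': 'Missing top event', 'issueId': 1, 'elementId': 5}],
    #    'warnings': [{'message': 'Unconnected node',  'issueId': 2, 'elementId': 7}]}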

    def interpret_value(self, xml_result_value, db_result):
        """
        Interpret the incoming result value and convert it to feasible JSON for storage.

        Fuzzy probability values are given as a result for each alpha cut. Putting
        all the different values together forms a triangular membership function.
        Crisp probabilities are just a special case of this, where the membership
        function collapses to a straight vertical line.

        The method determines both a list of drawable diagram coordinates,
        and the result values to be shown directly to the user.

        Diagram point determination:

        The X axis represents the unreliability value (== probability of failure),
        the Y axis the membership function probability value for the given unreliability value.
        For each alpha cut, the backend returns us the points where the upper border of the
        alphacut stripe crosses the membership triangle.
        The lowest alphacut (0) has its upper border directly on the X axis.
        The highest alphacut has its upper border crossing the tip of the membership function.

        The two points where the alphacut border touches the membership function are called
        "[lower, upper]", and the number of the alphacut is the "key".
        For this reason, "lower" and "upper" are used as X coordinates,
        while the key is used as the Y coordinate.
        """
        if hasattr(xml_result_value,
                   'probability') and xml_result_value.probability is not None:
            points = []
            logger.debug("Probability: " + str(xml_result_value.probability))
            # we don't believe the delivered decomp_number
            alphacut_count = len(xml_result_value.probability.alphaCuts)
            for alpha_cut in xml_result_value.probability.alphaCuts:
                # Alphacut indexes start at zero, so shift them by one and
                # normalize, which maps the keys 0..n-1 onto (0, 1]
                y_val = float(alpha_cut.key + 1) / alphacut_count
                assert (0 <= y_val <= 1)
                points.append([alpha_cut.value_.lowerBound, y_val])
                if alpha_cut.value_.upperBound != alpha_cut.value_.lowerBound:
                    points.append([alpha_cut.value_.upperBound, y_val])
                else:
                    # This is the tip of the triangle.
                    # If this is a crisp probability, then only the point above is added.
                    # In this case, add another fake point to draw a straight line.
                    # points.append([alpha_cut.value_.lowerBound, 0])
                    pass

            # Points is now a wild collection of coordinates, where duplicate values for the same X
            # coordinate may occur. We sort it (since the JS code likes that) and leave only the
            # largest Y values per X value.

            # If we have only one point, it makes no sense to draw a graph
            # TODO: Instead, we could draw a nice exponential curve for the resulting rate parameter
            #       This demands some better support for feeding the frontend graph
            #       rendering (axis range etc.)
            if alphacut_count > 1:
                db_result.points = json.dumps(sorted(points))

            # Compute some additional statistics for the front-end, based on
            # the gathered probabilities
            if len(points) > 0:
                db_result.minimum = min(
                    points,
                    key=lambda point: point[0])[0]  # left triangle border position
                db_result.maximum = max(
                    points,
                    key=lambda point: point[0])[0]  # right triangle border position
                db_result.peak = max(
                    points,
                    key=lambda point: point[1])[0]  # triangle tip position

        if hasattr(xml_result_value,
                   'reliability') and xml_result_value.reliability is not None:
            reliability = float(xml_result_value.reliability)
            db_result.reliability = None if math.isnan(
                reliability) else reliability

        if hasattr(xml_result_value,
                   'mttf') and xml_result_value.mttf is not None:
            mttf = float(xml_result_value.mttf)
            db_result.mttf = None if math.isnan(mttf) else mttf

        if hasattr(xml_result_value,
                   'nSimulatedRounds') and xml_result_value.nSimulatedRounds is not None:
            rounds = int(xml_result_value.nSimulatedRounds)
            db_result.rounds = None if math.isnan(rounds) else rounds

        if hasattr(xml_result_value,
                   'nFailures') and xml_result_value.nFailures is not None:
            failures = int(xml_result_value.nFailures)
            db_result.failures = None if math.isnan(failures) else failures

        if hasattr(xml_result_value,
                   'timestamp') and xml_result_value.timestamp is not None:
            timestamp = int(xml_result_value.timestamp)
            db_result.timestamp = None if math.isnan(timestamp) else timestamp
        else:
            # All analysis results not referring to a particular timestamp refer
            # to the configured missionTime
            top_node = db_result.graph.top_node()
            if top_node:
                timestamp = top_node.get_property('missionTime')
                db_result.timestamp = None if math.isnan(
                    timestamp) else timestamp

    def parse_result(self, data):
        """
        Parses the result data and saves the content to the database,
        in relation to this job.
        """
        if self.requires_download:
            if self.kind == self.PDF_RENDERING_JOB:
                old_results = self.results.filter(
                    graph=self.graph,
                    kind=Result.PDF_RESULT)
                old_results.delete()
                db_result = Result(
                    graph=self.graph,
                    job=self,
                    kind=Result.PDF_RESULT)
            elif self.kind == self.EPS_RENDERING_JOB:
                old_results = self.results.filter(
                    graph=self.graph,
                    kind=Result.EPS_RESULT)
                old_results.delete()
                db_result = Result(
                    graph=self.graph,
                    job=self,
                    kind=Result.EPS_RESULT)
            db_result.binary_value = data
            db_result.save()
            return

        # Ok, it is not binary, it is true XML result data
        logger.debug(
            "Parsing backend result XML into database: \n" +
            str(data))
        doc = xml_backend.CreateFromDocument(str(data))

        # Delete old graph issues from a former analysis run
        self.graph.delete_results(kind=Result.GRAPH_ISSUES)

        if hasattr(doc, 'issue'):
            # Result-independent issues (for the whole graph, and not per configuration)
            # are saved as a special kind of result
            db_result = Result(
                graph=self.graph,
                job=self,
                kind=Result.GRAPH_ISSUES)
            db_result.issues = json.dumps(self.interpret_issues(doc.issue))
            db_result.save()

        conf_id_mappings = {}  # XML conf IDs to DB conf IDs

        if hasattr(doc, 'configuration'):
            # Throw away existing configuration information
            self.graph.delete_configurations()
            # Walk through all the configurations determined by the backend, as shown in the XML.
            # Node configurations can be bulk-inserted, since nobody links to them.
            # The expensive looped Configuration object creation cannot be bulk-inserted,
            # since we need their pk's in the NodeConfiguration objects.
            db_nodeconfs = []
            for configuration in doc.configuration:
                db_conf = Configuration(
                    graph=self.graph,
                    costs=configuration.costs if hasattr(
                        configuration, 'costs') else None)
                db_conf.save()
                conf_id_mappings[configuration.id] = db_conf
                logger.debug(
                    "Storing DB configuration %u for XML configuration %s in graph %u" %
                    (db_conf.pk, configuration.id, self.graph.pk))
                # Analyze node configuration choices in this configuration
                assert (hasattr(configuration, 'choice'))  # according to the XSD, this must be given
                for choice in configuration.choice:
                    element = choice.value_
                    json_choice = {}
                    if isinstance(element, FeatureChoice):
                        json_choice['type'] = 'FeatureChoice'
                        json_choice['featureId'] = element.featureId
                    elif isinstance(element, InclusionChoice):
                        json_choice['type'] = 'InclusionChoice'
                        json_choice['included'] = element.included
                    elif isinstance(element, RedundancyChoice):
                        json_choice['type'] = 'RedundancyChoice'
                        json_choice['n'] = int(element.n)
                    else:
                        raise ValueError('Unknown choice %s' % element)
                    db_node = Node.objects.get(
                        client_id=choice.key,
                        graph=self.graph)
                    db_nodeconf = NodeConfiguration(
                        node=db_node,
                        configuration=db_conf,
                        setting=json.dumps(json_choice))
                    db_nodeconfs.append(db_nodeconf)
            logger.debug("Performing bulk insert of node configurations")
            NodeConfiguration.objects.bulk_create(db_nodeconfs)

        if hasattr(doc, 'result'):
            # Remove earlier results of the same kind
            if self.kind == self.TOP_EVENT_JOB:
                self.graph.delete_results(kind=Result.ANALYSIS_RESULT)
            elif self.kind == self.SIMULATION_JOB:
                self.graph.delete_results(kind=Result.SIMULATION_RESULT)
            elif self.kind == self.MINCUT_JOB:
                self.graph.delete_results(kind=Result.MINCUT_RESULT)
            db_results = []
            for result in doc.result:
                assert (int(result.modelId) == self.graph.pk)
                db_result = Result(graph=self.graph, job=self)
                if result.configId in conf_id_mappings:
                    db_result.configuration = conf_id_mappings[result.configId]
                if isinstance(result, AnalysisResult):
                    db_result.kind = Result.ANALYSIS_RESULT
                elif isinstance(result, MincutResult):
                    db_result.kind = Result.MINCUT_RESULT
                elif isinstance(result, SimulationResult):
                    db_result.kind = Result.SIMULATION_RESULT
                self.interpret_value(result, db_result)
                if result.issue:
                    db_result.issues = json.dumps(
                        self.interpret_issues(result.issue))
                db_results.append(db_result)
            logger.debug("Performing bulk insert of parsed results")
            Result.objects.bulk_create(db_results)


@receiver(post_save, sender=Job)
def job_post_save(sender, instance, created, **kwargs):
    ''' Informs notification listeners.
        The payload contains the job URL prefix with a secret,
        which allows the listener to perform according actions.
    '''
    if created:
        # The only way to determine our own hostname + port number at runtime in Django
        # is from an HttpRequest object, which we do not have here.
        # Option 1 is to fetch this information from the HttpRequest and somehow move it here.
        # This works nicely as long as LiveServerTestCase is not used, since the Django Test
        # Client still accesses the http://testserver URL and not the live server URL.
        # We therefore take the static approach with a setting here, which is overridden
        # by the test suite run accordingly.

        # TODO: Use reverse() for this
        job_url = settings.SERVER + '/api/back/jobs/' + instance.secret

        try:
            # The proxy is instantiated here, since the connection should go
            # away when finished
            s = xmlrpclib.ServerProxy(settings.BACKEND_DAEMON)
            logger.debug(
                "Triggering %s job available through url %s" %
                (instance.kind, job_url))
            s.start_job(instance.kind, job_url)
        except Exception as e:
            mail_managers(
                "Exception on backend call - " +
                settings.BACKEND_DAEMON,
                str(e))
            raise HttpResponseServerErrorAnswer(
                "Sorry, we seem to have a problem with our ORE backend. The admins are informed, thanks for your patience.")
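
# Minimal sketch of the daemon side of the XML-RPC handshake above
# (illustrative only; the real backend daemon lives outside this module,
# and the address and job handling here are assumptions):
#
#   import SimpleXMLRPCServer  # Python 2 stdlib
#
#   def start_job(kind, job_url):
#       # fetch the input data from job_url using the embedded secret,
#       # run the requested computation, then upload the result
#       pass
#
#   server = SimpleXMLRPCServer.SimpleXMLRPCServer(('localhost', 8080))
#   server.register_function(start_job)
#   server.serve_forever()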