|
1
|
|
|
package it.cnr.istc.pst.platinum.ai.deliberative.strategy; |
|
2
|
|
|
|
|
3
|
|
|
import java.io.BufferedWriter; |
|
4
|
|
|
import java.io.File; |
|
5
|
|
|
import java.io.FileOutputStream; |
|
6
|
|
|
import java.io.OutputStreamWriter; |
|
7
|
|
|
import java.util.Comparator; |
|
8
|
|
|
import java.util.HashMap; |
|
9
|
|
|
import java.util.List; |
|
10
|
|
|
import java.util.Map; |
|
11
|
|
|
import java.util.NoSuchElementException; |
|
12
|
|
|
import java.util.PriorityQueue; |
|
13
|
|
|
import java.util.Queue; |
|
14
|
|
|
import java.util.Set; |
|
15
|
|
|
|
|
16
|
|
|
import org.bson.Document; |
|
17
|
|
|
|
|
18
|
|
|
import com.mongodb.client.MongoClient; |
|
19
|
|
|
import com.mongodb.client.MongoClients; |
|
20
|
|
|
import com.mongodb.client.MongoCollection; |
|
21
|
|
|
import com.mongodb.client.MongoDatabase; |
|
22
|
|
|
|
|
23
|
|
|
import it.cnr.istc.pst.platinum.ai.deliberative.solver.SearchSpaceNode; |
|
24
|
|
|
import it.cnr.istc.pst.platinum.ai.deliberative.strategy.ex.EmptyFringeException; |
|
25
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.domain.component.ComponentValue; |
|
26
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.domain.component.DomainComponent; |
|
27
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.domain.component.PlanDataBase; |
|
28
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.domain.knowledge.DomainKnowledge; |
|
29
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.FrameworkObject; |
|
30
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.annotation.inject.framework.PlanDataBasePlaceholder; |
|
31
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.annotation.lifecycle.PostConstruct; |
|
32
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.lang.flaw.Flaw; |
|
33
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.lang.flaw.FlawType; |
|
34
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.microkernel.resolver.plan.Goal; |
|
35
|
|
|
import it.cnr.istc.pst.platinum.ai.framework.utils.properties.FilePropertyReader; |
|
36
|
|
|
|
|
37
|
|
|
/**
 * Base class for the search strategies used by the deliberative solver to
 * explore the space of partial plans.
 *
 * A strategy keeps the fringe of the search in a priority queue ordered by the
 * strategy itself (each concrete subclass implements {@link Comparator} over
 * {@link SearchSpaceNode}). On initialization it extracts planning knowledge
 * (decomposition, dependency and hierarchy graphs) from the plan data-base to
 * support heuristic estimations of plan cost and makespan, and optionally
 * records search choices into a MongoDB collection.
 *
 * @author alessandro
 */
public abstract class SearchStrategy extends FrameworkObject implements Comparator<SearchSpaceNode> {
	
	// reference to plan data-base (injected by the framework)
	@PlanDataBasePlaceholder
	protected PlanDataBase pdb;
	
	// the fringe of the search space, ordered according to compare()
	protected Queue<SearchSpaceNode> fringe;
	
	// strategy label (used for identification, see toString())
	protected String label;
	
	// planning (decomposition) graph: value -> possible decompositions, each a list of subgoal values
	protected Map<ComponentValue, List<List<ComponentValue>>> pgraph;
	// dependency graph between domain components
	protected Map<DomainComponent, Set<DomainComponent>> dgraph;
	// domain hierarchy: components grouped by hierarchical level (index 0 = top)
	protected List<DomainComponent>[] dhierarchy;
	
	// cost of a scheduling operation (read from the deliberative property file)
	protected double schedulingCost;
	// cost of a timeline behavior completion operation
	protected double completionCost;
	// general planning cost
	protected double planningCost;
	// detailed goal-expansion cost
	protected double expansionCost;
	// detailed goal-unification cost
	protected double unificationCost;
	
	
	// MongoDB client used to record search statistics.
	// NOTE(review): static field shared by all strategy instances and mutated
	// without synchronization (see init() and clear()) — not thread-safe;
	// confirm strategies are only created/cleared from a single thread.
	protected static MongoClient client;
	// collection where search choices are recorded (null when MongoDB is not configured)
	protected MongoCollection<Document> collection;
|
66
|
|
|
|
|
67
|
|
|
	/**
	 * Creates a search strategy with the given label.
	 *
	 * Initializes the fringe as a priority queue ordered by this strategy's
	 * {@code compare()} and loads the operation costs from the deliberative
	 * property file found under {@code FRAMEWORK_HOME}.
	 *
	 * @param label the label identifying the strategy
	 */
	protected SearchStrategy(String label) {
		super();
		
		// initialize the fringe; ordering is delegated to this.compare()
		this.fringe = new PriorityQueue<SearchSpaceNode>(this);
		
		// set label
		this.label = label;
		
		// get deliberative property file
		FilePropertyReader properties = new FilePropertyReader(
				FRAMEWORK_HOME + FilePropertyReader.DEFAULT_DELIBERATIVE_PROPERTY);
		// set operation costs from parameters
		// NOTE(review): planningCost is read from the same "expansion-cost"
		// property as expansionCost — confirm this is intended and not a
		// copy-paste slip (no "planning-cost" property is read anywhere here).
		this.planningCost = Double.parseDouble(properties.getProperty("expansion-cost"));
		this.expansionCost = Double.parseDouble(properties.getProperty("expansion-cost"));
		this.unificationCost = Double.parseDouble(properties.getProperty("unification-cost"));
		this.schedulingCost = Double.parseDouble(properties.getProperty("scheduling-cost"));
		this.completionCost = Double.parseDouble(properties.getProperty("completion-cost"));
	}
|
90
|
|
|
|
|
91
|
|
|
	/**
	 * Post-construction initialization.
	 *
	 * Extracts the decomposition, dependency and hierarchy graphs from the plan
	 * data-base's domain knowledge and exports each of them as a Graphviz DOT
	 * file under {@code FRAMEWORK_HOME}. If a "mongodb" property is configured,
	 * it also opens (or reuses) the shared MongoDB client and resets the
	 * "planner_search" collection used to record search choices.
	 */
	@PostConstruct
	protected void init() {
		
		// get domain knowledge
		DomainKnowledge dk = this.pdb.getDomainKnowledge();
		// get the decomposition tree from the domain theory
		this.pgraph = dk.getDecompositionGraph();
		// export decomposition graph
		this.exportDecompositionGraph(this.pgraph);
		
		// get dependency graph
		this.dgraph = dk.getDependencyGraph();
		// export dependency graph
		this.exportDependencyGraph(this.dgraph);
		
		// get domain hierarchy
		this.dhierarchy = dk.getDomainHierarchy();
		// export hierarchy
		this.exportHierarchyGraph(this.dhierarchy);
		
		
		// get deliberative property file
		FilePropertyReader properties = new FilePropertyReader(
				FRAMEWORK_HOME + FilePropertyReader.DEFAULT_DELIBERATIVE_PROPERTY);
		
		// get mongo DB name (empty or missing means "statistics disabled")
		String mongodb = properties.getProperty("mongodb");
		// check if exists
		if (mongodb != null && !mongodb.equals("")) {
			
			// lazily create the shared client
			// NOTE(review): unsynchronized check-then-act on a static field;
			// also "mongodb_host" is not checked for null before use — confirm
			// the property is always set when "mongodb" is.
			if (client == null) {
				// check DB host
				String dbHost = properties.getProperty("mongodb_host");
				// create client
				client = MongoClients.create(dbHost);
			}
			
			// get DB
			MongoDatabase db = client.getDatabase(mongodb.trim());
			// get collection
			this.collection = db.getCollection("planner_search");
			// remove all data from the collection (fresh statistics per run)
			this.collection.drop();
		}
	}
|
140
|
|
|
|
|
141
|
|
|
|
|
142
|
|
|
	/**
	 * Compute the (pessimistic) planning cost of a domain value by analyzing the
	 * extracted decomposition graph.
	 *
	 * Recursively walks the decomposition graph: a leaf value costs one
	 * unification per component; a decomposable value sums the costs of the
	 * subgoals of each alternative decomposition and then keeps, per component,
	 * the minimum (optimistic, index 0) and maximum (pessimistic, index 1)
	 * across alternatives, finally adding the unification cost of the value
	 * itself.
	 *
	 * NOTE(review): assumes the decomposition graph is acyclic — a cyclic
	 * decomposition would make this recursion diverge; confirm with
	 * DomainKnowledge.getDecompositionGraph().
	 *
	 * @param value the domain value to estimate
	 * @return a map from each involved component to a two-element array
	 *         {optimistic cost, pessimistic cost}
	 */
	private Map<DomainComponent, Double[]> computeCostProjections(ComponentValue value) {
		
		// per-component {optimistic, pessimistic} cost
		Map<DomainComponent, Double[]> cost = new HashMap<>();
		// check if leaf (no known decomposition)
		if (!this.pgraph.containsKey(value) ||
				this.pgraph.get(value).isEmpty()) {
			
			// a leaf only requires a unification
			cost.put(value.getComponent(), new Double[] {
				this.unificationCost,
				this.unificationCost
			});
			
		} else {
			
			// get possible decompositions
			for (List<ComponentValue> decomposition : this.pgraph.get(value)) {
				
				// costs accumulated over this single decomposition
				Map<DomainComponent, Double[]> dCosts = new HashMap<>();
				for (ComponentValue subgoal : decomposition) {
					
					// compute planning cost of the subgoal (recursive)
					Map<DomainComponent, Double[]> update = this.computeCostProjections(subgoal);
					for (DomainComponent c : update.keySet()) {
						
						if (!dCosts.containsKey(c)) {
							// set cost
							dCosts.put(c, new Double[] {
								update.get(c)[0],
								update.get(c)[1]
							});
							
						} else {
							
							// subgoals of the same decomposition are all needed: sum
							dCosts.put(c, new Double[] {
								dCosts.get(c)[0] + update.get(c)[0],
								dCosts.get(c)[1] + update.get(c)[1]
							});
						}
					}
				}
				
				// alternatives compete: keep min as optimistic, max as pessimistic
				for (DomainComponent c : dCosts.keySet()) {
					if (!cost.containsKey(c)) {
						
						// set cost
						cost.put(c, new Double[] {
							dCosts.get(c)[0],
							dCosts.get(c)[1]
						});
						
					} else {
						
						// get min and max across decompositions
						cost.put(c, new Double[] {
							Math.min(cost.get(c)[0], dCosts.get(c)[0]),
							Math.max(cost.get(c)[1], dCosts.get(c)[1])
						});
					}
				}
			}
			
			// add the cost of unifying the value itself
			if (!cost.containsKey(value.getComponent())) {
				
				// set cost
				cost.put(value.getComponent(), new Double[] {
					this.unificationCost,
					this.unificationCost
				});
				
			} else {
				
				// weight cost according to the hierarchical value
				cost.put(value.getComponent(), new Double[] {
					this.unificationCost + cost.get(value.getComponent())[0],
					this.unificationCost + cost.get(value.getComponent())[1]
				});
			}
		}
		
		// get cost
		return cost;
	}
|
236
|
|
|
|
|
237
|
|
|
/** |
|
238
|
|
|
* Compute the (pessimistic) makespan projection by analyzing the extracted decomposition graph starting |
|
239
|
|
|
* from a given value of the domain |
|
240
|
|
|
* |
|
241
|
|
|
* @param value |
|
242
|
|
|
* @return |
|
243
|
|
|
*/ |
|
244
|
|
|
private Map<DomainComponent, Double[]> computeMakespanProjections(ComponentValue value) |
|
245
|
|
|
{ |
|
246
|
|
|
// set data structure |
|
247
|
|
|
Map<DomainComponent, Double[]> makespan = new HashMap<>(); |
|
248
|
|
|
// check if leaf |
|
249
|
|
|
if (!this.pgraph.containsKey(value) || |
|
250
|
|
|
this.pgraph.get(value).isEmpty()) { |
|
251
|
|
|
|
|
252
|
|
|
// set value expected minimum duration |
|
253
|
|
|
makespan.put(value.getComponent(), new Double[] { |
|
254
|
|
|
(double) value.getDurationLowerBound(), |
|
255
|
|
|
(double) value.getDurationUpperBound() |
|
256
|
|
|
}); |
|
257
|
|
|
|
|
258
|
|
|
} else { |
|
259
|
|
|
|
|
260
|
|
|
// check possible decompositions |
|
261
|
|
View Code Duplication |
for (List<ComponentValue> decomposition : this.pgraph.get(value)) { |
|
|
|
|
|
|
262
|
|
|
|
|
263
|
|
|
// set decomposition makespan |
|
264
|
|
|
Map<DomainComponent, Double[]> dMakespan = new HashMap<>(); |
|
265
|
|
|
// check subgoals |
|
266
|
|
|
for (ComponentValue subgoal : decomposition) { |
|
267
|
|
|
|
|
268
|
|
|
// recursive call to compute (pessimistic) makespan estimation |
|
269
|
|
|
Map<DomainComponent, Double[]> update = this.computeMakespanProjections(subgoal); |
|
270
|
|
|
// increment decomposition makespan |
|
271
|
|
|
for (DomainComponent c : update.keySet()) { |
|
|
|
|
|
|
272
|
|
|
|
|
273
|
|
|
// check decomposition makespan |
|
274
|
|
|
if (!dMakespan.containsKey(c)) { |
|
275
|
|
|
// add entry |
|
276
|
|
|
dMakespan.put(c, new Double[] { |
|
277
|
|
|
update.get(c)[0], |
|
278
|
|
|
update.get(c)[1] |
|
279
|
|
|
}); |
|
280
|
|
|
|
|
281
|
|
|
} else { |
|
282
|
|
|
|
|
283
|
|
|
// increment component's makespan |
|
284
|
|
|
dMakespan.put(c, new Double[] { |
|
285
|
|
|
dMakespan.get(c)[0] + update.get(c)[0], |
|
286
|
|
|
dMakespan.get(c)[1] + update.get(c)[1] |
|
287
|
|
|
}); |
|
288
|
|
|
} |
|
289
|
|
|
} |
|
290
|
|
|
} |
|
291
|
|
|
|
|
292
|
|
|
// update resulting makespan by taking into account the maximum value |
|
293
|
|
|
for (DomainComponent c : dMakespan.keySet()) { |
|
|
|
|
|
|
294
|
|
|
|
|
295
|
|
|
// check makespan |
|
296
|
|
|
if (!makespan.containsKey(c)) { |
|
297
|
|
|
|
|
298
|
|
|
// add entry |
|
299
|
|
|
makespan.put(c, new Double[] { |
|
300
|
|
|
dMakespan.get(c)[0], |
|
301
|
|
|
dMakespan.get(c)[1] |
|
302
|
|
|
}); |
|
303
|
|
|
|
|
304
|
|
|
} else { |
|
305
|
|
|
|
|
306
|
|
|
// set the pessimistic and optimistic projections |
|
307
|
|
|
makespan.put(c, new Double[] { |
|
308
|
|
|
Math.min(makespan.get(c)[0], dMakespan.get(c)[0]), |
|
309
|
|
|
Math.max(makespan.get(c)[1], dMakespan.get(c)[1]) |
|
310
|
|
|
}); |
|
311
|
|
|
} |
|
312
|
|
|
} |
|
313
|
|
|
} |
|
314
|
|
|
|
|
315
|
|
|
// set cost associated to the value |
|
316
|
|
|
if (!makespan.containsKey(value.getComponent())) { |
|
317
|
|
|
|
|
318
|
|
|
// set cost |
|
319
|
|
|
makespan.put(value.getComponent(), new Double[] { |
|
320
|
|
|
(double) value.getDurationLowerBound(), |
|
321
|
|
|
(double) value.getDurationLowerBound() |
|
322
|
|
|
}); |
|
323
|
|
|
|
|
324
|
|
|
} else { |
|
325
|
|
|
|
|
326
|
|
|
// increment makespan |
|
327
|
|
|
makespan.put(value.getComponent(), new Double[] { |
|
328
|
|
|
makespan.get(value.getComponent())[0] + ((double) value.getDurationLowerBound()), |
|
329
|
|
|
makespan.get(value.getComponent())[1] + ((double) value.getDurationLowerBound()) |
|
330
|
|
|
}); |
|
331
|
|
|
} |
|
332
|
|
|
} |
|
333
|
|
|
|
|
334
|
|
|
// get the makespan |
|
335
|
|
|
return makespan; |
|
336
|
|
|
} |
|
337
|
|
|
|
|
338
|
|
|
|
|
339
|
|
|
/** |
|
340
|
|
|
* |
|
341
|
|
|
* @return |
|
342
|
|
|
*/ |
|
343
|
|
|
public String getLabel() { |
|
344
|
|
|
return this.label; |
|
345
|
|
|
} |
|
346
|
|
|
|
|
347
|
|
|
/** |
|
348
|
|
|
* |
|
349
|
|
|
* @return |
|
350
|
|
|
*/ |
|
351
|
|
|
public int getFringeSize() { |
|
352
|
|
|
return this.fringe.size(); |
|
353
|
|
|
} |
|
354
|
|
|
|
|
355
|
|
|
	/**
	 * Adds a node to the fringe of the search space.
	 *
	 * The insertion policy — and therefore the exploration order — is defined
	 * by the concrete strategy.
	 *
	 * @param node the search-space node to enqueue
	 */
	public abstract void enqueue(SearchSpaceNode node);
|
360
|
|
|
|
|
361
|
|
|
	/**
	 * Compares two search-space nodes to establish their extraction order from
	 * the fringe; concrete strategies implement their own heuristic ordering.
	 *
	 * @param n1 a node
	 * @param n2 another node
	 * @return a negative integer, zero, or a positive integer if n1 should be
	 *         extracted before, indifferently with, or after n2
	 */
	@Override
	public abstract int compare(SearchSpaceNode n1, SearchSpaceNode n2);
|
366
|
|
|
|
|
367
|
|
|
/** |
|
368
|
|
|
* |
|
369
|
|
|
* @return |
|
370
|
|
|
* @throws EmptyFringeException |
|
371
|
|
|
*/ |
|
372
|
|
|
public SearchSpaceNode dequeue() |
|
373
|
|
|
throws EmptyFringeException |
|
374
|
|
|
{ |
|
375
|
|
|
// set next node of the fringe |
|
376
|
|
|
SearchSpaceNode next = null; |
|
377
|
|
|
try |
|
378
|
|
|
{ |
|
379
|
|
|
// extract the "best" node from the fringe |
|
380
|
|
|
next = this.fringe.remove(); |
|
381
|
|
|
// store search data record |
|
382
|
|
|
this.registerSearchChoice(next); |
|
383
|
|
|
} |
|
384
|
|
|
catch (NoSuchElementException ex) { |
|
385
|
|
|
// empty fringe |
|
386
|
|
|
throw new EmptyFringeException("No more nodes in the fringe"); |
|
387
|
|
|
} |
|
388
|
|
|
|
|
389
|
|
|
// get extracted node |
|
390
|
|
|
return next; |
|
391
|
|
|
} |
|
392
|
|
|
|
|
393
|
|
|
/** |
|
394
|
|
|
* Clear the internal data structures of a search strategy |
|
395
|
|
|
*/ |
|
396
|
|
|
public void clear() { |
|
397
|
|
|
// clear queue |
|
398
|
|
|
this.fringe.clear(); |
|
399
|
|
|
// close DB connection if necessary |
|
400
|
|
|
if (client != null) { |
|
401
|
|
|
client.close(); |
|
402
|
|
|
client = null; |
|
|
|
|
|
|
403
|
|
|
this.collection = null; |
|
404
|
|
|
} |
|
405
|
|
|
} |
|
406
|
|
|
|
|
407
|
|
|
/** |
|
408
|
|
|
* |
|
409
|
|
|
*/ |
|
410
|
|
|
public String toString() { |
|
411
|
|
|
// JSON like object description |
|
412
|
|
|
return "{ \"label\": \"" + this.label + "\" }"; |
|
413
|
|
|
} |
|
414
|
|
|
|
|
415
|
|
|
	/**
	 * Records the extraction of a node from the fringe into the MongoDB
	 * statistics collection.
	 *
	 * Does nothing when MongoDB is not configured (collection is null). Each
	 * record stores the node id, the fringe size after extraction, the number
	 * of pending flaws, the node depth, the consolidated plan cost/makespan,
	 * and the heuristic (min/max) estimates of cost and makespan.
	 *
	 * @param node the node just extracted from the fringe
	 */
	protected void registerSearchChoice(SearchSpaceNode node)
	{
		// check db collection
		if (this.collection != null) {
			// create solving statistic record
			Document doc = new Document("step", node.getId());
			doc.append("fringe-size", this.fringe.size());
			doc.append("node-number-of-flaws", node.getNumberOfFlaws());
			doc.append("node-depth", node.getDepth());
			
			// consolidated values of metrics
			doc.append("node-plan-cost", node.getPlanCost());
			doc.append("node-plan-makespan-min", node.getPlanMakespan()[0]);
			doc.append("node-plan-makespan-max", node.getPlanMakespan()[1]);
			
			// heuristic estimation of metrics (index 0 = min, index 1 = max)
			doc.append("node-heuristic-plan-cost-min", node.getPlanHeuristicCost()[0]);
			doc.append("node-heuristic-plan-cost-max", node.getPlanHeuristicCost()[1]);
			doc.append("node-heuristic-plan-makespan-min", node.getPlanHeuristicMakespan()[0]);
			doc.append("node-heuristic-plan-makespan-max", node.getPlanHeuristicMakespan()[1]);
			
			// insert data into the collection
			this.collection.insertOne(doc);
		}
	}
|
444
|
|
|
|
|
445
|
|
|
/** |
|
446
|
|
|
* This method computes an evaluation concerning the (planning) distance of |
|
447
|
|
|
* a given node from a solution plan. |
|
448
|
|
|
* |
|
449
|
|
|
* Namely the method computes the expected cost the planner should "pay" to refine |
|
450
|
|
|
* the given node and obtain a valid solution. The cost takes into account both planning |
|
451
|
|
|
* and scheduling decisions. Also, the cost considers possible "gaps" on timelines and |
|
452
|
|
|
* tries to estimates the planning effort needed to complete the behaviors of |
|
453
|
|
|
* related timelines. |
|
454
|
|
|
* |
|
455
|
|
|
* The heuristics computes a cost for each component of the domain and |
|
456
|
|
|
* takes into account timeline projections and therefore computes a pessimistic |
|
457
|
|
|
* and optimistic evaluation. |
|
458
|
|
|
* |
|
459
|
|
|
* @param node |
|
460
|
|
|
* @return |
|
461
|
|
|
*/ |
|
462
|
|
|
protected Map<DomainComponent, Double[]> computeHeuristicCost(SearchSpaceNode node) |
|
463
|
|
|
{ |
|
464
|
|
|
// compute an optimistic and pessimistic estimation of planning operations |
|
465
|
|
|
Map<DomainComponent, Double[]> cost = new HashMap<>(); |
|
466
|
|
|
// check node flaws and compute heuristic estimation |
|
467
|
|
|
for (Flaw flaw : node.getFlaws()) |
|
468
|
|
|
{ |
|
469
|
|
|
// check planning goal |
|
470
|
|
|
if (flaw.getType().equals(FlawType.PLAN_REFINEMENT)) |
|
471
|
|
|
{ |
|
472
|
|
|
// get flaw data |
|
473
|
|
|
Goal goal = (Goal) flaw; |
|
474
|
|
|
// compute cost projections |
|
475
|
|
|
Map<DomainComponent, Double[]> update = this.computeCostProjections(goal.getDecision().getValue()); |
|
476
|
|
|
// update cost |
|
477
|
|
|
for (DomainComponent c : update.keySet()) { |
|
|
|
|
|
|
478
|
|
|
if (!cost.containsKey(c)) { |
|
479
|
|
|
// set cost |
|
480
|
|
|
cost.put(c, new Double[] { |
|
481
|
|
|
this.planningCost * update.get(c)[0], |
|
482
|
|
|
this.planningCost * update.get(c)[1] |
|
483
|
|
|
}); |
|
484
|
|
|
} |
|
485
|
|
|
else { |
|
486
|
|
|
// update cost |
|
487
|
|
|
cost.put(c, new Double[] { |
|
488
|
|
|
cost.get(c)[0] + (this.planningCost * update.get(c)[0]), |
|
489
|
|
|
cost.get(c)[1] + (this.planningCost * update.get(c)[1]) |
|
490
|
|
|
}); |
|
491
|
|
|
} |
|
492
|
|
|
} |
|
493
|
|
|
} |
|
494
|
|
|
|
|
495
|
|
|
// check scheduling goal |
|
496
|
|
View Code Duplication |
if (flaw.getType().equals(FlawType.TIMELINE_OVERFLOW)) |
|
|
|
|
|
|
497
|
|
|
{ |
|
498
|
|
|
// get component |
|
499
|
|
|
DomainComponent comp = flaw.getComponent(); |
|
500
|
|
|
// update cost |
|
501
|
|
|
if (!cost.containsKey(comp)) { |
|
502
|
|
|
// set cost |
|
503
|
|
|
cost.put(comp, new Double[] { |
|
504
|
|
|
this.schedulingCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1), |
|
505
|
|
|
this.schedulingCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1) |
|
506
|
|
|
}); |
|
507
|
|
|
} |
|
508
|
|
|
else { |
|
509
|
|
|
// update cost |
|
510
|
|
|
cost.put(comp, new Double[] { |
|
511
|
|
|
cost.get(comp)[0] + (this.schedulingCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1)), |
|
512
|
|
|
cost.get(comp)[1] + (this.schedulingCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1)) |
|
513
|
|
|
}); |
|
514
|
|
|
} |
|
515
|
|
|
} |
|
516
|
|
|
|
|
517
|
|
|
// check scheduling goal |
|
518
|
|
View Code Duplication |
if (flaw.getType().equals(FlawType.TIMELINE_BEHAVIOR_PLANNING)) |
|
|
|
|
|
|
519
|
|
|
{ |
|
520
|
|
|
// get component |
|
521
|
|
|
DomainComponent comp = flaw.getComponent(); |
|
522
|
|
|
// update cost |
|
523
|
|
|
if (!cost.containsKey(comp)) { |
|
524
|
|
|
// set cost |
|
525
|
|
|
cost.put(comp, new Double[] { |
|
526
|
|
|
this.completionCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1), |
|
527
|
|
|
this.completionCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1) |
|
528
|
|
|
}); |
|
529
|
|
|
} |
|
530
|
|
|
else { |
|
531
|
|
|
// update cost |
|
532
|
|
|
cost.put(comp, new Double[] { |
|
533
|
|
|
cost.get(comp)[0] + (this.completionCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1)), |
|
534
|
|
|
cost.get(comp)[1] + (this.completionCost * (this.pdb.getDomainKnowledge().getHierarchicalLevelValue(comp) + 1)) |
|
535
|
|
|
}); |
|
536
|
|
|
} |
|
537
|
|
|
} |
|
538
|
|
|
} |
|
539
|
|
|
|
|
540
|
|
|
|
|
541
|
|
|
// finalize data structure |
|
542
|
|
|
for (DomainComponent c : this.pdb.getComponents()) { |
|
543
|
|
|
if (!cost.containsKey(c)) { |
|
544
|
|
|
cost.put(c, new Double[] { |
|
545
|
|
|
(double) 0, |
|
546
|
|
|
(double) 0 |
|
547
|
|
|
}); |
|
548
|
|
|
} |
|
549
|
|
|
} |
|
550
|
|
|
|
|
551
|
|
|
// get cost |
|
552
|
|
|
return cost; |
|
553
|
|
|
} |
|
554
|
|
|
|
|
555
|
|
|
/** |
|
556
|
|
|
* |
|
557
|
|
|
* This method provides an heuristic evaluation of the makespan of domain components. |
|
558
|
|
|
* |
|
559
|
|
|
* Namely, the method considesrs planning subgoals of a given partial plan and computes |
|
560
|
|
|
* a projection of the makespan. The evalution takes into account optmistic and pessimistic |
|
561
|
|
|
* projections of timelines |
|
562
|
|
|
* |
|
563
|
|
|
* @param node |
|
564
|
|
|
* @return |
|
565
|
|
|
*/ |
|
566
|
|
|
protected Map<DomainComponent, Double[]> computeHeuristicMakespan(SearchSpaceNode node) |
|
567
|
|
|
{ |
|
568
|
|
|
// initialize makespan projects |
|
569
|
|
|
Map<DomainComponent, Double[]> projections = new HashMap<>(); |
|
570
|
|
|
// check node flaws and compute heuristic estimation |
|
571
|
|
|
for (Flaw flaw : node.getFlaws()) |
|
572
|
|
|
{ |
|
573
|
|
|
// check planning goals |
|
574
|
|
|
if (flaw.getType().equals(FlawType.PLAN_REFINEMENT)) |
|
575
|
|
|
{ |
|
576
|
|
|
// get planning goal |
|
577
|
|
|
Goal goal = (Goal) flaw; |
|
578
|
|
|
// compute optimistic and pessimistic projections of makespan from goals |
|
579
|
|
|
Map<DomainComponent, Double[]> update = this.computeMakespanProjections( |
|
580
|
|
|
goal.getDecision().getValue()); |
|
581
|
|
|
|
|
582
|
|
|
// update plan projections |
|
583
|
|
|
for (DomainComponent c : update.keySet()) |
|
|
|
|
|
|
584
|
|
|
{ |
|
585
|
|
|
// check projection |
|
586
|
|
|
if (!projections.containsKey(c)) { |
|
587
|
|
|
// set projection |
|
588
|
|
|
projections.put(c, |
|
589
|
|
|
new Double[] { |
|
590
|
|
|
update.get(c)[0], |
|
591
|
|
|
update.get(c)[1] |
|
592
|
|
|
}); |
|
593
|
|
|
} |
|
594
|
|
|
else { |
|
595
|
|
|
// update projection |
|
596
|
|
|
projections.put(c, |
|
597
|
|
|
new Double[] { |
|
598
|
|
|
projections.get(c)[0] + update.get(c)[0], |
|
599
|
|
|
projections.get(c)[1] + update.get(c)[1] |
|
600
|
|
|
}); |
|
601
|
|
|
} |
|
602
|
|
|
} |
|
603
|
|
|
} |
|
604
|
|
|
} |
|
605
|
|
|
|
|
606
|
|
|
// finalize data structure |
|
607
|
|
|
for (DomainComponent c : this.pdb.getComponents()) { |
|
608
|
|
|
if (!projections.containsKey(c)) { |
|
609
|
|
|
projections.put(c, new Double[] { |
|
610
|
|
|
(double) 0, |
|
611
|
|
|
(double) 0 |
|
612
|
|
|
}); |
|
613
|
|
|
} |
|
614
|
|
|
} |
|
615
|
|
|
|
|
616
|
|
|
// get projections |
|
617
|
|
|
return projections; |
|
618
|
|
|
} |
|
619
|
|
|
|
|
620
|
|
|
/** |
|
621
|
|
|
* |
|
622
|
|
|
* @param graph |
|
623
|
|
|
*/ |
|
624
|
|
|
private void exportHierarchyGraph(List<DomainComponent>[] graph) |
|
625
|
|
|
{ |
|
626
|
|
|
// export graph |
|
627
|
|
|
String str = "digraph hierarhcy_graph {\n"; |
|
628
|
|
|
str += "\trankdir=TB;\n"; |
|
629
|
|
|
str += "\tnode [fontsize=11, style=filled, fillcolor=azure, shape = box]\n"; |
|
630
|
|
|
|
|
631
|
|
|
|
|
632
|
|
|
// check dependencies |
|
633
|
|
|
for (int index = 0; index < graph.length - 1; index++) { |
|
634
|
|
|
// get components at current level |
|
635
|
|
|
List<DomainComponent> currlist = graph[index]; |
|
636
|
|
|
// get components at next level |
|
637
|
|
|
List<DomainComponent> nextlist = graph[index + 1]; |
|
638
|
|
|
|
|
639
|
|
|
for (DomainComponent curr : currlist) { |
|
640
|
|
|
for (DomainComponent next : nextlist) { |
|
641
|
|
|
// add an edge to the graph |
|
642
|
|
|
str += "\t" + curr.getName() + " -> " + next.getName(); |
|
|
|
|
|
|
643
|
|
|
|
|
644
|
|
|
} |
|
645
|
|
|
} |
|
646
|
|
|
|
|
647
|
|
|
} |
|
648
|
|
|
|
|
649
|
|
|
// close |
|
650
|
|
|
str += "\n}\n\n"; |
|
651
|
|
|
|
|
652
|
|
|
try |
|
653
|
|
|
{ |
|
654
|
|
|
File pdlFile = new File(FRAMEWORK_HOME + "hierarchy_graph.dot"); |
|
655
|
|
|
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(pdlFile), "UTF-8"))) { |
|
656
|
|
|
// write file |
|
657
|
|
|
writer.write(str); |
|
658
|
|
|
} |
|
659
|
|
|
} |
|
660
|
|
|
catch (Exception ex) { |
|
661
|
|
|
throw new RuntimeException(ex.getMessage()); |
|
|
|
|
|
|
662
|
|
|
} |
|
663
|
|
|
} |
|
664
|
|
|
|
|
665
|
|
|
|
|
666
|
|
|
/** |
|
667
|
|
|
* |
|
668
|
|
|
* @param graph |
|
669
|
|
|
*/ |
|
670
|
|
|
private void exportDependencyGraph(Map<DomainComponent, Set<DomainComponent>> graph) |
|
671
|
|
|
{ |
|
672
|
|
|
// export graph |
|
673
|
|
|
String str = "digraph dependency_graph {\n"; |
|
674
|
|
|
str += "\trankdir=TB;\n"; |
|
675
|
|
|
str += "\tnode [fontsize=11, style=filled, fillcolor=azure, shape = box]\n"; |
|
676
|
|
|
|
|
677
|
|
|
// check dependencies |
|
678
|
|
|
for (DomainComponent comp : graph.keySet()) { |
|
|
|
|
|
|
679
|
|
|
// check dependencies |
|
680
|
|
|
for (DomainComponent dep : graph.get(comp)) { |
|
681
|
|
|
// add an edge to the graph |
|
682
|
|
|
str += "\t" + dep.getName() + " -> " + comp.getName(); |
|
|
|
|
|
|
683
|
|
|
} |
|
684
|
|
|
} |
|
685
|
|
|
|
|
686
|
|
|
// close |
|
687
|
|
|
str += "\n}\n\n"; |
|
688
|
|
|
|
|
689
|
|
|
try |
|
690
|
|
|
{ |
|
691
|
|
|
File pdlFile = new File(FRAMEWORK_HOME + "dependency_graph.dot"); |
|
692
|
|
|
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(pdlFile), "UTF-8"))) { |
|
693
|
|
|
// write file |
|
694
|
|
|
writer.write(str); |
|
695
|
|
|
} |
|
696
|
|
|
} |
|
697
|
|
|
catch (Exception ex) { |
|
698
|
|
|
throw new RuntimeException(ex.getMessage()); |
|
|
|
|
|
|
699
|
|
|
} |
|
700
|
|
|
} |
|
701
|
|
|
|
|
702
|
|
|
/** |
|
703
|
|
|
* |
|
704
|
|
|
* @param graph |
|
705
|
|
|
*/ |
|
706
|
|
|
private void exportDecompositionGraph(Map<ComponentValue, List<List<ComponentValue>>> graph) |
|
707
|
|
|
{ |
|
708
|
|
|
// export graph |
|
709
|
|
|
String str = "digraph decomposition_graph {\n"; |
|
710
|
|
|
str += "\trankdir=TB;\n"; |
|
711
|
|
|
str += "\tnode [fontsize=11, style=filled, fillcolor=azure, shape = box]\n"; |
|
712
|
|
|
|
|
713
|
|
|
// node id |
|
714
|
|
|
int counter = 0; |
|
715
|
|
|
// create AND nodes |
|
716
|
|
|
int andCounter = 0; |
|
717
|
|
|
// check the graph |
|
718
|
|
|
for (ComponentValue value : graph.keySet()) |
|
|
|
|
|
|
719
|
|
|
{ |
|
720
|
|
|
// check number of disjunctions |
|
721
|
|
|
List<List<ComponentValue>> disjunctions = graph.get(value); |
|
722
|
|
|
if (disjunctions.size() == 1) |
|
723
|
|
|
{ |
|
724
|
|
|
String andNode = "AND_" + andCounter; |
|
725
|
|
|
str += "\t" + andNode + " [fontsize=6, shape= oval, style=filled, fillcolor= palegreen];\n"; |
|
|
|
|
|
|
726
|
|
|
|
|
727
|
|
|
str += "\t" + value.getComponent().getName() + "_" + value.getLabel().replace("-", "_") + |
|
|
|
|
|
|
728
|
|
|
" -> " + andNode + ";\n"; |
|
729
|
|
|
|
|
730
|
|
|
|
|
731
|
|
|
|
|
732
|
|
|
// set weight of the edge |
|
733
|
|
|
Map<ComponentValue, Integer> wc = new HashMap<>(); |
|
734
|
|
|
for (ComponentValue child : disjunctions.get(0)) { |
|
735
|
|
|
if (!wc.containsKey(child)) { |
|
736
|
|
|
wc.put(child, 1); |
|
737
|
|
|
} |
|
738
|
|
|
else { |
|
739
|
|
|
// increment |
|
740
|
|
|
int v = wc.get(child); |
|
741
|
|
|
wc.put(child, ++v); |
|
742
|
|
|
} |
|
743
|
|
|
} |
|
744
|
|
|
|
|
745
|
|
|
// no disjunctions |
|
746
|
|
|
for (ComponentValue child : wc.keySet()) { |
|
|
|
|
|
|
747
|
|
|
// add edge |
|
748
|
|
|
str += "\t" + andNode + " -> " + child.getComponent().getName() + "_" + child.getLabel().replace("-", "_") + " [label= \"" + wc.get(child) + "\"];\n"; |
|
|
|
|
|
|
749
|
|
|
} |
|
750
|
|
|
|
|
751
|
|
|
// increment and node counter |
|
752
|
|
|
andCounter++; |
|
753
|
|
|
} |
|
754
|
|
|
else |
|
755
|
|
|
{ |
|
756
|
|
|
// add OR node label |
|
757
|
|
|
String orLabel = "OR_" + counter; |
|
758
|
|
|
// add an edge to the OR node |
|
759
|
|
|
str += "\t" + orLabel + " [fontsize=6, shape= diamond, style=filled, fillcolor= thistle];\n"; |
|
|
|
|
|
|
760
|
|
|
str += "\t" + value.getComponent().getName() + "_" + value.getLabel().replace("-", "_") + |
|
|
|
|
|
|
761
|
|
|
" -> " + orLabel + ";\n"; |
|
762
|
|
|
|
|
763
|
|
|
// add disjunctions |
|
764
|
|
|
for (List<ComponentValue> conjunctions : disjunctions) |
|
765
|
|
|
{ |
|
766
|
|
|
// set AND node label |
|
767
|
|
|
String andLabel = "AND_" + andCounter; |
|
768
|
|
|
str += "\t" + andLabel + " [fontsize=6, shape= oval, style=filled, fillcolor= palegreen];\n"; |
|
|
|
|
|
|
769
|
|
|
|
|
770
|
|
|
// set weight of the edge |
|
771
|
|
|
Map<ComponentValue, Integer> wc = new HashMap<>(); |
|
772
|
|
|
for (ComponentValue child : conjunctions) { |
|
773
|
|
|
if (!wc.containsKey(child)) { |
|
774
|
|
|
wc.put(child, 1); |
|
775
|
|
|
} |
|
776
|
|
|
else { |
|
777
|
|
|
// increment |
|
778
|
|
|
int v = wc.get(child); |
|
779
|
|
|
wc.put(child, ++v); |
|
780
|
|
|
} |
|
781
|
|
|
} |
|
782
|
|
|
|
|
783
|
|
|
// add and edge to the AND node |
|
784
|
|
|
str += "\t" + orLabel + " -> " + andLabel + ";\n"; |
|
|
|
|
|
|
785
|
|
|
for (ComponentValue child : wc.keySet()) { |
|
|
|
|
|
|
786
|
|
|
// add edge from AND node to the value |
|
787
|
|
|
str += "\t" + andLabel + |
|
|
|
|
|
|
788
|
|
|
" -> " + child.getComponent().getName() + "_" +child.getLabel().replace("-", "_") + " [label= \"" + wc.get(child) + "\"];\n"; |
|
789
|
|
|
} |
|
790
|
|
|
|
|
791
|
|
|
|
|
792
|
|
|
// increment and node counter |
|
793
|
|
|
andCounter++; |
|
794
|
|
|
} |
|
795
|
|
|
|
|
796
|
|
|
counter++; |
|
797
|
|
|
} |
|
798
|
|
|
|
|
799
|
|
|
|
|
800
|
|
|
} |
|
801
|
|
|
|
|
802
|
|
|
// close |
|
803
|
|
|
str += "\n}\n\n"; |
|
804
|
|
|
|
|
805
|
|
|
try |
|
806
|
|
|
{ |
|
807
|
|
|
File pdlFile = new File(FRAMEWORK_HOME + "decomposition_graph.dot"); |
|
808
|
|
|
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(pdlFile), "UTF-8"))) { |
|
809
|
|
|
// write file |
|
810
|
|
|
writer.write(str); |
|
811
|
|
|
} |
|
812
|
|
|
} |
|
813
|
|
|
catch (Exception ex) { |
|
814
|
|
|
throw new RuntimeException(ex.getMessage()); |
|
|
|
|
|
|
815
|
|
|
} |
|
816
|
|
|
} |
|
817
|
|
|
} |
|
818
|
|
|
|
If you really need to set this static field, consider writing a thread-safe setter and atomic getter.