Completed
Push — master (6ce24b...74690e) by Andrei — created 01:47

cure.__create_queue()   C

Complexity
    Conditions: 7

Size
    Total Lines: 29

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0
Metric   Value
cc       7
dl       0
loc      29
rs       5.5
c        0
b        0
f        0
"""!

@brief Cluster analysis algorithm: CURE
@details Implementation based on paper @cite article::cure::1.

@authors Andrei Novikov ([email protected])
@date 2014-2018
@copyright GNU Public License

@cond GNU_PUBLIC_LICENSE
    PyClustering is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.
    
    PyClustering is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.
    
    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
@endcond

"""

import numpy
Configuration introduced by
The import numpy could not be resolved.

This can be caused by one of the following:

1. Missing Dependencies

This error could indicate a configuration issue of Pylint. Make sure that your libraries are available by adding the necessary commands.

# .scrutinizer.yml
before_commands:
    - sudo pip install abc # Python2
    - sudo pip3 install abc # Python3

Tip: We are currently not using virtualenv to run pylint; when installing your modules, make sure to use the command for the correct Python version.

2. Missing __init__.py files

This error could also result from missing __init__.py files in your module folders. Make sure that you place one file in each sub-folder.

Unused Code introduced by
The import numpy seems to be unused.
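For this particular inspection the unresolved module is numpy, so a minimal sketch of the relevant configuration (assuming the package is installable from PyPI and the build runs under Python 3) would be:

# .scrutinizer.yml (sketch)
before_commands:
    - sudo pip3 install numpy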

from pyclustering.cluster.encoder import type_encoding;

from pyclustering.utils import euclidean_distance;

from pyclustering.container.kdtree import kdtree;

from pyclustering.core.wrapper import ccore_library

import pyclustering.core.cure_wrapper as wrapper;


class cure_cluster:
    """!
    @brief Represents a data cluster in CURE terms.
    @details A CURE cluster is described by the points of the cluster, the representation points of the cluster and the cluster center.
    
    """
    
    def __init__(self, point, index):
        """!
        @brief Constructor of CURE cluster.
        
        @param[in] point (list): Point represented by list of coordinates.
        @param[in] index (uint): Index of the point in the dataset.
        
        """
        
        ## List of points that make up the cluster.
        self.points = [ ];
        
        ## Point indexes in the dataset.
        self.indexes = -1;
        
        ## Mean of points that make up the cluster.
        self.mean = None;
        
        ## List of points that represent the cluster.
        self.rep = [ ];
        
        if (point is not None):
            self.points = [ point ];
            self.indexes = [ index ];
            self.mean = point;
            self.rep = [ point ];
        
        ## Pointer to the closest cluster.
        self.closest = None;
        
        ## Distance to the closest cluster.
        self.distance = float('inf');      # distance calculation is a really expensive operation (even squared distance), so the distance to the closest cluster is cached.

    def __repr__(self):
        """!
        @brief Returns string representation with the distance to the closest cluster and the points contained in the current cluster.
        
        """
        return "%s, %s" % (self.distance, self.points);
        

class cure:
    """!
    @brief Class represents the clustering algorithm CURE with KD-tree optimization.
    @details The CCORE option can be used to run the pyclustering core - a C/C++ shared library for processing that significantly increases performance.
    
    Example:
    @code
        from pyclustering.cluster.cure import cure;
        from pyclustering.utils import read_sample;
        
        # read data for clustering from some file
        sample = read_sample(path_to_data);
        
        # create instance of the cure algorithm for cluster analysis,
        # request allocation of two clusters.
        cure_instance = cure(sample, 2, 5, 0.5, True);
        
        # run cluster analysis
        cure_instance.process();
        
        # get results of clustering
        clusters = cure_instance.get_clusters();
    @endcode
    
    """
    
    def __init__(self, data, number_cluster, number_represent_points = 5, compression = 0.5, ccore = True):
        """!
        @brief Constructor of clustering algorithm CURE.
        
        @param[in] data (list): Input data that is presented as a list of points (objects), each point should be represented by a list or tuple.
        @param[in] number_cluster (uint): Number of clusters that should be allocated.
        @param[in] number_represent_points (uint): Number of representative points for each cluster.
        @param[in] compression (double): Coefficient that defines the level of shrinking of representation points toward the mean of the newly created cluster after each merge step. Usually it is distributed from 0 to 1.
        @param[in] ccore (bool): If True then the CCORE DLL (C++ solution) will be used for processing.
        
        """
        
        self.__pointer_data = data
        
        self.__clusters = None
        self.__representors = None
        self.__means = None
        
        self.__number_cluster = number_cluster
        self.__number_represent_points = number_represent_points
        self.__compression = compression
        
        self.__ccore = ccore
        if self.__ccore:
            self.__ccore = ccore_library.workable()

        self.__validate_arguments()


    def process(self):
        """!
        @brief Performs cluster analysis in line with the rules of the CURE algorithm.
        
        @remark Results of clustering can be obtained using corresponding get methods.
        
        @see get_clusters()
        
        """
        
        if (self.__ccore is True):
            cure_data_pointer = wrapper.cure_algorithm(self.__pointer_data, self.__number_cluster, self.__number_represent_points, self.__compression);
            
            self.__clusters = wrapper.cure_get_clusters(cure_data_pointer);
            self.__representors = wrapper.cure_get_representors(cure_data_pointer);
            self.__means = wrapper.cure_get_means(cure_data_pointer);
            
            wrapper.cure_data_destroy(cure_data_pointer);
            
        else:
            self.__create_queue()      # queue
            self.__create_kdtree()     # create k-d tree

            while (len(self.__queue) > self.__number_cluster):
                cluster1 = self.__queue[0];     # cluster that has the nearest neighbor.
                cluster2 = cluster1.closest;    # closest cluster.
                
                #print("Merge decision: \n\t", cluster1, "\n\t", cluster2);
                
                self.__queue.remove(cluster1);
                self.__queue.remove(cluster2);
                
                self.__delete_represented_points(cluster1);
                self.__delete_represented_points(cluster2);
        
                merged_cluster = self.__merge_clusters(cluster1, cluster2);
        
                self.__insert_represented_points(merged_cluster);
                
                # Pointers to clusters that should be relocated are stored here.
                cluster_relocation_requests = [];
                
                # Check for the last cluster.
                if (len(self.__queue) > 0):
                    merged_cluster.closest = self.__queue[0];  # arbitrary cluster from the queue
                    merged_cluster.distance = self.__cluster_distance(merged_cluster, merged_cluster.closest);
                    
                    for item in self.__queue:
                        distance = self.__cluster_distance(merged_cluster, item);
                        # Check if the distance between the new cluster and the current one is better than the current best.
                        if (distance < merged_cluster.distance):
                            merged_cluster.closest = item;
                            merged_cluster.distance = distance;
                        
                        # Check if the current cluster has a removed neighbor.
                        if ( (item.closest is cluster1) or (item.closest is cluster2) ):
                            # If the previous distance was less than the distance to the new cluster, then the nearest cluster should be found in the tree.
                            #print("Update: ", item);
                            if (item.distance < distance):
                                (item.closest, item.distance) = self.__closest_cluster(item, distance);
                                
                                # TODO: investigation of the root cause is required.
                                # Itself and the merged cluster should always be in the list of neighbors in line with the specified radius.
                                # But the merged cluster may not be in the list due to calculation error, therefore it should be added
                                # manually.
                                if (item.closest is None):
                                    item.closest = merged_cluster;
                                    item.distance = distance;
                                
                            # Otherwise the new cluster is the nearest one.
                            else:
                                item.closest = merged_cluster;
                                item.distance = distance;
                            
                            cluster_relocation_requests.append(item);
                        elif (item.distance > distance):
                            item.closest = merged_cluster;
                            item.distance = distance;
                            
                            cluster_relocation_requests.append(item);
                
                # The new cluster and the updated clusters should be relocated in the queue.
                self.__insert_cluster(merged_cluster);
                for item in cluster_relocation_requests:
                    self.__relocate_cluster(item);
        
            # Change cluster representation.
            self.__clusters = [ cure_cluster_unit.indexes for cure_cluster_unit in self.__queue ];
            self.__representors = [ cure_cluster_unit.rep for cure_cluster_unit in self.__queue ];
            self.__means = [ cure_cluster_unit.mean for cure_cluster_unit in self.__queue ];
    
    
    def get_clusters(self):
        """!
        @brief Returns list of allocated clusters, where each cluster contains indexes of objects from the input data.
        
        @return (list) List of allocated clusters.
        
        @see process()
        @see get_representors()
        @see get_means()
        
        """
        
        return self.__clusters;
    
    
    def get_representors(self):
        """!
        @brief Returns list of point-representors of each cluster.
        @details Cluster index should be used for navigation between lists of point-representors.
        
        @return (list) List of point-representors of each cluster.
        
        @see get_clusters()
        @see get_means()
        
        """
        
        return self.__representors;
    
    
    def get_means(self):
        """!
        @brief Returns list of mean values of each cluster.
        @details Cluster index should be used for navigation between mean values.
        
        @return (list) List of mean values of each cluster.
        
        @see get_clusters()
        @see get_representors()
        
        """
        
        return self.__means;


    def get_cluster_encoding(self):
        """!
        @brief Returns clustering result representation type that indicates how clusters are encoded.
        
        @return (type_encoding) Clustering result representation.
        
        @see get_clusters()
        
        """
        
        return type_encoding.CLUSTER_INDEX_LIST_SEPARATION;


    def __validate_arguments(self):
        """!
        @brief Check input arguments of the CURE algorithm and if one of them is not correct then an appropriate exception
                is thrown.

        """
        if not isinstance(self.__pointer_data, list):
            raise ValueError("Incorrect type of data: '%s'. Input data should have 'list' type." % type(self.__pointer_data))

        if len(self.__pointer_data) == 0:
            raise ValueError("Empty input data. Data should contain at least one point.")

        if self.__number_cluster <= 0:
            raise ValueError("Incorrect amount of clusters '%d'. Amount of clusters should be greater than 0." % self.__number_cluster)

        if self.__compression < 0:
            raise ValueError("Incorrect compression level '%f'. Compression should not be negative." % self.__compression)

        if self.__number_represent_points <= 0:
            raise ValueError("Incorrect amount of representatives '%d'. Amount of representatives should be greater than 0." % self.__number_represent_points)


    def __insert_cluster(self, cluster):
        """!
        @brief Insert cluster into the list (sorted queue) in line with sequence order (distance).
        
        @param[in] cluster (cure_cluster): Cluster that should be inserted.
        
        """
        
        for index in range(len(self.__queue)):
            if (cluster.distance < self.__queue[index].distance):
                self.__queue.insert(index, cluster);
                return;
    
        self.__queue.append(cluster);


    def __relocate_cluster(self, cluster):
        """!
        @brief Relocate cluster in the list in line with distance order.
        
        @param[in] cluster (cure_cluster): Cluster that should be relocated in line with order.
        
        """
        
        self.__queue.remove(cluster);
        self.__insert_cluster(cluster);


    def __closest_cluster(self, cluster, distance):
        """!
        @brief Find the closest cluster to the specified cluster in line with distance.
        
        @param[in] cluster (cure_cluster): Cluster for which the nearest cluster should be found.
        @param[in] distance (double): Closest distance to the previous cluster.
        
        @return (tuple) Pair (nearest CURE cluster, nearest distance) if the nearest cluster has been found, otherwise (None, float('inf')) is returned.
        
        """
        
        nearest_cluster = None;
        nearest_distance = float('inf');
        
        for point in cluster.rep:
            # Nearest nodes should be returned (at least it will return itself).
            nearest_nodes = self.__tree.find_nearest_dist_nodes(point, distance);
            for (candidate_distance, kdtree_node) in nearest_nodes:
                if ( (candidate_distance < nearest_distance) and (kdtree_node is not None) and (kdtree_node.payload is not cluster) ):
                    nearest_distance = candidate_distance;
                    nearest_cluster = kdtree_node.payload;
                    
        return (nearest_cluster, nearest_distance);


    def __insert_represented_points(self, cluster):
        """!
        @brief Insert representation points into the k-d tree.
        
        @param[in] cluster (cure_cluster): Cluster whose representation points should be inserted.
        
        """
        
        for point in cluster.rep:
            self.__tree.insert(point, cluster);


    def __delete_represented_points(self, cluster): 
        """!
        @brief Remove representation points of the cluster from the k-d tree.
        
        @param[in] cluster (cure_cluster): Cluster whose representation points should be removed.
        
        """
        
        for point in cluster.rep:
            self.__tree.remove(point, payload=cluster);


    def __merge_clusters(self, cluster1, cluster2):
        """!
        @brief Merges two clusters and returns the new merged cluster. Representation points and the mean point are calculated for the new cluster.
        
        @param[in] cluster1 (cure_cluster): Cluster that should be merged.
        @param[in] cluster2 (cure_cluster): Cluster that should be merged.
        
        @return (cure_cluster) New merged CURE cluster.
        
        """
        
        merged_cluster = cure_cluster(None, None);
        
        merged_cluster.points = cluster1.points + cluster2.points;
        merged_cluster.indexes = cluster1.indexes + cluster2.indexes;
        
        # merged_cluster.mean = ( len(cluster1.points) * cluster1.mean + len(cluster2.points) * cluster2.mean ) / ( len(cluster1.points) + len(cluster2.points) );
        dimension = len(cluster1.mean);
        merged_cluster.mean = [0] * dimension;
        if merged_cluster.points[1:] == merged_cluster.points[:-1]:
            # All points of the merged cluster are identical - the mean is simply that point.
            merged_cluster.mean = merged_cluster.points[0]
        else:
            # Weighted mean of the two cluster means, coordinate by coordinate.
            for index in range(dimension):
                merged_cluster.mean[index] = ( len(cluster1.points) * cluster1.mean[index] + len(cluster2.points) * cluster2.mean[index] ) / ( len(cluster1.points) + len(cluster2.points) );
        
        temporary = list();
        
        # Choose well-scattered points of the merged cluster as candidates for representation points.
        for index in range(self.__number_represent_points):
            maximal_distance = 0;
            maximal_point = None;
            
            for point in merged_cluster.points:
                minimal_distance = 0;
                if (index == 0):
                    minimal_distance = euclidean_distance(point, merged_cluster.mean);
                    #minimal_distance = euclidean_distance_sqrt(point, merged_cluster.mean);
                else:
                    minimal_distance = min([euclidean_distance(point, p) for p in temporary]);
                    #minimal_distance = cluster_distance(cure_cluster(point), cure_cluster(temporary[0]));
                    
                if (minimal_distance >= maximal_distance):
                    maximal_distance = minimal_distance;
                    maximal_point = point;
        
            if (maximal_point not in temporary):
                temporary.append(maximal_point);
                
        # Shrink the chosen points toward the mean to obtain the representation points.
        for point in temporary:
            representative_point = [0] * dimension;
            for index in range(dimension):
                representative_point[index] = point[index] + self.__compression * (merged_cluster.mean[index] - point[index]);
                
            merged_cluster.rep.append(representative_point);
        
        return merged_cluster;


    def __create_queue(self):
        """!
        @brief Create a queue of clusters that is sorted by distance to the nearest neighbor, so that the first cluster has the nearest neighbor. At the first iteration each cluster contains only one point.
        @details The created queue is stored in a private attribute and is used by the clustering process.
        
        """
        
        self.__queue = [cure_cluster(self.__pointer_data[index_point], index_point) for index_point in range(len(self.__pointer_data))];
        
        # set the closest cluster for each cluster
        for i in range(0, len(self.__queue)):
            minimal_distance = float('inf');
            closest_index_cluster = -1;
            
            for k in range(0, len(self.__queue)):
                if (i != k):
                    dist = self.__cluster_distance(self.__queue[i], self.__queue[k]);
                    if (dist < minimal_distance):
                        minimal_distance = dist;
                        closest_index_cluster = k;
            
            self.__queue[i].closest = self.__queue[closest_index_cluster];
            self.__queue[i].distance = minimal_distance;
        
        # sort clusters by distance to the closest cluster
        self.__queue.sort(key = lambda x: x.distance, reverse = False);
    

    def __create_kdtree(self):
        """!
        @brief Create a k-d tree in line with the created clusters. At the first iteration it contains all points of the input data set.
        @details The created k-d tree consists of representative points of the CURE clusters and is stored in a private attribute.
        
        """
        
        self.__tree = kdtree();
        for current_cluster in self.__queue:
            for representative_point in current_cluster.rep:
                self.__tree.insert(representative_point, current_cluster);


    def __cluster_distance(self, cluster1, cluster2):
        """!
        @brief Calculate minimal distance between clusters using representative points.
        
        @param[in] cluster1 (cure_cluster): The first cluster.
        @param[in] cluster2 (cure_cluster): The second cluster.
        
        @return (double) Euclidean distance between two clusters that is defined by the minimum distance between the representation points of the two clusters.
        
        """
        
        distance = float('inf');
        for i in range(0, len(cluster1.rep)):
            for k in range(0, len(cluster2.rep)):
                #dist = euclidean_distance_sqrt(cluster1.rep[i], cluster2.rep[k]);   # Fast mode
                dist = euclidean_distance(cluster1.rep[i], cluster2.rep[k]);         # Slow mode
                if (dist < distance):
                    distance = dist;
                    
        return distance;
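
A minimal standalone usage sketch of the public API above (not part of the module). The sample points are made up for illustration, and ccore=False forces the pure Python path shown in process():

    from pyclustering.cluster.cure import cure;

    # two well-separated groups of 2D points (illustrative data only)
    sample = [[1.0, 1.0], [1.2, 0.9], [0.9, 1.1],
              [5.0, 5.0], [5.1, 4.9], [4.8, 5.2]];

    cure_instance = cure(sample, 2, number_represent_points = 5, compression = 0.5, ccore = False);
    cure_instance.process();

    clusters = cure_instance.get_clusters();          # lists of point indexes, e.g. [[0, 1, 2], [3, 4, 5]]
    representors = cure_instance.get_representors();  # representative points of each cluster
    means = cure_instance.get_means();                # mean point of each cluster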