import os
import sys
import random
import pdb
import string
import struct
import logging
import time, datetime
import copy
import numpy as np
from sklearn import preprocessing
from milvus import Milvus, IndexType, MetricType, DataType

port = 19530
epsilon = 0.000001
default_flush_interval = 1
big_flush_interval = 1000
dimension = 128
segment_size = 10

# TODO:
all_index_types = [
    "FLAT",
    "IVF_FLAT",
    "IVF_SQ8",
    "IVF_SQ8_HYBRID",
    "IVF_PQ",
    "HNSW",
    # "NSG",
    "ANNOY",
    "BIN_FLAT",
    "BIN_IVF_FLAT"
]


# default_index_params[i] is the default build parameter for all_index_types[i]
# (gen_simple_index() and gen_binary_index() pair the two lists by position).
default_index_params = [
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024, "m": 16},
    {"M": 48, "efConstruction": 500},
    # {"search_length": 50, "out_degree": 40, "candidate_pool_size": 100, "knng": 50},
    {"n_trees": 4},
    {"nlist": 1024},
    {"nlist": 1024}
]


def index_cpu_not_support():
    return ["IVF_SQ8_HYBRID"]


def binary_support():
    return ["BIN_FLAT", "BIN_IVF_FLAT"]


def delete_support():
    return ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID", "IVF_PQ"]


def ivf():
    return ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID", "IVF_PQ"]


def l2(x, y):
    return np.linalg.norm(np.array(x) - np.array(y))


def ip(x, y):
    return np.inner(np.array(x), np.array(y))
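
# Quick sanity check for the float metrics above (illustrative values, not taken from the tests):
#   >>> l2([0.0, 0.0], [3.0, 4.0])
#   5.0
#   >>> ip([1, 2, 3], [4, 5, 6])
#   32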


def jaccard(x, y):
    x = np.asarray(x, np.bool)
    y = np.asarray(y, np.bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum())


def hamming(x, y):
    x = np.asarray(x, np.bool)
    y = np.asarray(y, np.bool)
    return np.bitwise_xor(x, y).sum()


def tanimoto(x, y):
    x = np.asarray(x, np.bool)
    y = np.asarray(y, np.bool)
    return -np.log2(np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum()))


def substructure(x, y):
    x = np.asarray(x, np.bool)
    y = np.asarray(y, np.bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(y)


def superstructure(x, y):
    x = np.asarray(x, np.bool)
    y = np.asarray(y, np.bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(x)
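
# Reference implementations of the binary metric types listed in gen_single_vector_fields()
# (HAMMING, JACCARD, TANIMOTO, SUBSTRUCTURE, SUPERSTRUCTURE). Illustrative values for
# x = [1, 1, 0, 0], y = [1, 0, 1, 0]:
#   hamming(x, y) == 2, jaccard(x, y) == 1 - 1/3 ~= 0.667, tanimoto(x, y) == -log2(1/3) ~= 1.585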


def get_milvus(host, port, uri=None, handler=None, **kwargs):
    if handler is None:
        handler = "GRPC"
    try_connect = kwargs.get("try_connect", True)
    if uri is not None:
        milvus = Milvus(uri=uri, handler=handler, try_connect=try_connect)
    else:
        milvus = Milvus(host=host, port=port, handler=handler, try_connect=try_connect)
    return milvus


def disable_flush(connect):
    connect.set_config("storage", "auto_flush_interval", big_flush_interval)


def enable_flush(connect):
    # reset auto_flush_interval=1
    connect.set_config("storage", "auto_flush_interval", default_flush_interval)
    config_value = connect.get_config("storage", "auto_flush_interval")
    assert config_value == str(default_flush_interval)
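
# Illustrative usage, assuming a Milvus server is reachable on the module-level `port`:
#   client = get_milvus("127.0.0.1", port)
#   disable_flush(client)   # raise auto_flush_interval to big_flush_interval (1000)
#   enable_flush(client)    # restore the 1-second default and assert it took effect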


def gen_inaccuracy(num):
    return num / 255.0


def gen_vectors(num, dim, is_normal=False):
    vectors = [[random.random() for _ in range(dim)] for _ in range(num)]
    vectors = preprocessing.normalize(vectors, axis=1, norm='l2')
    return vectors.tolist()


# def gen_vectors(num, dim, seed=np.random.RandomState(1234), is_normal=False):
#     xb = seed.rand(num, dim).astype("float32")
#     xb = preprocessing.normalize(xb, axis=1, norm='l2')
#     return xb.tolist()


def gen_binary_vectors(num, dim):
    raw_vectors = []
    binary_vectors = []
    for i in range(num):
        raw_vector = [random.randint(0, 1) for i in range(dim)]
        raw_vectors.append(raw_vector)
        binary_vectors.append(bytes(np.packbits(raw_vector, axis=-1).tolist()))
    return raw_vectors, binary_vectors
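
# np.packbits packs each dim-bit raw vector into ceil(dim / 8) bytes (zero-padding the last
# byte), so with the module-level dimension of 128 every binary vector is 16 bytes long:
#   raw, packed = gen_binary_vectors(10, 128)   # len(raw[0]) == 128, len(packed[0]) == 16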


def gen_binary_sub_vectors(vectors, length):
    raw_vectors = []
    binary_vectors = []
    dim = len(vectors[0])
    for i in range(length):
        raw_vector = [0 for i in range(dim)]
        vector = vectors[i]
        for index, j in enumerate(vector):
            if j == 1:
                raw_vector[index] = 1
        raw_vectors.append(raw_vector)
        binary_vectors.append(bytes(np.packbits(raw_vector, axis=-1).tolist()))
    return raw_vectors, binary_vectors


def gen_binary_super_vectors(vectors, length):
    raw_vectors = []
    binary_vectors = []
    dim = len(vectors[0])
    for i in range(length):
        cnt_1 = np.count_nonzero(vectors[i])
        raw_vector = [1 for i in range(dim)]
        raw_vectors.append(raw_vector)
        binary_vectors.append(bytes(np.packbits(raw_vector, axis=-1).tolist()))
    return raw_vectors, binary_vectors


def gen_int_attr(row_num):
    return [random.randint(0, 255) for _ in range(row_num)]


def gen_float_attr(row_num):
    return [random.uniform(0, 255) for _ in range(row_num)]


def gen_unique_str(str_value=None):
    prefix = "".join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
    return "test_" + prefix if str_value is None else str_value + "_" + prefix


def gen_single_filter_fields():
    fields = []
    for data_type in DataType:
        if data_type in [DataType.INT8, DataType.INT16, DataType.INT32, DataType.INT64, DataType.FLOAT, DataType.DOUBLE]:
            fields.append({"field": data_type.name, "type": data_type})
    return fields


def gen_single_vector_fields():
    fields = []
    for metric_type in ['HAMMING', 'IP', 'JACCARD', 'L2', 'SUBSTRUCTURE', 'SUPERSTRUCTURE', 'TANIMOTO']:
        for data_type in [DataType.FLOAT_VECTOR, DataType.BINARY_VECTOR]:
            if metric_type in ["L2", "IP"] and data_type == DataType.BINARY_VECTOR:
                continue
            if metric_type not in ["L2", "IP"] and data_type == DataType.FLOAT_VECTOR:
                continue
            field = {"field": data_type.name, "type": data_type, "params": {"metric_type": metric_type, "dimension": dimension}}
            fields.append(field)
    return fields


def gen_default_fields():
    default_fields = {
        "fields": [
            {"field": "int8", "type": DataType.INT8},
            {"field": "int64", "type": DataType.INT64},
            {"field": "float", "type": DataType.FLOAT},
            {"field": "vector", "type": DataType.FLOAT_VECTOR, "params": {"metric_type": "L2", "dimension": dimension}}
        ],
        "segment_size": segment_size
    }
    return default_fields
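
# A minimal usage sketch: most tests create their collection from this default schema.
# The exact client call below is an assumption about the pre-2.0 SDK used here:
#   connect.create_collection(gen_unique_str("collection"), gen_default_fields())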


def gen_entities(nb, is_normal=False):
    vectors = gen_vectors(nb, dimension, is_normal)
    entities = [
        {"field": "int8", "type": DataType.INT8, "values": [1 for i in range(nb)]},
        {"field": "int64", "type": DataType.INT64, "values": [2 for i in range(nb)]},
        {"field": "float", "type": DataType.FLOAT, "values": [3.0 for i in range(nb)]},
        {"field": "vector", "type": DataType.FLOAT_VECTOR, "values": vectors}
    ]
    return entities


def gen_binary_entities(nb):
    raw_vectors, vectors = gen_binary_vectors(nb, dimension)
    entities = [
        {"field": "int8", "type": DataType.INT8, "values": [1 for i in range(nb)]},
        {"field": "int64", "type": DataType.INT64, "values": [2 for i in range(nb)]},
        {"field": "float", "type": DataType.FLOAT, "values": [3.0 for i in range(nb)]},
        {"field": "binary_vector", "type": DataType.BINARY_VECTOR, "values": vectors}
    ]
    return raw_vectors, entities
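
# The generated entity lists are column-oriented: one dict per field, each holding nb values,
# with the vector field last. Hedged usage sketch (the insert call is an assumption about the
# client API, with `connect` a live connection):
#   entities = gen_entities(1000)
#   ids = connect.insert(collection_name, entities)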


def gen_entities_by_fields(fields, nb, dimension):
    entities = []
    for field in fields:
        if field["type"] in [DataType.INT8, DataType.INT16, DataType.INT32, DataType.INT64]:
            field_value = [1 for i in range(nb)]
        elif field["type"] in [DataType.FLOAT, DataType.DOUBLE]:
            field_value = [3.0 for i in range(nb)]
        elif field["type"] == DataType.BINARY_VECTOR:
            field_value = gen_binary_vectors(nb, dimension)[1]
        elif field["type"] == DataType.FLOAT_VECTOR:
            field_value = gen_vectors(nb, dimension)
        else:
            # skip field types without a generator so field_value is always defined
            continue
        field.update({"values": field_value})
        entities.append(field)
    return entities


def assert_equal_entity(a, b):
    pass


def gen_query_vectors_inside_entities(field_name, entities, top_k, nq, search_params={"nprobe": 10}):
    query_vectors = entities[-1]["values"][:nq]
    query = {
        "bool": {
            "must": [
                {"vector": {field_name: {"topk": top_k, "query": query_vectors, "params": search_params}}}
            ]
        }
    }
    return query, query_vectors


def gen_query_vectors_rand_entities(field_name, entities, top_k, nq, search_params={"nprobe": 10}):
    dimension = len(entities[-1]["values"][0])
    query_vectors = gen_vectors(nq, dimension)
    query = {
        "bool": {
            "must": [
                {"vector": {field_name: {"topk": top_k, "query": query_vectors, "params": search_params}}}
            ]
        }
    }
    return query, query_vectors
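
# Both generators produce the same search DSL; for a field named "vector" the query dict looks
# like (illustrative):
#   {"bool": {"must": [{"vector": {"vector": {"topk": top_k, "query": [...], "params": {"nprobe": 10}}}}]}}
# gen_query_vectors_inside_entities() assumes the vector field is the last entry of `entities`,
# which matches the layout produced by gen_entities() and gen_binary_entities().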


def add_field(entities):
    nb = len(entities[0]["values"])
    field = {
        "field": gen_unique_str(),
        "type": DataType.INT8,
        "values": [1 for i in range(nb)]
    }
    entities.append(field)
    return entities


def add_vector_field(entities, is_normal=False):
    nb = len(entities[0]["values"])
    vectors = gen_vectors(nb, dimension, is_normal)
    field = {
        "field": gen_unique_str(),
        "type": DataType.FLOAT_VECTOR,
        "values": vectors
    }
    entities.append(field)
    return entities


def update_fields_metric_type(fields, metric_type):
    tmp_fields = copy.deepcopy(fields)
    if metric_type in ["L2", "IP"]:
        tmp_fields["fields"][-1]["type"] = DataType.FLOAT_VECTOR
    else:
        tmp_fields["fields"][-1]["type"] = DataType.BINARY_VECTOR
    tmp_fields["fields"][-1]["params"]["metric_type"] = metric_type
    return tmp_fields


def remove_field(entities):
    del entities[0]
    return entities


def remove_vector_field(entities):
    del entities[-1]
    return entities


def update_field_name(entities, old_name, new_name):
    for item in entities:
        if item["field"] == old_name:
            item["field"] = new_name
    return entities


def update_field_type(entities, old_name, new_name):
    for item in entities:
        if item["field"] == old_name:
            item["type"] = new_name
    return entities


def update_field_value(entities, old_type, new_value):
    for item in entities:
        if item["type"] == old_type:
            # overwrite every value of the matching field (iterate by index, not by value)
            for i in range(len(item["values"])):
                item["values"][i] = new_value
    return entities


def add_vector_field(nb, dimension=dimension):
    field_name = gen_unique_str()
    field = {
        "field": field_name,
        "type": DataType.FLOAT_VECTOR,
        "values": gen_vectors(nb, dimension)
    }
    return field_name


def gen_segment_sizes():
    sizes = [
            1,
            2,
            1024,
            4096
    ]
    return sizes


def gen_invalid_ips():
    ips = [
            # "255.0.0.0",
            # "255.255.0.0",
            # "255.255.255.0",
            # "255.255.255.255",
            "127.0.0",
            # "123.0.0.2",
            "12-s",
            " ",
            "12 s",
            "BB。A",
            " siede ",
            "(mn)",
            "中文",
            "a".join("a" for _ in range(256))
    ]
    return ips


def gen_invalid_uris():
    ip = None
    uris = [
            " ",
            "中文",
            # invalid protocol
            # "tc://%s:%s" % (ip, port),
            # "tcp%s:%s" % (ip, port),

            # # invalid port
            # "tcp://%s:100000" % ip,
            # "tcp://%s: " % ip,
            # "tcp://%s:19540" % ip,
            # "tcp://%s:-1" % ip,
            # "tcp://%s:string" % ip,

            # invalid ip
            "tcp:// :19530",
            # "tcp://123.0.0.1:%s" % port,
            "tcp://127.0.0:19530",
            # "tcp://255.0.0.0:%s" % port,
            # "tcp://255.255.0.0:%s" % port,
            # "tcp://255.255.255.0:%s" % port,
            # "tcp://255.255.255.255:%s" % port,
            "tcp://\n:19530",
    ]
    return uris


def gen_invalid_strs():
    strings = [
            1,
            [1],
            None,
            "12-s",
            " ",
            # "",
            # None,
            "12 s",
            "BB。A",
            "c|c",
            " siede ",
            "(mn)",
            "pip+",
            "=c",
            "中文",
            "a".join("a" for i in range(256))
    ]
    return strings


def gen_invalid_field_types():
    field_types = [
            # 1,
            "=c",
            # 0,
            None,
            "",
            "a".join("a" for i in range(256))
    ]
    return field_types


def gen_invalid_metric_types():
    metric_types = [
            1,
            "=c",
            0,
            None,
            "",
            "a".join("a" for i in range(256))
    ]
    return metric_types


# TODO:
def gen_invalid_ints():
    top_ks = [
            # 1.0,
            None,
            "stringg",
            [1,2,3],
            (1,2),
            {"a": 1},
            " ",
            "",
            "String",
            "12-s",
            "BB。A",
            " siede ",
            "(mn)",
            "pip+",
            "=c",
            "中文",
            "a".join("a" for i in range(256))
    ]
    return top_ks


def gen_invalid_params():
    params = [
            9999999999,
            -1,
            # None,
            [1,2,3],
            (1,2),
            {"a": 1},
            " ",
            "",
            "String",
            "12-s",
            "BB。A",
            " siede ",
            "(mn)",
            "pip+",
            "=c",
            "中文"
    ]
    return params


def gen_invalid_vectors():
    invalid_vectors = [
            "1*2",
            [],
            [1],
            [1,2],
            [" "],
            ['a'],
            [None],
            None,
            (1,2),
            {"a": 1},
            " ",
            "",
            "String",
            "12-s",
            "BB。A",
            " siede ",
            "(mn)",
            "pip+",
            "=c",
            "中文",
            "a".join("a" for i in range(256))
    ]
    return invalid_vectors


def gen_invaild_search_params():
    invalid_search_key = 100
    search_params = []
    for index_type in all_index_types:
        if index_type == "FLAT":
            continue
        search_params.append({"index_type": index_type, "search_params": {"invalid_key": invalid_search_key}})
        if index_type in delete_support():
            for nprobe in gen_invalid_params():
                ivf_search_params = {"index_type": index_type, "search_params": {"nprobe": nprobe}}
                search_params.append(ivf_search_params)
        elif index_type == "HNSW":
            for ef in gen_invalid_params():
                hnsw_search_param = {"index_type": index_type, "search_params": {"ef": ef}}
                search_params.append(hnsw_search_param)
        elif index_type == "NSG":
            for search_length in gen_invalid_params():
                nsg_search_param = {"index_type": index_type, "search_params": {"search_length": search_length}}
                search_params.append(nsg_search_param)
            search_params.append({"index_type": index_type, "search_params": {"invalid_key": 100}})
        elif index_type == "ANNOY":
            for search_k in gen_invalid_params():
                if isinstance(search_k, int):
                    continue
                annoy_search_param = {"index_type": index_type, "search_params": {"search_k": search_k}}
                search_params.append(annoy_search_param)
    return search_params


def gen_invalid_index():
    index_params = []
    for index_type in gen_invalid_strs():
        index_param = {"index_type": index_type, "params": {"nlist": 1024}}
        index_params.append(index_param)
    for nlist in gen_invalid_params():
        index_param = {"index_type": "IVF_FLAT", "params": {"nlist": nlist}}
        index_params.append(index_param)
    for M in gen_invalid_params():
        index_param = {"index_type": "HNSW", "params": {"M": M, "efConstruction": 100}}
        index_params.append(index_param)
    for efConstruction in gen_invalid_params():
        index_param = {"index_type": "HNSW", "params": {"M": 16, "efConstruction": efConstruction}}
        index_params.append(index_param)
    for search_length in gen_invalid_params():
        index_param = {"index_type": "NSG",
                       "params": {"search_length": search_length, "out_degree": 40, "candidate_pool_size": 50,
                                  "knng": 100}}
        index_params.append(index_param)
    for out_degree in gen_invalid_params():
        index_param = {"index_type": "NSG",
                       "params": {"search_length": 100, "out_degree": out_degree, "candidate_pool_size": 50,
                                  "knng": 100}}
        index_params.append(index_param)
    for candidate_pool_size in gen_invalid_params():
        index_param = {"index_type": "NSG",
                       "params": {"search_length": 100, "out_degree": 40,
                                  "candidate_pool_size": candidate_pool_size, "knng": 100}}
        index_params.append(index_param)
    index_params.append({"index_type": "IVF_FLAT", "params": {"invalid_key": 1024}})
    index_params.append({"index_type": "HNSW", "params": {"invalid_key": 16, "efConstruction": 100}})
    index_params.append({"index_type": "NSG",
                         "params": {"invalid_key": 100, "out_degree": 40, "candidate_pool_size": 300,
                                    "knng": 100}})
    for invalid_n_trees in gen_invalid_params():
        index_params.append({"index_type": "ANNOY", "params": {"n_trees": invalid_n_trees}})

    return index_params


def gen_index():
    nlists = [1, 1024, 16384]
    pq_ms = [128, 64, 32, 16, 8, 4]
    Ms = [5, 24, 48]
    efConstructions = [100, 300, 500]
    search_lengths = [10, 100, 300]
    out_degrees = [5, 40, 300]
    candidate_pool_sizes = [50, 100, 300]
    knngs = [5, 100, 300]

    index_params = []
    for index_type in all_index_types:
        if index_type in ["FLAT", "BIN_FLAT", "BIN_IVF_FLAT"]:
            index_params.append({"index_type": index_type, "index_param": {"nlist": 1024}})
        elif index_type in ["IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID"]:
            ivf_params = [{"index_type": index_type, "index_param": {"nlist": nlist}}
                          for nlist in nlists]
            index_params.extend(ivf_params)
        elif index_type == "IVF_PQ":
            IVFPQ_params = [{"index_type": index_type, "index_param": {"nlist": nlist, "m": m}}
                            for nlist in nlists
                            for m in pq_ms]
            index_params.extend(IVFPQ_params)
        elif index_type == "HNSW":
            hnsw_params = [{"index_type": index_type, "index_param": {"M": M, "efConstruction": efConstruction}}
                           for M in Ms
                           for efConstruction in efConstructions]
            index_params.extend(hnsw_params)
        elif index_type == "NSG":
            nsg_params = [{"index_type": index_type,
                           "index_param": {"search_length": search_length, "out_degree": out_degree,
                                           "candidate_pool_size": candidate_pool_size, "knng": knng}}
                          for search_length in search_lengths
                          for out_degree in out_degrees
                          for candidate_pool_size in candidate_pool_sizes
                          for knng in knngs]
            index_params.extend(nsg_params)

    return index_params


def gen_simple_index():
    index_params = []
    for i in range(len(all_index_types)):
        if all_index_types[i] in binary_support():
            continue
        dic = {"index_type": all_index_types[i]}
        dic.update(default_index_params[i])
        index_params.append(dic)
    return index_params


def gen_binary_index():
    index_params = []
    for i in range(len(all_index_types)):
        if all_index_types[i] in binary_support():
            dic = {"index_type": all_index_types[i]}
            dic.update(default_index_params[i])
            index_params.append(dic)
    return index_params
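
# Illustrative output: gen_simple_index() flattens the matching default_index_params entry into
# each dict, so its first two items are
#   {"index_type": "FLAT", "nlist": 1024} and {"index_type": "IVF_FLAT", "nlist": 1024},
# while gen_binary_index() yields only the BIN_FLAT / BIN_IVF_FLAT entries.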


def get_search_param(index_type):
    if index_type in ivf() or index_type in binary_support():
        return {"nprobe": 32}
    elif index_type == "HNSW":
        return {"ef": 64}
    elif index_type == "NSG":
        return {"search_length": 100}
    elif index_type == "ANNOY":
        return {"search_k": 100}
    else:
        logging.getLogger().info("Invalid index_type.")


def assert_equal_vector(v1, v2):
    if len(v1) != len(v2):
        assert False
    for i in range(len(v1)):
        assert abs(v1[i] - v2[i]) < epsilon


def restart_server(helm_release_name):
    res = True
    timeout = 120
    from kubernetes import client, config
    client.rest.logger.setLevel(logging.WARNING)

    namespace = "milvus"
    # service_name = "%s.%s.svc.cluster.local" % (helm_release_name, namespace)
    config.load_kube_config()
    v1 = client.CoreV1Api()
    pod_name = None
    # config_map_names = v1.list_namespaced_config_map(namespace, pretty='true')
    # body = {"replicas": 0}
    pods = v1.list_namespaced_pod(namespace)
    for i in pods.items:
        if i.metadata.name.find(helm_release_name) != -1 and i.metadata.name.find("mysql") == -1:
            pod_name = i.metadata.name
            break
            # v1.patch_namespaced_config_map(config_map_name, namespace, body, pretty='true')
    # status_res = v1.read_namespaced_service_status(helm_release_name, namespace, pretty='true')
    # print(status_res)
    if pod_name is not None:
        try:
            v1.delete_namespaced_pod(pod_name, namespace)
        except Exception as e:
            logging.error(str(e))
            logging.error("Exception when calling CoreV1Api->delete_namespaced_pod")
            res = False
            return res
        time.sleep(5)
        # check that the pod restarted successfully
        pods = v1.list_namespaced_pod(namespace)
        for i in pods.items:
            pod_name_tmp = i.metadata.name
            if pod_name_tmp.find(helm_release_name) != -1:
                logging.debug(pod_name_tmp)
                start_time = time.time()
                # poll the pod status until it is Running or the timeout is reached
                while time.time() - start_time < timeout:
                    status_res = v1.read_namespaced_pod_status(pod_name_tmp, namespace, pretty='true')
                    if status_res.status.phase == "Running":
                        break
                    time.sleep(1)
                if time.time() - start_time > timeout:
                    logging.error("Restart pod: %s timeout" % pod_name_tmp)
                    res = False
                    return res
    else:
        logging.error("Pod: %s not found" % helm_release_name)
        res = False
    return res