Passed
Push — master ( ba48ef...3dfbe6 )
by
unknown
08:51 queued 06:56
created

utils.gen_unique_str()   A

Complexity

Conditions 2

Size

Total Lines 3
Code Lines 3

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 2
eloc 3
nop 1
dl 0
loc 3
rs 10
c 0
b 0
f 0
1
import os
2
import sys
3
import random
4
import pdb
5
import string
6
import struct
7
import logging
8
import time, datetime
9
import copy
10
import numpy as np
11
from sklearn import preprocessing
12
from milvus import Milvus, DataType
13
14
port = 19530
15
epsilon = 0.000001
16
default_flush_interval = 1
17
big_flush_interval = 1000
18
dimension = 128
19
segment_row_count = 5000
20
default_float_vec_field_name = "float_vector"
21
default_binary_vec_field_name = "binary_vector"
22
23
# TODO:
# Index types under test. Order matters: default_index_params below is paired
# with this list by position (see gen_simple_index / gen_binary_index).
all_index_types = [
    "FLAT",
    "IVF_FLAT",
    "IVF_SQ8",
    "IVF_SQ8_HYBRID",
    "IVF_PQ",
    "HNSW",
    # "NSG",
    "ANNOY",
    "BIN_FLAT",
    "BIN_IVF_FLAT"
]
36
37
38
# Default build parameters, positionally aligned with all_index_types above.
default_index_params = [
    {"nlist": 1024},                    # FLAT
    {"nlist": 1024},                    # IVF_FLAT
    {"nlist": 1024},                    # IVF_SQ8
    {"nlist": 1024},                    # IVF_SQ8_HYBRID
    {"nlist": 1024, "m": 16},           # IVF_PQ
    {"M": 48, "efConstruction": 500},   # HNSW
    # {"search_length": 50, "out_degree": 40, "candidate_pool_size": 100, "knng": 50},
    {"n_trees": 4},                     # ANNOY
    {"nlist": 1024},                    # BIN_FLAT
    {"nlist": 1024}                     # BIN_IVF_FLAT
]
50
51
52
def index_cpu_not_support():
    """Index types that cannot be built on a CPU-only deployment."""
    # the SQ8 hybrid index requires GPU support
    return ["IVF_SQ8_HYBRID"]
54
55
56
def binary_support():
    """Index types that operate on binary vectors."""
    return ["BIN_FLAT", "BIN_IVF_FLAT"]
58
59
60
def delete_support():
    """Index types that support entity deletion."""
    return ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID", "IVF_PQ"]
62
63
64
def ivf():
    """IVF-family index types (plus FLAT, which shares their search params)."""
    return ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID", "IVF_PQ"]
66
67
68
def l2(x, y):
    """Euclidean (L2) distance between vectors x and y."""
    diff = np.array(x) - np.array(y)
    return np.linalg.norm(diff)
70
71
72
def ip(x, y):
    """Inner-product similarity of vectors x and y."""
    a = np.array(x)
    b = np.array(y)
    return np.inner(a, b)
74
75
76
def jaccard(x, y):
    """Jaccard distance between two binary vectors.

    Fix: ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24;
    use the builtin ``bool`` dtype instead.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum())
80
81
82
def hamming(x, y):
    """Hamming distance (number of differing bits) between binary vectors.

    Fix: ``np.bool`` was removed in NumPy 1.24; use the builtin ``bool``.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return np.bitwise_xor(x, y).sum()
86
87
88
def tanimoto(x, y):
    """Tanimoto distance: -log2(|x AND y| / |x OR y|) for binary vectors.

    Fix: ``np.bool`` was removed in NumPy 1.24; use the builtin ``bool``.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return -np.log2(np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum()))
92
93
94
def substructure(x, y):
    """Substructure distance: 1 - |x AND y| / |y| (0 when y's bits are all in x).

    Fix: ``np.bool`` was removed in NumPy 1.24; use the builtin ``bool``.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(y)
98
99
100
def superstructure(x, y):
    """Superstructure distance: 1 - |x AND y| / |x| (0 when x's bits are all in y).

    Fix: ``np.bool`` was removed in NumPy 1.24; use the builtin ``bool``.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(x)
104
105
106
def get_milvus(host, port, uri=None, handler=None, **kwargs):
    """Create a Milvus client, preferring the URI form when ``uri`` is given.

    ``handler`` defaults to "GRPC"; ``try_connect`` may be passed via kwargs
    (defaults to True).
    """
    if handler is None:
        handler = "GRPC"
    common = {
        "handler": handler,
        "try_connect": kwargs.get("try_connect", True),
    }
    if uri is None:
        return Milvus(host=host, port=port, **common)
    return Milvus(uri=uri, **common)
115
116
117
def disable_flush(connect):
    """Effectively disable auto-flush by raising the interval to a large value.

    ``connect`` is a Milvus client; use ``enable_flush`` to restore the default.
    """
    connect.set_config("storage", "auto_flush_interval", big_flush_interval)
119
120
121
def enable_flush(connect):
    """Restore auto_flush_interval to the default and verify the write stuck.

    NOTE(review): the assert compares against ``str(...)`` — presumably
    ``get_config`` returns the value as a string; confirm against the client API.
    """
    # reset auto_flush_interval=1
    connect.set_config("storage", "auto_flush_interval", default_flush_interval)
    config_value = connect.get_config("storage", "auto_flush_interval")
    assert config_value == str(default_flush_interval)
126
127
128
def gen_inaccuracy(num):
    """Return ``num`` scaled down by 255 (small perturbation for float tests)."""
    scale = 255.0
    return num / scale
130
131
132
def gen_vectors(num, dim, is_normal=True):
    """Return ``num`` random float vectors of dimension ``dim``, L2-normalized.

    NOTE(review): ``is_normal`` is accepted but never used — the output is
    always normalized. Confirm whether any caller expects raw vectors.
    """
    vectors = [[random.random() for _ in range(dim)] for _ in range(num)]
    vectors = preprocessing.normalize(vectors, axis=1, norm='l2')
    return vectors.tolist()
136
137
138
# def gen_vectors(num, dim, seed=np.random.RandomState(1234), is_normal=False):
139
#     xb = seed.rand(num, dim).astype("float32")
140
#     xb = preprocessing.normalize(xb, axis=1, norm='l2')
141
#     return xb.tolist()
142
143
144
def gen_binary_vectors(num, dim):
    """Return ``num`` random binary vectors as (raw 0/1 lists, packed bytes)."""
    raw_vectors = []
    binary_vectors = []
    for _ in range(num):
        bits = [random.randint(0, 1) for _ in range(dim)]
        raw_vectors.append(bits)
        # pack 8 bits per byte for the BINARY_VECTOR wire format
        binary_vectors.append(bytes(np.packbits(bits, axis=-1).tolist()))
    return raw_vectors, binary_vectors
152
153
154
def gen_binary_sub_vectors(vectors, length):
    """Copy the 1-bits of the first ``length`` vectors into fresh vectors.

    Returns (raw 0/1 lists, packed bytes); each output is a sub-structure
    (bitwise subset) of the corresponding input vector.
    """
    dim = len(vectors[0])
    raw_vectors = []
    binary_vectors = []
    for idx in range(length):
        source = vectors[idx]
        bits = [1 if bit == 1 else 0 for bit in source]
        raw_vectors.append(bits)
        binary_vectors.append(bytes(np.packbits(bits, axis=-1).tolist()))
    return raw_vectors, binary_vectors
167
168
169
def gen_binary_super_vectors(vectors, length):
    """Return ``length`` all-ones vectors (supersets of any input) plus bytes.

    Each generated vector has every bit set, making it a superstructure of
    any input vector of the same dimension.

    Fix: removed an unused local ``cnt_1 = np.count_nonzero(vectors[i])``
    that was computed per iteration and never read.
    """
    dim = len(vectors[0])
    raw_vectors = []
    binary_vectors = []
    for _ in range(length):
        ones = [1] * dim
        raw_vectors.append(ones)
        binary_vectors.append(bytes(np.packbits(ones, axis=-1).tolist()))
    return raw_vectors, binary_vectors
179
180
181
def gen_int_attr(row_num):
    """Return ``row_num`` random ints in [0, 255] for an integer attribute column."""
    values = []
    for _ in range(row_num):
        values.append(random.randint(0, 255))
    return values
183
184
185
def gen_float_attr(row_num):
    """Return ``row_num`` random floats in [0, 255] for a float attribute column."""
    values = []
    for _ in range(row_num):
        values.append(random.uniform(0, 255))
    return values
187
188
189
def gen_unique_str(str_value=None):
    """Return "<base>_<8 random alphanumerics>"; base defaults to "test"."""
    alphabet = string.ascii_letters + string.digits
    suffix = "".join(random.choice(alphabet) for _ in range(8))
    base = "test" if str_value is None else str_value
    return base + "_" + suffix
192
193
194
def gen_single_filter_fields():
    """One field spec per scalar (filterable) DataType."""
    scalar_types = (DataType.INT32, DataType.INT64, DataType.FLOAT, DataType.DOUBLE)
    return [{"field": dt.name, "type": dt} for dt in DataType if dt in scalar_types]
200
201
202
def gen_single_vector_fields():
    """One field spec per vector DataType, using the module default dimension."""
    return [
        {"field": dt.name, "type": dt, "params": {"dim": dimension}}
        for dt in (DataType.FLOAT_VECTOR, DataType.BINARY_VECTOR)
    ]
208
209
210
def gen_default_fields():
    """Default collection schema: int64 + float scalars and one float vector."""
    return {
        "fields": [
            {"field": "int64", "type": DataType.INT64},
            {"field": "float", "type": DataType.FLOAT},
            {"field": default_float_vec_field_name,
             "type": DataType.FLOAT_VECTOR,
             "params": {"dim": dimension}},
        ],
        "segment_row_count": segment_row_count,
    }
220
221
222
def gen_entities(nb, is_normal=False):
    """Return ``nb`` rows: int64 ids, float values, and random float vectors."""
    return [
        {"field": "int64", "type": DataType.INT64, "values": list(range(nb))},
        {"field": "float", "type": DataType.FLOAT, "values": [float(i) for i in range(nb)]},
        {"field": default_float_vec_field_name,
         "type": DataType.FLOAT_VECTOR,
         "values": gen_vectors(nb, dimension, is_normal)},
    ]
230
231
232
def gen_binary_entities(nb):
    """Return (raw binary vectors, entity rows with packed binary vectors)."""
    raw_vectors, packed = gen_binary_vectors(nb, dimension)
    entities = [
        {"field": "int64", "type": DataType.INT64, "values": list(range(nb))},
        {"field": "float", "type": DataType.FLOAT, "values": [float(i) for i in range(nb)]},
        {"field": default_binary_vec_field_name,
         "type": DataType.BINARY_VECTOR,
         "values": packed},
    ]
    return raw_vectors, entities
240
241
242
def gen_entities_by_fields(fields, nb, dimension):
    """Fill each field spec in ``fields`` with ``nb`` generated values (in place).

    Scalar int fields get 1s, scalar float fields get 3.0s, vector fields get
    random vectors of the given dimension. Returns the mutated field dicts.

    Fix: the original left ``field_value`` unbound for unsupported types
    (NameError on the first such field, or silent reuse of the previous
    iteration's values afterwards); now raises ValueError instead.
    """
    entities = []
    for field in fields:
        if field["type"] in [DataType.INT32, DataType.INT64]:
            field_value = [1 for _ in range(nb)]
        elif field["type"] in [DataType.FLOAT, DataType.DOUBLE]:
            field_value = [3.0 for _ in range(nb)]
        elif field["type"] == DataType.BINARY_VECTOR:
            field_value = gen_binary_vectors(nb, dimension)[1]
        elif field["type"] == DataType.FLOAT_VECTOR:
            field_value = gen_vectors(nb, dimension)
        else:
            raise ValueError("unsupported field type: %s" % field["type"])
        field.update({"values": field_value})
        entities.append(field)
    return entities
256
257
258
def assert_equal_entity(a, b):
    # TODO: placeholder — entity comparison is not implemented yet; always passes.
    pass
260
261
262
def gen_query_vectors(field_name, entities, top_k, nq, search_params=None, rand_vector=False, metric_type=None):
    """Build a vector search query against ``field_name``.

    Takes the first ``nq`` vectors from the last entity field (or generates
    random ones when ``rand_vector``). Returns (query dict, query vectors).

    Fix: ``search_params`` previously defaulted to a shared mutable dict
    ({"nprobe": 10}) that was embedded in every returned query — mutation by
    one caller leaked into all later calls. The default is now created fresh.
    """
    if search_params is None:
        search_params = {"nprobe": 10}
    if rand_vector is True:
        dimension = len(entities[-1]["values"][0])
        query_vectors = gen_vectors(nq, dimension)
    else:
        query_vectors = entities[-1]["values"][:nq]
    must_param = {"vector": {field_name: {"topk": top_k, "query": query_vectors, "params": search_params}}}
    if metric_type is not None:
        must_param["vector"][field_name]["metric_type"] = metric_type
    query = {
        "bool": {
            "must": [must_param]
        }
    }
    return query, query_vectors
277
278
279
def update_query_expr(src_query, keep_old=True, expr=None):
    """Return a deep copy of ``src_query`` with ``expr`` merged into its
    "bool" clause; drop the original "must" list unless ``keep_old``."""
    result = copy.deepcopy(src_query)
    bool_clause = result["bool"]
    if expr is not None:
        bool_clause.update(expr)
    if keep_old is not True:
        bool_clause.pop("must")
    return result
286
287
288
def gen_default_vector_expr(default_query):
    """Extract the first "must" clause (the vector clause) from a query dict."""
    must_clauses = default_query["bool"]["must"]
    return must_clauses[0]
290
291
292
def gen_default_term_expr(values=None, nb=6000):
    """Term expression on the "int64" field; defaults to ids [0, nb/2).

    Fix: the original referenced an undefined module global ``nb``
    (NameError), and ``range(nb/2)`` would be a TypeError under Python 3.
    ``nb`` is now an explicit keyword parameter (default 6000 — TODO confirm
    this matches the suite-wide row count) with integer division.
    """
    if values is None:
        values = list(range(nb // 2))
    expr = {"term": {"int64": {"values": values}}}
    return expr
297
298
299
def gen_default_range_expr(ranges=None, nb=6000):
    """Range expression on the "int64" field; defaults to (1, nb/2).

    Fix: the original referenced an undefined module global ``nb``
    (NameError when ``ranges`` was omitted). ``nb`` is now an explicit
    keyword parameter (default 6000 — TODO confirm against suite defaults),
    using integer division for a clean bound.
    """
    if ranges is None:
        ranges = {"GT": 1, "LT": nb // 2}
    expr = {"range": {"int64": {"ranges": ranges}}}
    return expr
304
305
306
307
def add_field(entities):
    """Append an extra INT64 field (random name, all ones) matching the row count."""
    row_count = len(entities[0]["values"])
    entities.append({
        "field": gen_unique_str(),
        "type": DataType.INT64,
        "values": [1] * row_count,
    })
    return entities
316
317
318
def add_vector_field(entities, is_normal=False):
    """Append an extra FLOAT_VECTOR field (random name) matching the row count."""
    row_count = len(entities[0]["values"])
    entities.append({
        "field": gen_unique_str(),
        "type": DataType.FLOAT_VECTOR,
        "values": gen_vectors(row_count, dimension, is_normal),
    })
    return entities
328
329
330
# def update_fields_metric_type(fields, metric_type):
331
#     tmp_fields = copy.deepcopy(fields)
332
#     if metric_type in ["L2", "IP"]:
333
#         tmp_fields["fields"][-1]["type"] = DataType.FLOAT_VECTOR
334
#     else:
335
#         tmp_fields["fields"][-1]["type"] = DataType.BINARY_VECTOR
336
#     tmp_fields["fields"][-1]["params"]["metric_type"] = metric_type
337
#     return tmp_fields
338
339
340
def remove_field(entities):
    """Drop the first (scalar) field from ``entities`` in place."""
    entities.pop(0)
    return entities
343
344
345
def remove_vector_field(entities):
    """Drop the last (vector) field from ``entities`` in place."""
    entities.pop()
    return entities
348
349
350
def update_field_name(entities, old_name, new_name):
    """Rename every field called ``old_name`` to ``new_name`` (in place)."""
    for entity in entities:
        if entity["field"] == old_name:
            entity["field"] = new_name
    return entities
355
356
357
def update_field_type(entities, old_name, new_name):
    """Set the "type" of every field called ``old_name`` to ``new_name`` (in place)."""
    for entity in entities:
        if entity["field"] == old_name:
            entity["type"] = new_name
    return entities
362
363
364
def update_field_value(entities, old_type, new_value):
    """Replace every value of each field whose "type" equals ``old_type``.

    Fix: the original iterated ``for i in item["values"]`` and then indexed
    ``item["values"][i] = new_value`` — i.e. it used each *value* as an index,
    which raises IndexError for general data and only worked by accident for
    small-int columns. Values are now replaced in place by position.
    """
    for item in entities:
        if item["type"] == old_type:
            # slice-assign so existing references to the list stay valid
            item["values"][:] = [new_value] * len(item["values"])
    return entities
370
371
372
# NOTE(review): this redefines add_vector_field(entities, is_normal=False)
# declared earlier in the module — only this later definition survives at
# import time. Also, the ``field`` dict built here is never used or returned;
# confirm whether callers expect the field dict or only the generated name.
def add_vector_field(nb, dimension=dimension):
    """Generate a random vector field name (the field dict itself is discarded)."""
    field_name = gen_unique_str()
    field = {
        "field": field_name,
        "type": DataType.FLOAT_VECTOR,
        "values": gen_vectors(nb, dimension)
    }
    return field_name
380
        
381
382
def gen_segment_row_counts():
    """Candidate segment_row_count values, smallest to largest."""
    return [1, 2, 1024, 4096]
390
391
392
def gen_invalid_ips():
    """Malformed host strings that a client should reject."""
    return [
        "127.0.0",      # truncated dotted quad
        "12-s",
        " ",
        "12 s",
        "BB。A",
        " siede ",
        "(mn)",
        "中文",
        "a".join("a" for _ in range(256)),  # 511-char over-long hostname
    ]
410
411
412
def gen_invalid_uris():
    """Malformed URIs that a client should reject.

    Fix: removed an unused local ``ip = None`` that only served a large bank
    of commented-out cases (which were dropped along with it).
    """
    uris = [
        " ",
        "中文",
        # invalid ip part
        "tcp:// :19530",
        "tcp://127.0.0:19530",
        "tcp://\n:19530",
    ]
    return uris
439
440
441
def gen_invalid_strs():
    """Values that should be rejected wherever an identifier string is expected."""
    return [
        1,              # wrong type: int
        [1],            # wrong type: list
        None,
        "12-s",
        " ",
        "12 s",
        "BB。A",
        "c|c",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for _ in range(256)),  # 511-char over-long name
    ]
461
462
463
def gen_invalid_field_types():
    """Values that should be rejected as a field type."""
    return [
        "=c",
        None,
        "",
        "a".join("a" for _ in range(256)),  # 511-char string
    ]
473
474
475
def gen_invalid_metric_types():
    """Values that should be rejected as a metric type."""
    return [
        1,
        "=c",
        0,
        None,
        "",
        "a".join("a" for _ in range(256)),  # 511-char string
    ]
485
486
487
# TODO:
488
# TODO:
def gen_invalid_ints():
    """Values that should be rejected wherever an int (e.g. top_k) is expected."""
    return [
        None,
        "stringg",
        [1, 2, 3],
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for _ in range(256)),  # 511-char string
    ]
509
510
511
def gen_invalid_params():
    """Values that should be rejected as numeric index/search parameters."""
    return [
        9999999999,     # absurdly large
        -1,             # negative
        [1, 2, 3],
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
    ]
531
532
533
def gen_invalid_vectors():
    """Values that should be rejected wherever a vector is expected."""
    return [
        "1*2",
        [],
        [1],
        [1, 2],
        [" "],
        ['a'],
        [None],
        None,
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for _ in range(256)),  # 511-char string
    ]
558
559
560
def gen_invaild_search_params():
    """Search-param dicts that should be rejected, per index type.

    (The "invaild" typo in the name is kept for backward compatibility.)
    """
    invalid_search_key = 100
    search_params = []
    for index_type in all_index_types:
        if index_type == "FLAT":
            continue
        # every non-FLAT index rejects an unknown param key
        search_params.append({"index_type": index_type,
                              "search_params": {"invalid_key": invalid_search_key}})
        if index_type in delete_support():
            search_params.extend(
                {"index_type": index_type, "search_params": {"nprobe": nprobe}}
                for nprobe in gen_invalid_params())
        elif index_type == "HNSW":
            search_params.extend(
                {"index_type": index_type, "search_params": {"ef": ef}}
                for ef in gen_invalid_params())
        elif index_type == "NSG":
            search_params.extend(
                {"index_type": index_type, "search_params": {"search_length": sl}}
                for sl in gen_invalid_params())
            search_params.append({"index_type": index_type,
                                  "search_params": {"invalid_key": 100}})
        elif index_type == "ANNOY":
            # ints are structurally valid for search_k, so skip them
            search_params.extend(
                {"index_type": index_type, "search_params": {"search_k": sk}}
                for sk in gen_invalid_params()
                if not isinstance(sk, int))
    return search_params
587
588
589
def gen_invalid_index():
    """Index-param dicts that should be rejected at create_index time."""
    index_params = [{"index_type": bad_type, "params": {"nlist": 1024}}
                    for bad_type in gen_invalid_strs()]
    index_params += [{"index_type": "IVF_FLAT", "params": {"nlist": nlist}}
                     for nlist in gen_invalid_params()]
    index_params += [{"index_type": "HNSW", "params": {"M": M, "efConstruction": 100}}
                     for M in gen_invalid_params()]
    index_params += [{"index_type": "HNSW", "params": {"M": 16, "efConstruction": ef}}
                     for ef in gen_invalid_params()]
    index_params += [{"index_type": "NSG",
                      "params": {"search_length": sl, "out_degree": 40,
                                 "candidate_pool_size": 50, "knng": 100}}
                     for sl in gen_invalid_params()]
    index_params += [{"index_type": "NSG",
                      "params": {"search_length": 100, "out_degree": od,
                                 "candidate_pool_size": 50, "knng": 100}}
                     for od in gen_invalid_params()]
    index_params += [{"index_type": "NSG",
                      "params": {"search_length": 100, "out_degree": 40,
                                 "candidate_pool_size": cps, "knng": 100}}
                     for cps in gen_invalid_params()]
    # unknown-key variants
    index_params.append({"index_type": "IVF_FLAT", "params": {"invalid_key": 1024}})
    index_params.append({"index_type": "HNSW", "params": {"invalid_key": 16, "efConstruction": 100}})
    index_params.append({"index_type": "NSG",
                         "params": {"invalid_key": 100, "out_degree": 40,
                                    "candidate_pool_size": 300, "knng": 100}})
    index_params += [{"index_type": "ANNOY", "params": {"n_trees": n_trees}}
                     for n_trees in gen_invalid_params()]
    return index_params
627
628
629
def gen_index():
    """Cartesian-product index parameter combinations for every index type."""
    nlists = [1, 1024, 16384]
    pq_ms = [128, 64, 32, 16, 8, 4]
    Ms = [5, 24, 48]
    efConstructions = [100, 300, 500]
    search_lengths = [10, 100, 300]
    out_degrees = [5, 40, 300]
    candidate_pool_sizes = [50, 100, 300]
    knngs = [5, 100, 300]

    index_params = []
    for index_type in all_index_types:
        if index_type in ("FLAT", "BIN_FLAT", "BIN_IVF_FLAT"):
            index_params.append({"index_type": index_type,
                                 "index_param": {"nlist": 1024}})
        elif index_type in ("IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID"):
            index_params.extend(
                {"index_type": index_type, "index_param": {"nlist": nlist}}
                for nlist in nlists)
        elif index_type == "IVF_PQ":
            index_params.extend(
                {"index_type": index_type, "index_param": {"nlist": nlist, "m": m}}
                for nlist in nlists
                for m in pq_ms)
        elif index_type == "HNSW":
            index_params.extend(
                {"index_type": index_type,
                 "index_param": {"M": M, "efConstruction": ef}}
                for M in Ms
                for ef in efConstructions)
        elif index_type == "NSG":
            index_params.extend(
                {"index_type": index_type,
                 "index_param": {"search_length": sl, "out_degree": od,
                                 "candidate_pool_size": cps, "knng": knng}}
                for sl in search_lengths
                for od in out_degrees
                for cps in candidate_pool_sizes
                for knng in knngs)
    return index_params
668
669
670
def gen_simple_index():
    """One default (index_type, L2, params) dict per non-binary index type.

    Pairs all_index_types with default_index_params by position.
    """
    return [
        {"index_type": index_type, "metric_type": "L2", "params": params}
        for index_type, params in zip(all_index_types, default_index_params)
        if index_type not in binary_support()
    ]
679
680
681
def gen_binary_index():
    """One default (index_type, params) dict per binary index type.

    Pairs all_index_types with default_index_params by position.
    """
    return [
        {"index_type": index_type, "params": params}
        for index_type, params in zip(all_index_types, default_index_params)
        if index_type in binary_support()
    ]
689
690
691
def get_search_param(index_type):
    """Default search params for ``index_type``; raises on an unknown type."""
    search_params = {"metric_type": "L2"}
    if index_type in ivf() or index_type in binary_support():
        extra = {"nprobe": 32}
    elif index_type == "HNSW":
        extra = {"ef": 64}
    elif index_type == "NSG":
        extra = {"search_length": 100}
    elif index_type == "ANNOY":
        extra = {"search_k": 100}
    else:
        logging.getLogger().error("Invalid index_type.")
        raise Exception("Invalid index_type.")
    search_params.update(extra)
    return search_params
705
706
707
def assert_equal_vector(v1, v2):
    """Assert two float vectors match element-wise within the module epsilon."""
    assert len(v1) == len(v2)
    for a, b in zip(v1, v2):
        assert abs(a - b) < epsilon
712
713
714
def restart_server(helm_release_name):
    """Delete the Milvus server pod for ``helm_release_name`` and wait until a
    replacement pod reports phase "Running".

    Returns True on success, False on delete failure, timeout, or no pod found.

    Fix: the wait loop was ``while time.time() - start_time > timeout`` —
    elapsed time starts near zero, so the loop body never ran and the
    readiness check was skipped entirely. The comparison is now ``<``.
    """
    res = True
    timeout = 120  # seconds to wait for the pod to come back up
    from kubernetes import client, config
    client.rest.logger.setLevel(logging.WARNING)

    namespace = "milvus"
    config.load_kube_config()
    v1 = client.CoreV1Api()
    pod_name = None
    # locate the server pod (skip the mysql sidecar of the same release)
    pods = v1.list_namespaced_pod(namespace)
    for i in pods.items:
        if i.metadata.name.find(helm_release_name) != -1 and i.metadata.name.find("mysql") == -1:
            pod_name = i.metadata.name
            break
    if pod_name is not None:
        try:
            v1.delete_namespaced_pod(pod_name, namespace)
        except Exception as e:
            logging.error(str(e))
            logging.error("Exception when calling CoreV1Api->delete_namespaced_pod")
            res = False
            return res
        time.sleep(5)
        # check if restart successfully
        pods = v1.list_namespaced_pod(namespace)
        for i in pods.items:
            pod_name_tmp = i.metadata.name
            if pod_name_tmp.find(helm_release_name) != -1:
                logging.debug(pod_name_tmp)
                start_time = time.time()
                # poll until Running or timeout (original compared with `>`,
                # so this loop never executed)
                while time.time() - start_time < timeout:
                    status_res = v1.read_namespaced_pod_status(pod_name_tmp, namespace, pretty='true')
                    if status_res.status.phase == "Running":
                        break
                    time.sleep(1)
                if time.time() - start_time > timeout:
                    logging.error("Restart pod: %s timeout" % pod_name_tmp)
                    res = False
                    return res
    else:
        logging.error("Pod: %s not found" % helm_release_name)
        res = False
    return res
764