Passed
Push — master ( 52a593...841a75 )
by
unknown
02:10
created

utils.gen_invalid_params()   A

Complexity

Conditions 1

Size

Total Lines 20
Code Lines 18

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 1
eloc 18
nop 0
dl 0
loc 20
rs 9.5
c 0
b 0
f 0
1
import os
2
import sys
3
import random
4
import pdb
5
import string
6
import struct
7
import logging
8
import time, datetime
9
import copy
10
import numpy as np
11
from sklearn import preprocessing
12
from milvus import Milvus, DataType
13
14
port = 19530
15
epsilon = 0.000001
16
default_flush_interval = 1
17
big_flush_interval = 1000
18
dimension = 128
19
segment_row_count = 5000
20
21
# TODO:
# Index types exercised by the tests. This list is POSITIONALLY aligned with
# default_index_params below — keep both lists in the same order (note the
# NSG entry is commented out in both).
all_index_types = [
    "FLAT",
    "IVF_FLAT",
    "IVF_SQ8",
    "IVF_SQ8_HYBRID",
    "IVF_PQ",
    "HNSW",
    # "NSG",
    "ANNOY",
    "BIN_FLAT",
    "BIN_IVF_FLAT"
]


# Default build parameters for each entry of all_index_types (same order).
default_index_params = [
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024},
    {"nlist": 1024, "m": 16},
    {"M": 48, "efConstruction": 500},
    # {"search_length": 50, "out_degree": 40, "candidate_pool_size": 100, "knng": 50},
    {"n_trees": 4},
    {"nlist": 1024},
    {"nlist": 1024}
]
48
49
50
def index_cpu_not_support():
    """Return the index types that cannot be built on CPU-only deployments."""
    unsupported = [
        "IVF_SQ8_HYBRID",
    ]
    return unsupported
52
53
54
def binary_support():
    """Return the index types that operate on binary vectors."""
    supported = [
        "BIN_FLAT",
        "BIN_IVF_FLAT",
    ]
    return supported
56
57
58
def delete_support():
    """Return the index types that support entity deletion in these tests."""
    supported = [
        "FLAT",
        "IVF_FLAT",
        "IVF_SQ8",
        "IVF_SQ8_HYBRID",
        "IVF_PQ",
    ]
    return supported
60
61
62
def ivf():
    """Return the IVF-family index types (plus FLAT, which shares their params)."""
    family = [
        "FLAT",
        "IVF_FLAT",
        "IVF_SQ8",
        "IVF_SQ8_HYBRID",
        "IVF_PQ",
    ]
    return family
64
65
66
def l2(x, y):
    """Euclidean (L2) distance between two vectors."""
    diff = np.array(x) - np.array(y)
    return np.linalg.norm(diff)
68
69
70
def ip(x, y):
    """Inner-product similarity between two vectors."""
    a = np.array(x)
    b = np.array(y)
    return np.inner(a, b)
72
73
74
def jaccard(x, y):
    """Jaccard distance between two binary (0/1) vectors.

    Fix: cast with the builtin ``bool`` — the ``np.bool`` alias was deprecated
    in NumPy 1.20 and removed in 1.24, so the original raised AttributeError
    on current NumPy.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    intersection = np.double(np.bitwise_and(x, y).sum())
    union = np.double(np.bitwise_or(x, y).sum())
    return 1 - intersection / union
78
79
80
def hamming(x, y):
    """Hamming distance (count of differing bits) between two 0/1 vectors.

    Fix: cast with the builtin ``bool`` — ``np.bool`` was removed in NumPy 1.24.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return np.bitwise_xor(x, y).sum()
84
85
86
def tanimoto(x, y):
    """Tanimoto distance (-log2 of the Jaccard similarity) for 0/1 vectors.

    Fix: cast with the builtin ``bool`` — ``np.bool`` was removed in NumPy 1.24.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    similarity = np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum())
    return -np.log2(similarity)
90
91
92
def substructure(x, y):
    """Substructure distance: 1 - |x AND y| / |y| for 0/1 vectors (0 when y ⊆ x).

    Fix: cast with the builtin ``bool`` — ``np.bool`` was removed in NumPy 1.24.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(y)
96
97
98
def superstructure(x, y):
    """Superstructure distance: 1 - |x AND y| / |x| for 0/1 vectors (0 when x ⊆ y).

    Fix: cast with the builtin ``bool`` — ``np.bool`` was removed in NumPy 1.24.
    """
    x = np.asarray(x, bool)
    y = np.asarray(y, bool)
    return 1 - np.double(np.bitwise_and(x, y).sum()) / np.count_nonzero(x)
102
103
104
def get_milvus(host, port, uri=None, handler=None, **kwargs):
    """Construct a Milvus client, preferring *uri* over host/port when provided.

    Keyword args:
        try_connect: forwarded to the client; defaults to True.
    """
    if handler is None:
        handler = "GRPC"
    try_connect = kwargs.get("try_connect", True)
    if uri is None:
        return Milvus(host=host, port=port, handler=handler, try_connect=try_connect)
    return Milvus(uri=uri, handler=handler, try_connect=try_connect)
113
114
115
def disable_flush(connect):
    # Raise auto_flush_interval so the server effectively never auto-flushes
    # while a test runs; undone by enable_flush().
    connect.set_config("storage", "auto_flush_interval", big_flush_interval)
117
118
119
def enable_flush(connect):
    """Restore auto_flush_interval to the test default and verify it took effect."""
    connect.set_config("storage", "auto_flush_interval", default_flush_interval)
    value = connect.get_config("storage", "auto_flush_interval")
    # get_config returns the value as a string
    assert value == str(default_flush_interval)
124
125
126
def gen_inaccuracy(num):
    """Scale *num* down by 255 to produce a small perturbation value."""
    divisor = 255.0
    return num / divisor
128
129
130
def gen_vectors(num, dim, is_normal=False):
    """Generate *num* random float vectors of length *dim*, L2-normalized.

    NOTE(review): *is_normal* is accepted but never read — normalization is
    always applied; confirm against callers before honoring the flag.
    """
    raw = [[random.random() for _ in range(dim)] for _ in range(num)]
    normalized = preprocessing.normalize(raw, axis=1, norm='l2')
    return normalized.tolist()
134
135
136
# def gen_vectors(num, dim, seed=np.random.RandomState(1234), is_normal=False):
137
#     xb = seed.rand(num, dim).astype("float32")
138
#     xb = preprocessing.normalize(xb, axis=1, norm='l2')
139
#     return xb.tolist()
140
141
142
def gen_binary_vectors(num, dim):
    """Generate *num* random binary vectors of *dim* bits.

    Returns:
        (raw_vectors, binary_vectors): the 0/1 lists and their bit-packed
        ``bytes`` equivalents.
    """
    raw_vectors = []
    binary_vectors = []
    for _ in range(num):
        bits = [random.randint(0, 1) for _ in range(dim)]
        raw_vectors.append(bits)
        binary_vectors.append(bytes(np.packbits(bits, axis=-1).tolist()))
    return raw_vectors, binary_vectors
150
151
152
def gen_binary_sub_vectors(vectors, length):
    """Copy the 1-bits of the first *length* vectors into fresh raw/packed pairs.

    Returns:
        (raw_vectors, binary_vectors) mirroring gen_binary_vectors' layout.
    """
    raw_vectors = []
    binary_vectors = []
    dim = len(vectors[0])
    for row in range(length):
        bits = [0] * dim
        for pos, bit in enumerate(vectors[row]):
            if bit == 1:
                bits[pos] = 1
        raw_vectors.append(bits)
        binary_vectors.append(bytes(np.packbits(bits, axis=-1).tolist()))
    return raw_vectors, binary_vectors
165
166
167
def gen_binary_super_vectors(vectors, length):
    """Return *length* all-ones vectors (supersets of any 0/1 vector) with the
    same dimension as *vectors*, both raw and bit-packed.

    Cleanup: the original computed ``np.count_nonzero(vectors[i])`` into an
    unused local each iteration; that dead work is removed (output unchanged).
    """
    raw_vectors = []
    binary_vectors = []
    dim = len(vectors[0])
    for _ in range(length):
        ones = [1] * dim
        raw_vectors.append(ones)
        binary_vectors.append(bytes(np.packbits(ones, axis=-1).tolist()))
    return raw_vectors, binary_vectors
177
178
179
def gen_int_attr(row_num):
    """Random integer attribute column with values in [0, 255]."""
    values = []
    for _ in range(row_num):
        values.append(random.randint(0, 255))
    return values
181
182
183
def gen_float_attr(row_num):
    """Random float attribute column with values in [0, 255]."""
    values = []
    for _ in range(row_num):
        values.append(random.uniform(0, 255))
    return values
185
186
187
def gen_unique_str(str_value=None):
    """Return *str_value* (or "test") joined to a random 8-char alphanumeric suffix."""
    alphabet = string.ascii_letters + string.digits
    suffix = "".join(random.choice(alphabet) for _ in range(8))
    if str_value is None:
        return "test_" + suffix
    return str_value + "_" + suffix
190
191
192
def gen_single_filter_fields():
    """One field spec per supported scalar DataType, in DataType enum order."""
    scalar_types = (DataType.INT32, DataType.INT64, DataType.FLOAT, DataType.DOUBLE)
    return [{"field": dt.name, "type": dt} for dt in DataType if dt in scalar_types]
198
199
200
def gen_single_vector_fields():
    """One field spec per vector DataType, using the module default dimension."""
    fields = []
    for dt in (DataType.FLOAT_VECTOR, DataType.BINARY_VECTOR):
        fields.append({"field": dt.name, "type": dt, "params": {"dim": dimension}})
    return fields
206
207
208
def gen_default_fields():
    """Default collection schema: int64 + float scalars and one float vector field."""
    return {
        "fields": [
            {"field": "int64", "type": DataType.INT64},
            {"field": "float", "type": DataType.FLOAT},
            {"field": "vector", "type": DataType.FLOAT_VECTOR, "params": {"dim": dimension}},
        ],
        "segment_row_count": segment_row_count,
    }
218
219
220
def gen_entities(nb, is_normal=False):
    """Build *nb* default entities: constant scalars plus random float vectors."""
    vectors = gen_vectors(nb, dimension, is_normal)
    return [
        {"field": "int64", "type": DataType.INT64, "values": [2] * nb},
        {"field": "float", "type": DataType.FLOAT, "values": [3.0] * nb},
        {"field": "vector", "type": DataType.FLOAT_VECTOR, "values": vectors},
    ]
228
229
230
def gen_binary_entities(nb):
    """Build *nb* binary-vector entities.

    Returns:
        (raw_vectors, entities): the unpacked 0/1 vectors and the entity list.
    """
    raw_vectors, vectors = gen_binary_vectors(nb, dimension)
    entities = [
        {"field": "int64", "type": DataType.INT64, "values": [2] * nb},
        {"field": "float", "type": DataType.FLOAT, "values": [3.0] * nb},
        {"field": "binary_vector", "type": DataType.BINARY_VECTOR, "values": vectors},
    ]
    return raw_vectors, entities
238
239
240
def gen_entities_by_fields(fields, nb, dimension):
    """Attach *nb* generated values to each field spec and return the list.

    NOTE: mutates the dicts in *fields* in place (adds a "values" key).

    Raises:
        ValueError: for field types with no generator. Previously that path
        left ``field_value`` undefined — a NameError on the first field, or
        silently reusing the previous field's values afterwards (this was
        flagged by static analysis).
    """
    entities = []
    for field in fields:
        if field["type"] in [DataType.INT32, DataType.INT64]:
            field_value = [1 for i in range(nb)]
        elif field["type"] in [DataType.FLOAT, DataType.DOUBLE]:
            field_value = [3.0 for i in range(nb)]
        elif field["type"] == DataType.BINARY_VECTOR:
            field_value = gen_binary_vectors(nb, dimension)[1]
        elif field["type"] == DataType.FLOAT_VECTOR:
            field_value = gen_vectors(nb, dimension)
        else:
            raise ValueError("Unsupported field type: %s" % field["type"])
        field.update({"values": field_value})
        entities.append(field)
    return entities
254
255
256
def assert_equal_entity(a, b):
    # TODO(review): placeholder — entity equality checking is not implemented,
    # so callers currently get no verification from this helper.
    pass
258
259
260
def gen_query_vectors_inside_entities(field_name, entities, top_k, nq, search_params=None):
    """Build a vector search query using the first *nq* vectors already in *entities*.

    Args:
        field_name: name of the vector field to search on.
        entities: entity list; the LAST element must be the vector field.
        top_k: number of results requested per query vector.
        nq: number of query vectors to take.
        search_params: index-specific search params; defaults to {"nprobe": 10}.
            (A None sentinel replaces the original mutable default argument,
            which is shared across calls and unsafe if ever mutated.)

    Returns:
        (query dict, query_vectors list).
    """
    if search_params is None:
        search_params = {"nprobe": 10}
    query_vectors = entities[-1]["values"][:nq]
    query = {
        "bool": {
            "must": [
                {"vector": {field_name: {"topk": top_k, "query": query_vectors, "params": search_params}}}
            ]
        }
    }
    return query, query_vectors
270
271
272
def gen_query_vectors_rand_entities(field_name, entities, top_k, nq, search_params=None):
    """Build a vector search query from *nq* freshly generated random vectors.

    The vector dimension is inferred from the last entity field in *entities*.

    Args:
        search_params: index-specific search params; defaults to {"nprobe": 10}.
            (A None sentinel replaces the original mutable default argument.)

    Returns:
        (query dict, query_vectors list).
    """
    if search_params is None:
        search_params = {"nprobe": 10}
    dimension = len(entities[-1]["values"][0])
    query_vectors = gen_vectors(nq, dimension)
    query = {
        "bool": {
            "must": [
                {"vector": {field_name: {"topk": top_k, "query": query_vectors, "params": search_params}}}
            ]
        }
    }
    return query, query_vectors
283
284
285
286
def add_field(entities):
    """Append a new INT64 field (all ones) sized to match the existing entities.

    Mutates *entities* in place and returns it.
    """
    nb = len(entities[0]["values"])
    entities.append({
        "field": gen_unique_str(),
        "type": DataType.INT64,
        "values": [1] * nb,
    })
    return entities
295
296
297
def add_vector_field(entities, is_normal=False):
    # NOTE(review): this definition is shadowed by the later
    # `def add_vector_field(nb, dimension=dimension)` further down the file,
    # so this version is dead at import time — confirm which one callers expect.
    # Appends a FLOAT_VECTOR field whose row count matches entities[0].
    nb = len(entities[0]["values"])
    vectors = gen_vectors(nb, dimension, is_normal)
    field = {
        "field": gen_unique_str(), 
        "type": DataType.FLOAT_VECTOR,
        "values": vectors
    }
    entities.append(field)
    return entities
307
308
309
# def update_fields_metric_type(fields, metric_type):
310
#     tmp_fields = copy.deepcopy(fields)
311
#     if metric_type in ["L2", "IP"]:
312
#         tmp_fields["fields"][-1]["type"] = DataType.FLOAT_VECTOR
313
#     else:
314
#         tmp_fields["fields"][-1]["type"] = DataType.BINARY_VECTOR
315
#     tmp_fields["fields"][-1]["params"]["metric_type"] = metric_type
316
#     return tmp_fields
317
318
319
def remove_field(entities):
    """Drop the first (scalar) field in place and return the list."""
    entities.pop(0)
    return entities
322
323
324
def remove_vector_field(entities):
    """Drop the last (vector) field in place and return the list."""
    entities.pop()
    return entities
327
328
329
def update_field_name(entities, old_name, new_name):
    """Rename every field called *old_name* to *new_name* (in place)."""
    for entity in entities:
        if entity["field"] == old_name:
            entity["field"] = new_name
    return entities
334
335
336
def update_field_type(entities, old_name, new_name):
    """Set the type of every field called *old_name* to *new_name* (in place).

    NOTE(review): despite the parameter name, *new_name* is stored as the
    field's "type" value.
    """
    for entity in entities:
        if entity["field"] == old_name:
            entity["type"] = new_name
    return entities
341
342
343
def update_field_value(entities, old_type, new_value):
    """Replace every value of fields whose type is *old_type* with *new_value*.

    Mutates the value lists in place and returns *entities*.

    Bug fix: the original iterated ``for i in item["values"]`` and then indexed
    ``item["values"][i]`` — using the VALUES themselves as indices, which
    raises IndexError (or overwrites the wrong slots) for most data.
    """
    for item in entities:
        if item["type"] == old_type:
            values = item["values"]
            for i in range(len(values)):
                values[i] = new_value
    return entities
349
350
351
def add_vector_field(nb, dimension=dimension):
    # NOTE(review): this redefinition shadows the earlier
    # add_vector_field(entities, is_normal=False) above — only this version is
    # live at import time; confirm which one callers expect.
    field_name = gen_unique_str()
    # NOTE(review): `field` is built (consuming random vectors) but never
    # returned or stored — looks like a bug; only the generated name escapes.
    field = {
        "field": field_name,
        "type": DataType.FLOAT_VECTOR,
        "values": gen_vectors(nb, dimension)
    }
    return field_name
359
        
360
361
def gen_segment_row_counts():
    """Segment row-count values exercised by the tests (small to large)."""
    return [1, 2, 1024, 4096]
369
370
371
def gen_invalid_ips():
    """Host strings a client should reject as invalid IP addresses."""
    return [
        "127.0.0",           # too few octets
        "12-s",
        " ",
        "12 s",
        "BB。A",
        " siede ",
        "(mn)",
        "中文",
        "a".join("a" for _ in range(256)),  # 511-char hostname
    ]
389
390
391
def gen_invalid_uris():
    """URI strings a client should reject (bad host or malformed address)."""
    return [
        " ",
        "中文",
        "tcp:// :19530",        # blank host
        "tcp://127.0.0:19530",  # malformed IP
        "tcp://\n:19530",       # control character in host
    ]
418
419
420
def gen_invalid_strs():
    """Values that should be rejected wherever a name string is expected."""
    return [
        1,      # wrong type: int
        [1],    # wrong type: list
        None,
        "12-s",
        " ",
        "12 s",
        "BB。A",
        "c|c",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for i in range(256)),  # 511-char string
    ]
440
441
442
def gen_invalid_field_types():
    """Values that are not valid field types."""
    return [
        "=c",
        None,
        "",
        "a".join("a" for i in range(256)),  # 511-char string
    ]
452
453
454
def gen_invalid_metric_types():
    """Values that are not valid metric types."""
    return [
        1,
        "=c",
        0,
        None,
        "",
        "a".join("a" for i in range(256)),  # 511-char string
    ]
464
465
466
# TODO:
467
def gen_invalid_ints():
    """Values that should be rejected wherever an integer (e.g. top_k) is expected."""
    return [
        None,
        "stringg",
        [1, 2, 3],
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for i in range(256)),  # 511-char string
    ]
488
489
490
def gen_invalid_params():
    """Values that should be rejected as index/search parameter values."""
    return [
        9999999999,  # out of range
        -1,          # negative
        [1, 2, 3],
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
    ]
510
511
512
def gen_invalid_vectors():
    """Values that should be rejected wherever a vector is expected."""
    return [
        "1*2",
        [],
        [1],
        [1, 2],
        [" "],
        ['a'],
        [None],
        None,
        (1, 2),
        {"a": 1},
        " ",
        "",
        "String",
        "12-s",
        "BB。A",
        " siede ",
        "(mn)",
        "pip+",
        "=c",
        "中文",
        "a".join("a" for i in range(256)),  # 511-char string
    ]
537
538
539
def gen_invaild_search_params():
    """Invalid search-param combinations per index type (FLAT is skipped).

    NOTE(review): the name keeps its historical typo ("invaild") because
    external callers reference it by this spelling.
    """
    invalid_search_key = 100
    search_params = []
    for index_type in all_index_types:
        if index_type == "FLAT":
            continue
        # every non-FLAT index also gets an unknown-key entry
        search_params.append(
            {"index_type": index_type, "search_params": {"invalid_key": invalid_search_key}})
        if index_type in delete_support():  # IVF family (same set as ivf())
            for nprobe in gen_invalid_params():
                search_params.append(
                    {"index_type": index_type, "search_params": {"nprobe": nprobe}})
        elif index_type == "HNSW":
            for ef in gen_invalid_params():
                search_params.append(
                    {"index_type": index_type, "search_params": {"ef": ef}})
        elif index_type == "NSG":
            for search_length in gen_invalid_params():
                search_params.append(
                    {"index_type": index_type, "search_params": {"search_length": search_length}})
            search_params.append({"index_type": index_type, "search_params": {"invalid_key": 100}})
        elif index_type == "ANNOY":
            for search_k in gen_invalid_params():
                if isinstance(search_k, int):
                    # ints are syntactically valid search_k values; skip them
                    continue
                search_params.append(
                    {"index_type": index_type, "search_params": {"search_k": search_k}})
    return search_params
566
567
568
def gen_invalid_index():
    """Index specs the server should reject, covering each parameter slot in turn."""
    index_params = []
    # invalid index_type values
    for index_type in gen_invalid_strs():
        index_params.append({"index_type": index_type, "params": {"nlist": 1024}})
    # IVF_FLAT with an invalid nlist
    for nlist in gen_invalid_params():
        index_params.append({"index_type": "IVF_FLAT", "params": {"nlist": nlist}})
    # HNSW with an invalid M, then an invalid efConstruction
    for M in gen_invalid_params():
        index_params.append({"index_type": "HNSW", "params": {"M": M, "efConstruction": 100}})
    for efConstruction in gen_invalid_params():
        index_params.append({"index_type": "HNSW", "params": {"M": 16, "efConstruction": efConstruction}})
    # NSG with one invalid knob at a time
    for search_length in gen_invalid_params():
        index_params.append({
            "index_type": "NSG",
            "params": {"search_length": search_length, "out_degree": 40,
                       "candidate_pool_size": 50, "knng": 100},
        })
    for out_degree in gen_invalid_params():
        index_params.append({
            "index_type": "NSG",
            "params": {"search_length": 100, "out_degree": out_degree,
                       "candidate_pool_size": 50, "knng": 100},
        })
    for candidate_pool_size in gen_invalid_params():
        index_params.append({
            "index_type": "NSG",
            "params": {"search_length": 100, "out_degree": 40,
                       "candidate_pool_size": candidate_pool_size, "knng": 100},
        })
    # unknown parameter keys
    index_params.append({"index_type": "IVF_FLAT", "params": {"invalid_key": 1024}})
    index_params.append({"index_type": "HNSW", "params": {"invalid_key": 16, "efConstruction": 100}})
    index_params.append({
        "index_type": "NSG",
        "params": {"invalid_key": 100, "out_degree": 40, "candidate_pool_size": 300, "knng": 100},
    })
    # ANNOY with an invalid n_trees
    for invalid_n_trees in gen_invalid_params():
        index_params.append({"index_type": "ANNOY", "params": {"n_trees": invalid_n_trees}})

    return index_params
606
607
608
def gen_index():
    """Cartesian-product index configurations for every supported index type."""
    nlists = [1, 1024, 16384]
    pq_ms = [128, 64, 32, 16, 8, 4]
    Ms = [5, 24, 48]
    efConstructions = [100, 300, 500]
    search_lengths = [10, 100, 300]
    out_degrees = [5, 40, 300]
    candidate_pool_sizes = [50, 100, 300]
    knngs = [5, 100, 300]

    index_params = []
    for index_type in all_index_types:
        if index_type in ("FLAT", "BIN_FLAT", "BIN_IVF_FLAT"):
            # flat/binary types take a single default nlist
            index_params.append({"index_type": index_type, "index_param": {"nlist": 1024}})
        elif index_type in ("IVF_FLAT", "IVF_SQ8", "IVF_SQ8_HYBRID"):
            for nlist in nlists:
                index_params.append({"index_type": index_type, "index_param": {"nlist": nlist}})
        elif index_type == "IVF_PQ":
            for nlist in nlists:
                for m in pq_ms:
                    index_params.append(
                        {"index_type": index_type, "index_param": {"nlist": nlist, "m": m}})
        elif index_type == "HNSW":
            for M in Ms:
                for efConstruction in efConstructions:
                    index_params.append(
                        {"index_type": index_type,
                         "index_param": {"M": M, "efConstruction": efConstruction}})
        elif index_type == "NSG":
            for search_length in search_lengths:
                for out_degree in out_degrees:
                    for candidate_pool_size in candidate_pool_sizes:
                        for knng in knngs:
                            index_params.append(
                                {"index_type": index_type,
                                 "index_param": {"search_length": search_length,
                                                 "out_degree": out_degree,
                                                 "candidate_pool_size": candidate_pool_size,
                                                 "knng": knng}})

    return index_params
647
648
649
def gen_simple_index():
    """One default index config per non-binary index type.

    Relies on all_index_types and default_index_params being positionally
    aligned.
    """
    index_params = []
    for i, index_type in enumerate(all_index_types):
        if index_type in binary_support():
            continue
        entry = {"index_type": index_type}
        entry.update(default_index_params[i])
        index_params.append(entry)
    return index_params
658
659
660
def gen_binary_index():
    """One default index config per binary index type.

    Relies on all_index_types and default_index_params being positionally
    aligned.
    """
    index_params = []
    for i, index_type in enumerate(all_index_types):
        if index_type in binary_support():
            entry = {"index_type": index_type}
            entry.update(default_index_params[i])
            index_params.append(entry)
    return index_params
668
669
670
def get_search_param(index_type):
    """Default valid search params for *index_type*.

    Returns None (after logging) for unrecognized index types.
    """
    if index_type in ivf() or index_type in binary_support():
        return {"nprobe": 32}
    if index_type == "HNSW":
        return {"ef": 64}
    if index_type == "NSG":
        return {"search_length": 100}
    if index_type == "ANNOY":
        return {"search_k": 100}
    logging.getLogger().info("Invalid index_type.")
681
682
683
def assert_equal_vector(v1, v2):
    """Assert two float vectors match element-wise within ``epsilon``."""
    assert len(v1) == len(v2)
    for a, b in zip(v1, v2):
        assert abs(a - b) < epsilon
688
689
690
def restart_server(helm_release_name):
    """Delete the Milvus server pod for *helm_release_name* and wait for the
    replacement pod to reach Running.

    Args:
        helm_release_name: helm release whose pod should be restarted; pods
            whose name contains "mysql" are skipped.

    Returns:
        bool: True when the pod was deleted and the new pod became Running
        within the timeout; False on delete failure, timeout, or pod not found.

    Bug fix: the status-wait loop used ``while time.time() - start_time >
    timeout`` — false at the start, so the loop never executed and the
    function never actually waited for the new pod. The condition is now
    ``< timeout`` (and the post-loop check uses ``>=``).
    """
    res = True
    timeout = 120
    from kubernetes import client, config
    client.rest.logger.setLevel(logging.WARNING)

    namespace = "milvus"
    config.load_kube_config()
    v1 = client.CoreV1Api()
    pod_name = None
    pods = v1.list_namespaced_pod(namespace)
    for i in pods.items:
        # pick the server pod; skip the mysql sidecar
        if i.metadata.name.find(helm_release_name) != -1 and i.metadata.name.find("mysql") == -1:
            pod_name = i.metadata.name
            break
    if pod_name is not None:
        try:
            v1.delete_namespaced_pod(pod_name, namespace)
        except Exception as e:
            logging.error(str(e))
            logging.error("Exception when calling CoreV1Api->delete_namespaced_pod")
            res = False
            return res
        time.sleep(5)
        # check if restart successfully
        pods = v1.list_namespaced_pod(namespace)
        for i in pods.items:
            pod_name_tmp = i.metadata.name
            if pod_name_tmp.find(helm_release_name) != -1:
                logging.debug(pod_name_tmp)
                start_time = time.time()
                # poll until the replacement pod is Running or we time out
                while time.time() - start_time < timeout:
                    status_res = v1.read_namespaced_pod_status(pod_name_tmp, namespace, pretty='true')
                    if status_res.status.phase == "Running":
                        break
                    time.sleep(1)
                if time.time() - start_time >= timeout:
                    logging.error("Restart pod: %s timeout" % pod_name_tmp)
                    res = False
                    return res
    else:
        logging.error("Pod: %s not found" % helm_release_name)
        res = False
    return res
740