Passed
Push — master (88f0f6...dc9352)
by unknown, created 02:01

test_mysql (Rating: A)

Complexity
    Total Complexity: 3

Size/Duplication
    Total Lines: 54
    Duplicated Lines: 0%

Importance
    Changes: 0

Metric   Value
eloc     42
dl       0
loc      54
rs       10
c        0
b        0
f        0
wmc      3

2 Methods

Rating   Name                                         Duplication   Size   Complexity
A        TestMysql.skip_check()                       0             6      2
A        TestMysql._test_kill_mysql_during_index()    0             21     1
import time
import random
import pdb
import threading
import logging
from multiprocessing import Pool, Process
import pytest
from milvus import IndexType, MetricType
from utils import *


dim = 128
index_file_size = 10
collection_id = "mysql_failure"
nprobe = 1
tag = "1970-01-01"


class TestMysql:

    """
    ******************************************************************
      The following cases are used to test mysql failure
    ******************************************************************
    """
    @pytest.fixture(scope="function", autouse=True)
    def skip_check(self, connect, args):
        if args["service_name"].find("shards") != -1:
            reason = "Skip restart cases in shards mode"
            logging.getLogger().info(reason)
            pytest.skip(reason)

    def _test_kill_mysql_during_index(self, connect, collection, args):
        big_nb = 20000
        index_param = {"nlist": 1024, "m": 16}
        index_type = IndexType.IVF_PQ
        vectors = gen_vectors(big_nb, dim)
Inspection note (Comprehensibility / Best Practice): the variable gen_vectors does not seem to be defined. It is most likely provided by the wildcard "from utils import *" above, which the analyzer cannot resolve.
        status, ids = connect.insert(collection, vectors, ids=[i for i in range(big_nb)])
        status = connect.flush([collection])
        assert status.OK()
        status, res_count = connect.count_entities(collection)
        logging.getLogger().info(res_count)
        assert status.OK()
        assert res_count == big_nb
        logging.getLogger().info("Start create index async")
        status = connect.create_index(collection, index_type, index_param, _async=True)
        time.sleep(2)
        logging.getLogger().info("Start play mysql failure")
        # pass
        new_connect = get_milvus(args["ip"], args["port"], handler=args["handler"])
Inspection note (Comprehensibility / Best Practice): the variable get_milvus does not seem to be defined. Like gen_vectors, it is most likely provided by the wildcard "from utils import *".
        status, res_count = new_connect.count_entities(collection)
        assert status.OK()
        assert res_count == big_nb
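
The two inspection notes above flag gen_vectors and get_milvus as undefined; both are presumably defined in the project's utils module and brought in by the wildcard import. Below is a minimal sketch of what such helpers might look like, with signatures inferred only from the call sites in this test; the Milvus constructor arguments shown are an assumption, not the real helper implementation.

import random
from milvus import Milvus  # client class from the same pymilvus package the test already imports


def gen_vectors(num, dim):
    # Hypothetical stand-in: `num` random float vectors of dimension `dim`,
    # matching the call gen_vectors(big_nb, dim) used before insert().
    return [[random.random() for _ in range(dim)] for _ in range(num)]


def get_milvus(ip, port, handler=None):
    # Hypothetical stand-in: open a fresh client connection, matching the call
    # get_milvus(args["ip"], args["port"], handler=args["handler"]).
    # Constructor signature assumed; the real helper may add retries or pooling.
    return Milvus(host=ip, port=port, handler=handler)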