Total Complexity: 158
Total Lines: 1131
Duplicated Lines: 9.11 %
Changes: 0
Duplicate code is one of the most pungent code smells. A common rule of thumb is to restructure code once it has been duplicated in three or more places.
Common duplication problems, and corresponding solutions, are easiest to see in the blocks flagged below: several tests repeat the same insert/flush/create-index/verify sequence with only the payload changed. One way to collapse such a pair is sketched after this paragraph.
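For illustration only: a minimal sketch of how test_insert_create_index and test_insert_create_index_new (which differ only in the entities they insert) could be folded into one parametrized test. It assumes the fixtures and constants used in the listing below (connect, collection, get_simple_index, default_entities, default_entities_new, default_nb, field_name); the consolidated test name is ours, not part of the file under review.

import pytest

# Hypothetical consolidation of the two near-identical insert-then-index tests.
@pytest.mark.timeout(60)
@pytest.mark.parametrize("entities", [default_entities, default_entities_new],
                         ids=["default", "new_format"])
def test_insert_then_create_index(connect, collection, get_simple_index, entities):
    ids = connect.bulk_insert(collection, entities)                  # insert first
    assert len(ids) == default_nb
    connect.flush([collection])
    connect.create_index(collection, field_name, get_simple_index)   # then build the index
    info = connect.get_collection_info(collection)
    indexed = [f["indexes"][0] for f in info["fields"] if f["name"] == field_name]
    assert indexed == [get_simple_index]

The same move would apply to the binary variants in TestInsertBinary, which repeat the sequence against binary_field_name.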
Complex classes like test_bulk_insert often do a lot of different things. To break such a class down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often the faster route. A small first step for this file is sketched below.
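Applied to this file, the usual first step is Extract Method rather than a full Extract Class: the "fetch get_collection_info, find the field, compare its first index" block recurs in at least five tests across TestInsertBase and TestInsertBinary. A minimal sketch, assuming the same connect API as the listing (the helper name assert_field_index is ours):

import pytest

def assert_field_index(connect, collection_name, target_field, expected_index):
    # Shared assertion for the duplicated insert/flush/create_index tests below.
    info = connect.get_collection_info(collection_name)
    for field in info["fields"]:
        if field["name"] == target_field:
            assert field["indexes"][0] == expected_index
            return
    pytest.fail("field %r has no index info in collection %r" % (target_field, collection_name))

Each duplicated test then shrinks to its insert/flush/create_index prelude plus one assert_field_index call; if more shared setup accumulates (payload generators, index fixtures), the helper is a natural seed for an extracted base class or mixin.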
import logging
import time
import pdb
import copy
import threading
from multiprocessing import Pool, Process
import pytest
from milvus import DataType
from utils import *
from constants import *

ADD_TIMEOUT = 60
uid = "test_insert"
field_name = default_float_vec_field_name
binary_field_name = default_binary_vec_field_name
default_single_query = {
    "bool": {
        "must": [
            {"vector": {field_name: {"topk": 10, "query": gen_vectors(1, default_dim), "metric_type": "L2",
                                     "params": {"nprobe": 10}}}}
        ]
    }
}


class TestInsertBase:
    """
    ******************************************************************
    The following cases are used to test `insert` function
    ******************************************************************
    """

    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_simple_index(self, request, connect):
        if str(connect._cmd("mode")) == "CPU":
            if request.param["index_type"] in index_cpu_not_support():
                pytest.skip("CPU not support index_type: ivf_sq8h")
        return request.param

    @pytest.fixture(
        scope="function",
        params=gen_single_filter_fields()
    )
    def get_filter_field(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_single_vector_fields()
    )
    def get_vector_field(self, request):
        yield request.param

    def test_add_vector_with_empty_vector(self, connect, collection):
        '''
        target: test add vectors with an empty vectors list
        method: set an empty vectors list as the add method params
        expected: raises an Exception
        '''
        vector = []
        with pytest.raises(Exception) as e:
            status, ids = connect.bulk_insert(collection, vector)

    def test_add_vector_with_None(self, connect, collection):
        '''
        target: test add vectors with None
        method: set None as the add method params
        expected: raises an Exception
        '''
        vector = None
        with pytest.raises(Exception) as e:
            status, ids = connect.bulk_insert(collection, vector)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_collection_not_existed(self, connect):
        '''
        target: test insert, with collection not existed
        method: insert entity into a randomly named collection
        expected: error raised
        '''
        collection_name = gen_unique_str(uid)
        with pytest.raises(Exception) as e:
            connect.bulk_insert(collection_name, default_entities)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_drop_collection(self, connect, collection):
        '''
        target: test delete collection after insert vector
        method: insert vector and delete collection
        expected: no error raised
        '''
        ids = connect.bulk_insert(collection, default_entity)
        assert len(ids) == 1
        connect.drop_collection(collection)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_sleep_drop_collection(self, connect, collection):
        '''
        target: test delete collection a while after insert vector
        method: insert vector, flush, and delete collection
        expected: no error raised
        '''
        ids = connect.bulk_insert(collection, default_entity)
        assert len(ids) == 1
        connect.flush([collection])
        connect.drop_collection(collection)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_create_index(self, connect, collection, get_simple_index):
        '''
        target: test build index after insert vector
        method: insert vector and build index
        expected: no error raised
        '''
        ids = connect.bulk_insert(collection, default_entities)
        assert len(ids) == default_nb
        connect.flush([collection])
        connect.create_index(collection, field_name, get_simple_index)
        info = connect.get_collection_info(collection)
        fields = info["fields"]
        for field in fields:
            if field["name"] == field_name:
                assert field["indexes"][0] == get_simple_index

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_create_index_new(self, connect, collection, get_simple_index):
        '''
        target: test build index after insert vector, with the new entity format
        method: insert vector and build index
        expected: no error raised
        '''
        ids = connect.bulk_insert(collection, default_entities_new)
        assert len(ids) == default_nb
        connect.flush([collection])
        connect.create_index(collection, field_name, get_simple_index)
        info = connect.get_collection_info(collection)
        fields = info["fields"]
        for field in fields:
            if field["name"] == field_name:
                assert field["indexes"][0] == get_simple_index

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_after_create_index(self, connect, collection, get_simple_index):
        '''
        target: test insert vector after building index
        method: build index and insert vector
        expected: no error raised
        '''
        connect.create_index(collection, field_name, get_simple_index)
        ids = connect.bulk_insert(collection, default_entities)
        assert len(ids) == default_nb
        info = connect.get_collection_info(collection)
        fields = info["fields"]
        for field in fields:
            if field["name"] == field_name:
                assert field["indexes"][0] == get_simple_index

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_search(self, connect, collection):
        '''
        target: test search vector a while after insert vector
        method: insert vector, flush, and search collection
        expected: no error raised
        '''
        ids = connect.bulk_insert(collection, default_entities)
        connect.flush([collection])
        res = connect.search(collection, default_single_query)
        logging.getLogger().debug(res)
        assert res

    def test_insert_segment_row_count(self, connect, collection):
        nb = default_segment_row_limit + 1
        res_ids = connect.bulk_insert(collection, gen_entities(nb))
        connect.flush([collection])
        assert len(res_ids) == nb
        stats = connect.get_collection_stats(collection)
        assert len(stats['partitions'][0]['segments']) == 2
        for segment in stats['partitions'][0]['segments']:
            assert segment['row_count'] in [default_segment_row_limit, 1]

    @pytest.fixture(
        scope="function",
        params=[
            1,
            2000
        ],
    )
    def insert_count(self, request):
        yield request.param

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_ids(self, connect, id_collection, insert_count):
        '''
        target: test insert vectors in collection, using customized ids
        method: create collection and insert vectors in it, check the ids returned and the collection length after the vectors are inserted
        expected: the length of ids equals the collection row count
        '''
        nb = insert_count
        ids = [i for i in range(nb)]
        res_ids = connect.bulk_insert(id_collection, gen_entities(nb), ids)
        connect.flush([id_collection])
        assert len(res_ids) == nb
        assert res_ids == ids
        res_count = connect.count_entities(id_collection)
        assert res_count == nb

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_the_same_ids(self, connect, id_collection, insert_count):
        '''
        target: test insert vectors in collection, using the same customized id for every entity
        method: create collection and insert vectors in it, check the ids returned and the collection length after the vectors are inserted
        expected: the length of ids equals the collection row count
        '''
        nb = insert_count
        ids = [1 for i in range(nb)]
        res_ids = connect.bulk_insert(id_collection, gen_entities(nb), ids)
        connect.flush([id_collection])
        assert len(res_ids) == nb
        assert res_ids == ids
        res_count = connect.count_entities(id_collection)
        assert res_count == nb

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
        '''
        target: test create normal collection with different fields, insert entities into it with ids
        method: create collection with diff fields: metric/field_type/..., insert, and count
        expected: row count correct
        '''
        nb = 5
        filter_field = get_filter_field
        vector_field = get_vector_field
        collection_name = gen_unique_str("test_collection")
        fields = {
            "fields": [filter_field, vector_field],
            "segment_row_limit": default_segment_row_limit,
            "auto_id": True
        }
        connect.create_collection(collection_name, fields)
        ids = [i for i in range(nb)]
        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
        res_ids = connect.bulk_insert(collection_name, entities, ids)
        assert res_ids == ids
        connect.flush([collection_name])
        res_count = connect.count_entities(collection_name)
        assert res_count == nb

    # TODO: assert exception && enable
    @pytest.mark.level(2)
    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_twice_ids_no_ids(self, connect, id_collection):
        '''
        target: check the result of insert, with and without the ids param
        method: insert vectors twice, first with customized ids and then without ids
        expected: error raised
        '''
        ids = [i for i in range(default_nb)]
        res_ids = connect.bulk_insert(id_collection, default_entities, ids)
        with pytest.raises(Exception) as e:
            res_ids_new = connect.bulk_insert(id_collection, default_entities)

    # TODO: assert exception && enable
    @pytest.mark.level(2)
    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_twice_not_ids_ids(self, connect, id_collection):
        '''
        target: check the result of insert, with and without the ids param
        method: insert vectors twice, first without ids and then with customized ids
        expected: error raised
        '''
        with pytest.raises(Exception) as e:
            res_ids = connect.bulk_insert(id_collection, default_entities)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_ids_length_not_match_batch(self, connect, id_collection):
        '''
        target: test insert vectors in collection, using customized ids with len(ids) != len(vectors)
        method: create collection and insert vectors in it
        expected: raise an exception
        '''
        ids = [i for i in range(1, default_nb)]
        logging.getLogger().info(len(ids))
        with pytest.raises(Exception) as e:
            res_ids = connect.bulk_insert(id_collection, default_entities, ids)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_ids_length_not_match_single(self, connect, collection):
        '''
        target: test insert vectors in collection, using customized ids with len(ids) != len(vectors)
        method: create collection and insert vectors in it
        expected: raise an exception
        '''
        ids = [i for i in range(1, default_nb)]
        logging.getLogger().info(len(ids))
        with pytest.raises(Exception) as e:
            res_ids = connect.bulk_insert(collection, default_entity, ids)

    # Renamed from test_insert_ids_fields: the original name duplicated the test above and shadowed it.
    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_no_ids_fields(self, connect, get_filter_field, get_vector_field):
        '''
        target: test create normal collection with different fields, insert entities into it without ids
        method: create collection with diff fields: metric/field_type/..., insert, and count
        expected: row count correct
        '''
        nb = 5
        filter_field = get_filter_field
        vector_field = get_vector_field
        collection_name = gen_unique_str("test_collection")
        fields = {
            "fields": [filter_field, vector_field],
            "segment_row_limit": default_segment_row_limit
        }
        connect.create_collection(collection_name, fields)
        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
        res_ids = connect.bulk_insert(collection_name, entities)
        connect.flush([collection_name])
        res_count = connect.count_entities(collection_name)
        assert res_count == nb

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_tag(self, connect, collection):
        '''
        target: test insert entities in collection created before
        method: create collection and insert entities in it, with the partition_tag param
        expected: the collection row count equals nb
        '''
        connect.create_partition(collection, default_tag)
        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
        assert len(ids) == default_nb
        assert connect.has_partition(collection, default_tag)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_tag_with_ids(self, connect, id_collection):
        '''
        target: test insert entities in collection created before, insert with ids
        method: create collection and insert entities in it, with the partition_tag param
        expected: the collection row count equals nb
        '''
        connect.create_partition(id_collection, default_tag)
        ids = [i for i in range(default_nb)]
        res_ids = connect.bulk_insert(id_collection, default_entities, ids, partition_tag=default_tag)
        assert res_ids == ids

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_default_tag(self, connect, collection):
        '''
        target: test insert entities into the default partition
        method: create a partition and insert into the collection without the tag param
        expected: the collection row count equals nb
        '''
        connect.create_partition(collection, default_tag)
        ids = connect.bulk_insert(collection, default_entities)
        connect.flush([collection])
        assert len(ids) == default_nb
        res_count = connect.count_entities(collection)
        assert res_count == default_nb

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_tag_not_existed(self, connect, collection):
        '''
        target: test insert entities in collection created before
        method: create collection and insert entities in it, with a partition_tag that does not exist
        expected: error raised
        '''
        tag = gen_unique_str()
        with pytest.raises(Exception) as e:
            ids = connect.bulk_insert(collection, default_entities, partition_tag=tag)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_tag_existed(self, connect, collection):
        '''
        target: test insert entities in collection created before
        method: create collection and insert entities in it repeatedly, with the partition_tag param
        expected: the collection row count equals 2 * nb
        '''
        connect.create_partition(collection, default_tag)
        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
        connect.flush([collection])
        res_count = connect.count_entities(collection)
        assert res_count == 2 * default_nb

    @pytest.mark.level(2)
    def test_insert_without_connect(self, dis_connect, collection):
        '''
        target: test insert entities without a connection
        method: create collection and insert entities in it, check if inserted successfully
        expected: raise exception
        '''
        with pytest.raises(Exception) as e:
            ids = dis_connect.bulk_insert(collection, default_entities)

    # Renamed with the "_B" suffix used elsewhere in this class: the original name
    # duplicated test_insert_collection_not_existed defined above and shadowed it.
    def test_insert_collection_not_existed_B(self, connect):
        '''
        target: test insert entities into a collection that does not exist
        method: insert entities into a non-existent collection, check the status
        expected: error raised
        '''
        with pytest.raises(Exception) as e:
            ids = connect.bulk_insert(gen_unique_str("not_exist_collection"), default_entities)

    def test_insert_dim_not_matched(self, connect, collection):
        '''
        target: test insert entities whose vector dimension is not equal to the collection dimension
        method: the entities dimension is half of the collection dimension, check the status
        expected: error raised
        '''
        vectors = gen_vectors(default_nb, int(default_dim) // 2)
        insert_entities = copy.deepcopy(default_entities)
        insert_entities[-1]["values"] = vectors
        with pytest.raises(Exception) as e:
            ids = connect.bulk_insert(collection, insert_entities)

    def test_insert_with_field_name_not_match(self, connect, collection):
        '''
        target: test insert entities, with the entity field name updated
        method: update entity field name
        expected: error raised
        '''
        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", "int64new")
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_field_type_not_match(self, connect, collection):
        '''
        target: test insert entities, with the entity field type updated
        method: update entity field type
        expected: error raised
        '''
        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.FLOAT)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    @pytest.mark.level(2)
    def test_insert_with_field_type_not_match_B(self, connect, collection):
        '''
        target: test insert entities, with the entity field type updated
        method: update entity field type
        expected: error raised
        '''
        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.DOUBLE)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    @pytest.mark.level(2)
    def test_insert_with_field_value_not_match(self, connect, collection):
        '''
        target: test insert entities, with the entity field value updated
        method: update entity field value
        expected: error raised
        '''
        tmp_entity = update_field_value(copy.deepcopy(default_entity), DataType.FLOAT, 's')
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_field_more(self, connect, collection):
        '''
        target: test insert entities, with more fields than the collection schema
        method: add an entity field
        expected: error raised
        '''
        tmp_entity = add_field(copy.deepcopy(default_entity))
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_field_vector_more(self, connect, collection):
        '''
        target: test insert entities, with more fields than the collection schema
        method: add an entity vector field
        expected: error raised
        '''
        tmp_entity = add_vector_field(default_nb, default_dim)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_field_less(self, connect, collection):
        '''
        target: test insert entities, with fewer fields than the collection schema
        method: remove an entity field
        expected: error raised
        '''
        tmp_entity = remove_field(copy.deepcopy(default_entity))
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_field_vector_less(self, connect, collection):
        '''
        target: test insert entities, with fewer fields than the collection schema
        method: remove the entity vector field
        expected: error raised
        '''
        tmp_entity = remove_vector_field(copy.deepcopy(default_entity))
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_no_field_vector_value(self, connect, collection):
        '''
        target: test insert entities, with no vector field value
        method: delete the "values" key of the vector field
        expected: error raised
        '''
        tmp_entity = copy.deepcopy(default_entity)
        del tmp_entity[-1]["values"]
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_no_field_vector_type(self, connect, collection):
        '''
        target: test insert entities, with no vector field type
        method: delete the "type" key of the vector field
        expected: error raised
        '''
        tmp_entity = copy.deepcopy(default_entity)
        del tmp_entity[-1]["type"]
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_no_field_vector_name(self, connect, collection):
        '''
        target: test insert entities, with no vector field name
        method: delete the "name" key of the vector field
        expected: error raised
        '''
        tmp_entity = copy.deepcopy(default_entity)
        del tmp_entity[-1]["name"]
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    @pytest.mark.level(2)
    @pytest.mark.timeout(30)
    def test_collection_insert_rows_count_multi_threading(self, args, collection):
        '''
        target: test that the collection rows_count is correct with multi threading
        method: create collection and insert entities in it (idmap),
            assert the value returned by the count_entities method is equal to the length of entities
        expected: the count is equal to the length of entities
        '''
        if args["handler"] == "HTTP":
            pytest.skip("Skip test in http mode")
        thread_num = 8
        threads = []
        milvus = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"], try_connect=False)

        def insert(thread_i):
            logging.getLogger().info("In thread-%d" % thread_i)
            milvus.bulk_insert(collection, default_entities)
            milvus.flush([collection])

        for i in range(thread_num):
            t = TestThread(target=insert, args=(i,))
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        res_count = milvus.count_entities(collection)
        assert res_count == thread_num * default_nb

    # TODO: unable to set config
    @pytest.mark.level(2)
    def _test_insert_disable_auto_flush(self, connect, collection):
        '''
        target: test insert entities with autoflush disabled
        method: disable autoflush, insert, then get the entities
        expected: the entities are not yet readable (get returns None)
        '''
        delete_nums = 500
        disable_flush(connect)
        ids = connect.bulk_insert(collection, default_entities)
        res = connect.get_entity_by_id(collection, ids[:delete_nums])
        assert len(res) == delete_nums
        assert res[0] is None


class TestInsertBinary:
    @pytest.fixture(
        scope="function",
        params=gen_binary_index()
    )
    def get_binary_index(self, request):
        request.param["metric_type"] = "JACCARD"
        return request.param

    def test_insert_binary_entities(self, connect, binary_collection):
        '''
        target: test insert entities in a binary collection
        method: create collection and insert binary entities in it
        expected: the collection row count equals nb
        '''
        ids = connect.bulk_insert(binary_collection, default_binary_entities)
        assert len(ids) == default_nb
        connect.flush()
        assert connect.count_entities(binary_collection) == default_nb

    def test_insert_binary_entities_new(self, connect, binary_collection):
        '''
        target: test insert entities in a binary collection, with the new entity format
        method: create collection and insert binary entities in it
        expected: the collection row count equals nb
        '''
        ids = connect.bulk_insert(binary_collection, default_binary_entities_new)
        assert len(ids) == default_nb
        connect.flush()
        assert connect.count_entities(binary_collection) == default_nb

    def test_insert_binary_tag(self, connect, binary_collection):
        '''
        target: test insert entities and create a partition tag
        method: create collection and insert binary entities in it, with the partition_tag param
        expected: the collection row count equals nb
        '''
        connect.create_partition(binary_collection, default_tag)
        ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
        assert len(ids) == default_nb
        assert connect.has_partition(binary_collection, default_tag)

    # TODO
    @pytest.mark.level(2)
    def test_insert_binary_multi_times(self, connect, binary_collection):
        '''
        target: test insert entities multiple times and flush at the end
        method: create collection, insert a binary entity multiple times, and flush once at the end
        expected: the collection row count equals nb
        '''
        for i in range(default_nb):
            ids = connect.bulk_insert(binary_collection, default_binary_entity)
            assert len(ids) == 1
        connect.flush([binary_collection])
        assert connect.count_entities(binary_collection) == default_nb

    def test_insert_binary_after_create_index(self, connect, binary_collection, get_binary_index):
        '''
        target: test insert binary entities after building the index
        method: build index and insert entities
        expected: no error raised
        '''
        connect.create_index(binary_collection, binary_field_name, get_binary_index)
        ids = connect.bulk_insert(binary_collection, default_binary_entities)
        assert len(ids) == default_nb
        connect.flush([binary_collection])
        info = connect.get_collection_info(binary_collection)
        fields = info["fields"]
        for field in fields:
            if field["name"] == binary_field_name:
                assert field["indexes"][0] == get_binary_index

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_binary_create_index(self, connect, binary_collection, get_binary_index):
        '''
        target: test build index after inserting binary vectors
        method: insert vector and build index
        expected: no error raised
        '''
        ids = connect.bulk_insert(binary_collection, default_binary_entities)
        assert len(ids) == default_nb
        connect.flush([binary_collection])
        connect.create_index(binary_collection, binary_field_name, get_binary_index)
        info = connect.get_collection_info(binary_collection)
        fields = info["fields"]
        for field in fields:
            if field["name"] == binary_field_name:
                assert field["indexes"][0] == get_binary_index

    def test_insert_binary_search(self, connect, binary_collection):
        '''
        target: test search vector a while after insert vector
        method: insert vector, flush, and search collection
        expected: no error raised
        '''
        ids = connect.bulk_insert(binary_collection, default_binary_entities)
        connect.flush([binary_collection])
        query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, 1, metric_type="JACCARD")
        res = connect.search(binary_collection, query)
        logging.getLogger().debug(res)
        assert res


class TestInsertAsync:
    @pytest.fixture(scope="function", autouse=True)
    def skip_http_check(self, args):
        if args["handler"] == "HTTP":
            pytest.skip("skip in http mode")

    @pytest.fixture(
        scope="function",
        params=[
            1,
            1000
        ],
    )
    def insert_count(self, request):
        yield request.param

    def check_status(self, result):
        logging.getLogger().info("In callback check status")
        assert not result

    def check_result(self, result):
        logging.getLogger().info("In callback check status")
        assert result

    def test_insert_async(self, connect, collection, insert_count):
        '''
        target: test async insert with different numbers of vectors
        method: set different vectors as the insert method params, with _async=True
        expected: the length of ids is equal to the length of vectors
        '''
        nb = insert_count
        future = connect.bulk_insert(collection, gen_entities(nb), _async=True)
        ids = future.result()
        connect.flush([collection])
        assert len(ids) == nb

    @pytest.mark.level(2)
    def test_insert_async_false(self, connect, collection, insert_count):
        '''
        target: test insert with _async=False and different numbers of vectors
        method: set different vectors as the insert method params
        expected: the length of ids is equal to the length of vectors
        '''
        nb = insert_count
        ids = connect.bulk_insert(collection, gen_entities(nb), _async=False)
        # ids = future.result()
        connect.flush([collection])
        assert len(ids) == nb

    def test_insert_async_callback(self, connect, collection, insert_count):
        '''
        target: test async insert with a callback
        method: insert with _async=True and a _callback function
        expected: the callback is invoked on the returned future
        '''
        nb = insert_count
        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_status)
        future.done()

    @pytest.mark.level(2)
    def test_insert_async_long(self, connect, collection):
        '''
        target: test async insert with a large number of vectors
        method: insert 50000 entities with _async=True and a _callback function
        expected: the length of ids is equal to the length of vectors
        '''
        nb = 50000
        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
        result = future.result()
        assert len(result) == nb
        connect.flush([collection])
        count = connect.count_entities(collection)
        logging.getLogger().info(count)
        assert count == nb

    @pytest.mark.level(2)
    def test_insert_async_callback_timeout(self, connect, collection):
        '''
        target: test async insert with a timeout that is too short
        method: insert 100000 entities with _async=True, a _callback function and timeout=1
        expected: the future raises an exception and nothing is inserted
        '''
        nb = 100000
        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_status, timeout=1)
        with pytest.raises(Exception) as e:
            result = future.result()
        count = connect.count_entities(collection)
        assert count == 0

    def test_insert_async_invalid_params(self, connect):
        '''
        target: test async insert into a collection that does not exist
        method: insert into a randomly named collection with _async=True
        expected: the future raises an exception
        '''
        collection_new = gen_unique_str()
        future = connect.bulk_insert(collection_new, default_entities, _async=True)
        with pytest.raises(Exception) as e:
            result = future.result()

    def test_insert_async_invalid_params_raise_exception(self, connect, collection):
        '''
        target: test async insert with an empty entities list
        method: insert an empty list with _async=True
        expected: the future raises an exception
        '''
        entities = []
        future = connect.bulk_insert(collection, entities, _async=True)
        with pytest.raises(Exception) as e:
            future.result()


class TestInsertMultiCollections:
    """
    ******************************************************************
    The following cases are used to test `insert` function
    ******************************************************************
    """

    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_simple_index(self, request, connect):
        logging.getLogger().info(request.param)
        if str(connect._cmd("mode")) == "CPU":
            if request.param["index_type"] in index_cpu_not_support():
                pytest.skip("sq8h not support in CPU mode")
        return request.param

    def test_insert_vector_multi_collections(self, connect):
        '''
        target: test insert entities
        method: create 10 collections and insert entities into them in turn
        expected: row count correct
        '''
        collection_num = 10
        collection_list = []
        for i in range(collection_num):
            collection_name = gen_unique_str(uid)
            collection_list.append(collection_name)
            connect.create_collection(collection_name, default_fields)
            ids = connect.bulk_insert(collection_name, default_entities)
            connect.flush([collection_name])
            assert len(ids) == default_nb
            count = connect.count_entities(collection_name)
            assert count == default_nb

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_drop_collection_insert_vector_another(self, connect, collection):
        '''
        target: test insert vector into collection_1 after collection_2 is deleted
        method: delete collection_2 and insert vector into collection_1
        expected: row count equals the length of entities inserted
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        connect.drop_collection(collection)
        ids = connect.bulk_insert(collection_name, default_entity)
        connect.flush([collection_name])
        assert len(ids) == 1

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_create_index_insert_vector_another(self, connect, collection, get_simple_index):
        '''
        target: test insert vector into collection_2 after building an index for collection_1
        method: build index and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        connect.create_index(collection, field_name, get_simple_index)
        ids = connect.bulk_insert(collection, default_entity)
        connect.drop_collection(collection_name)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_vector_create_index_another(self, connect, collection, get_simple_index):
        '''
        target: test insert vector into collection_2 after building an index for collection_1
        method: build index and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        ids = connect.bulk_insert(collection, default_entity)
        connect.create_index(collection, field_name, get_simple_index)
        count = connect.count_entities(collection_name)
        assert count == 0

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_vector_sleep_create_index_another(self, connect, collection, get_simple_index):
        '''
        target: test insert vector into collection_2 a while after building an index for collection_1
        method: build index and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        ids = connect.bulk_insert(collection, default_entity)
        connect.flush([collection])
        connect.create_index(collection, field_name, get_simple_index)
        count = connect.count_entities(collection)
        assert count == 1

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_search_vector_insert_vector_another(self, connect, collection):
        '''
        target: test insert vector into collection_1 after searching collection_2
        method: search collection and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        res = connect.search(collection, default_single_query)
        logging.getLogger().debug(res)
        ids = connect.bulk_insert(collection_name, default_entity)
        connect.flush()
        count = connect.count_entities(collection_name)
        assert count == 1

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_vector_search_vector_another(self, connect, collection):
        '''
        target: test insert vector into collection_1 after searching collection_2
        method: search collection and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        ids = connect.bulk_insert(collection, default_entity)
        result = connect.search(collection_name, default_single_query)

    @pytest.mark.timeout(ADD_TIMEOUT)
    def test_insert_vector_sleep_search_vector_another(self, connect, collection):
        '''
        target: test insert vector into collection_1 a while after searching collection_2
        method: search collection, sleep, and insert vector
        expected: status ok
        '''
        collection_name = gen_unique_str(uid)
        connect.create_collection(collection_name, default_fields)
        ids = connect.bulk_insert(collection, default_entity)
        connect.flush([collection])
        result = connect.search(collection_name, default_single_query)


class TestInsertInvalid(object):
    """
    Test inserting entities with invalid parameters (collection name, tag, field name/type/value, ids)
    """

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_collection_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_tag_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_type(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_int_value(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_ints()
    )
    def get_entity_id(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_vectors()
    )
    def get_field_vectors_value(self, request):
        yield request.param

    def test_insert_ids_invalid(self, connect, id_collection, get_entity_id):
        '''
        target: test insert with customized ids that are not int64
        method: create collection and insert entities in it
        expected: raise an exception
        '''
        entity_id = get_entity_id
        ids = [entity_id for _ in range(default_nb)]
        with pytest.raises(Exception):
            connect.bulk_insert(id_collection, default_entities, ids)

    def test_insert_with_invalid_collection_name(self, connect, get_collection_name):
        collection_name = get_collection_name
        with pytest.raises(Exception):
            connect.bulk_insert(collection_name, default_entity)

    def test_insert_with_invalid_tag_name(self, connect, collection, get_tag_name):
        tag_name = get_tag_name
        connect.create_partition(collection, default_tag)
        if tag_name is not None:
            with pytest.raises(Exception):
                connect.bulk_insert(collection, default_entity, partition_tag=tag_name)
        else:
            connect.bulk_insert(collection, default_entity, partition_tag=tag_name)

    def test_insert_with_invalid_field_name(self, connect, collection, get_field_name):
        field_name = get_field_name
        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", get_field_name)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_invalid_field_type(self, connect, collection, get_field_type):
        field_type = get_field_type
        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'float', field_type)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_invalid_field_value(self, connect, collection, get_field_int_value):
        field_value = get_field_int_value
        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'int64', field_value)
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)

    def test_insert_with_invalid_field_vector_value(self, connect, collection, get_field_vectors_value):
        tmp_entity = copy.deepcopy(default_entity)
        src_vector = tmp_entity[-1]["values"]
        src_vector[0][1] = get_field_vectors_value
        with pytest.raises(Exception):
            connect.bulk_insert(collection, tmp_entity)


class TestInsertInvalidBinary(object):
    """
    Test inserting binary entities with invalid parameters
    """

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_collection_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_tag_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_name(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_type(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_field_int_value(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_ints()
    )
    def get_entity_id(self, request):
        yield request.param

    @pytest.fixture(
        scope="function",
        params=gen_invalid_vectors()
    )
    def get_field_vectors_value(self, request):
        yield request.param

    @pytest.mark.level(2)
    def test_insert_with_invalid_field_name(self, connect, binary_collection, get_field_name):
        tmp_entity = update_field_name(copy.deepcopy(default_binary_entity), "int64", get_field_name)
        with pytest.raises(Exception):
            connect.bulk_insert(binary_collection, tmp_entity)

    @pytest.mark.level(2)
    def test_insert_with_invalid_field_value(self, connect, binary_collection, get_field_int_value):
        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', get_field_int_value)
        with pytest.raises(Exception):
            connect.bulk_insert(binary_collection, tmp_entity)

    @pytest.mark.level(2)
    def test_insert_with_invalid_field_vector_value(self, connect, binary_collection, get_field_vectors_value):
        tmp_entity = copy.deepcopy(default_binary_entity)
        src_vector = tmp_entity[-1]["values"]
        src_vector[0][1] = get_field_vectors_value
        with pytest.raises(Exception):
            connect.bulk_insert(binary_collection, tmp_entity)

    @pytest.mark.level(2)
    def test_insert_ids_invalid(self, connect, binary_id_collection, get_entity_id):
        '''
        target: test insert with customized ids that are not int64
        method: create collection and insert entities in it
        expected: raise an exception
        '''
        entity_id = get_entity_id
        ids = [entity_id for _ in range(default_nb)]
        with pytest.raises(Exception):
            connect.bulk_insert(binary_id_collection, default_binary_entities, ids)

    @pytest.mark.level(2)
    def test_insert_with_invalid_field_type(self, connect, binary_collection, get_field_type):
        field_type = get_field_type
        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', field_type)
        with pytest.raises(Exception):
            connect.bulk_insert(binary_collection, tmp_entity)

    # Renamed with the "_B" suffix used elsewhere in this file: the original name duplicated
    # test_insert_with_invalid_field_vector_value defined above and shadowed it.
    @pytest.mark.level(2)
    def test_insert_with_invalid_field_vector_value_B(self, connect, binary_collection, get_field_vectors_value):
        tmp_entity = copy.deepcopy(default_binary_entities)
        src_vector = tmp_entity[-1]["values"]
        src_vector[1] = get_field_vectors_value
        with pytest.raises(Exception):
            connect.bulk_insert(binary_collection, tmp_entity)