rename interfaces in cases (#2298)

Signed-off-by: zw <zw@zilliz.com>
del-zhenwu 2020-05-11 21:13:06 +08:00 committed by GitHub
parent 15a603fa46
commit 69eeb4dfc0
17 changed files with 556 additions and 556 deletions
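
For quick reference, the client interface renames applied across the 17 changed test files are summarized below. The Python mapping is only an illustrative summary collected from the hunks that follow; it is not part of the commit itself.

# Illustrative summary of the interface renames in this commit, collected
# from the diff hunks below. Each key is the old Milvus client method name
# used by the tests, each value its replacement.
RENAMED_INTERFACES = {
    "show_collections": "list_collections",
    "count_collection": "count_entities",
    "describe_collection": "get_collection_info",
    "preload_collection": "load_collection",
    "collection_info": "get_collection_stats",
    "delete_by_id": "delete_entity_by_id",
    "describe_index": "get_index_info",
}

for old_name, new_name in RENAMED_INTERFACES.items():
    print(f"{old_name:20s} -> {new_name}")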


@ -108,7 +108,7 @@ def collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -134,7 +134,7 @@ def ip_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -160,7 +160,7 @@ def jac_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -185,7 +185,7 @@ def ham_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -210,7 +210,7 @@ def tanimoto_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -234,7 +234,7 @@ def substructure_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
@ -258,7 +258,7 @@ def superstructure_collection(request, connect):
pytest.exit("collection can not be created, exit pytest ...")
def teardown():
status, collection_names = connect.show_collections()
status, collection_names = connect.list_collections()
for collection_name in collection_names:
connect.drop_collection(collection_name)
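
Because the rendered diff interleaves removed and added lines without +/- markers, here is how each fixture teardown above reads after the rename; a sketch assuming connect is the Milvus client fixture used throughout these tests.

def teardown():
    # Post-rename teardown shared by the fixtures above: list every existing
    # collection and drop it so the next test starts from a clean state.
    status, collection_names = connect.list_collections()
    for collection_name in collection_names:
        connect.drop_collection(collection_name)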


@ -615,7 +615,7 @@ class TestAddBase:
for p in processes:
p.join()
time.sleep(2)
status, count = milvus.count_collection(collection)
status, count = milvus.count_entities(collection)
assert count == process_num * loop_num
@pytest.mark.level(2)
@ -624,7 +624,7 @@ class TestAddBase:
'''
target: test collection rows_count is correct or not with multi threading
method: create collection and add vectors in it(idmap),
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
if args["handler"] == "HTTP":
@ -652,7 +652,7 @@ class TestAddBase:
x.start()
for th in threads:
th.join()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert res == thread_num * nb
def test_add_vector_multi_collections(self, connect):
@ -755,7 +755,7 @@ class TestAddAsync:
assert status.OK()
assert len(result) == nb
connect.flush([collection])
status, count = connect.count_collection(collection)
status, count = connect.count_entities(collection)
assert status.OK()
logging.getLogger().info(status)
logging.getLogger().info(count)
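
The add-then-count check above keeps the same shape after the rename; a minimal sketch, assuming connect, collection, and vectors come from the surrounding fixtures.

# Insert vectors, flush so they become visible, then verify the row count
# with the renamed count_entities (formerly count_collection).
status, ids = connect.add_vectors(collection, vectors)
connect.flush([collection])
status, count = connect.count_entities(collection)
assert status.OK()
assert count == len(vectors)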


@ -189,7 +189,7 @@ class TestCollection:
'metric_type': MetricType.L2}
status = connect.create_collection(param)
logging.getLogger().info(status)
status, result = connect.describe_collection(collection_name)
status, result = connect.get_collection_info(collection_name)
logging.getLogger().info(result)
assert result.index_file_size == 1024
@ -204,13 +204,13 @@ class TestCollection:
'dimension': dim,
'index_file_size': index_file_size}
status = connect.create_collection(param)
status, result = connect.describe_collection(collection_name)
status, result = connect.get_collection_info(collection_name)
logging.getLogger().info(result)
assert result.metric_type == MetricType.L2
"""
******************************************************************
The following cases are used to test `describe_collection` function
The following cases are used to test `get_collection_info` function
******************************************************************
"""
@ -226,12 +226,12 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.L2}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.L2
@pytest.mark.level(2)
def test_collection_describe_collection_name_ip(self, connect):
def test_collection_get_collection_info_name_ip(self, connect):
'''
target: test describe collection created with correct params
method: create collection, assert the value returned by describe method
@ -243,12 +243,12 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.IP}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.IP
@pytest.mark.level(2)
def test_collection_describe_collection_name_jaccard(self, connect):
def test_collection_get_collection_info_name_jaccard(self, connect):
'''
target: test describe collection created with correct params
method: create collection, assert the value returned by describe method
@ -260,12 +260,12 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.JACCARD}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.JACCARD
@pytest.mark.level(2)
def test_collection_describe_collection_name_hamming(self, connect):
def test_collection_get_collection_info_name_hamming(self, connect):
'''
target: test describe collection created with correct params
method: create collection, assert the value returned by describe method
@ -277,11 +277,11 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.HAMMING}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.HAMMING
def test_collection_describe_collection_name_substructure(self, connect):
def test_collection_get_collection_info_name_substructure(self, connect):
'''
target: test describe collection created with correct params
method: create collection, assert the value returned by describe method
@ -293,11 +293,11 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.SUBSTRUCTURE}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.SUBSTRUCTURE
def test_collection_describe_collection_name_superstructure(self, connect):
def test_collection_get_collection_info_name_superstructure(self, connect):
'''
target: test describe collection created with correct params
method: create collection, assert the value returned by describe method
@ -309,13 +309,13 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.SUPERSTRUCTURE}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.collection_name == collection_name
assert res.metric_type == MetricType.SUPERSTRUCTURE
# TODO: enable
@pytest.mark.level(2)
def _test_collection_describe_collection_name_multiprocessing(self, connect, args):
def _test_collection_get_collection_info_name_multiprocessing(self, connect, args):
'''
target: test describe collection created with multiprocess
method: create collection, assert the value returned by describe method
@ -329,7 +329,7 @@ class TestCollection:
connect.create_collection(param)
def describecollection(milvus):
status, res = milvus.describe_collection(collection_name)
status, res = milvus.get_collection_info(collection_name)
assert res.collection_name == collection_name
process_num = 4
@ -350,7 +350,7 @@ class TestCollection:
# expected: describe raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.describe_collection(collection)
# status = dis_connect.get_collection_info(collection)
def test_collection_describe_dimension(self, connect):
'''
@ -364,7 +364,7 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.L2}
connect.create_collection(param)
status, res = connect.describe_collection(collection_name)
status, res = connect.get_collection_info(collection_name)
assert res.dimension == dim+1
"""
@ -614,14 +614,14 @@ class TestCollection:
"""
******************************************************************
The following cases are used to test `show_collections` function
The following cases are used to test `list_collections` function
******************************************************************
"""
def test_show_collections(self, connect):
def test_list_collections(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -630,14 +630,14 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.L2}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
def test_show_collections_ip(self, connect):
def test_list_collections_ip(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -646,14 +646,14 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.IP}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
def test_show_collections_jaccard(self, connect):
def test_list_collections_jaccard(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -662,14 +662,14 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.JACCARD}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
def test_show_collections_hamming(self, connect):
def test_list_collections_hamming(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -678,14 +678,14 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.HAMMING}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
def test_show_collections_substructure(self, connect):
def test_list_collections_substructure(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -694,14 +694,14 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.SUBSTRUCTURE}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
def test_show_collections_superstructure(self, connect):
def test_list_collections_superstructure(self, connect):
'''
target: test show collections is correct or not, if collection created
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -710,43 +710,43 @@ class TestCollection:
'index_file_size': index_file_size,
'metric_type': MetricType.SUPERSTRUCTURE}
connect.create_collection(param)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert collection_name in result
# @pytest.mark.level(2)
# def test_show_collections_without_connection(self, dis_connect):
# def test_list_collections_without_connection(self, dis_connect):
# '''
# target: test show_collections, without connection
# method: calling show_collections with correct params, with a disconnected instance
# expected: show_collections raise exception
# target: test list_collections, without connection
# method: calling list_collections with correct params, with a disconnected instance
# expected: list_collections raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.show_collections()
# status = dis_connect.list_collections()
@pytest.mark.level(2)
def test_show_collections_no_collection(self, connect):
def test_list_collections_no_collection(self, connect):
'''
target: test show collections is correct or not, if no collection in db
method: delete all collections,
assert the value returned by show_collections method is equal to []
assert the value returned by list_collections method is equal to []
expected: the status is ok, and the result is equal to []
'''
status, result = connect.show_collections()
status, result = connect.list_collections()
if result:
for collection_name in result:
connect.drop_collection(collection_name)
time.sleep(drop_collection_interval_time)
status, result = connect.show_collections()
status, result = connect.list_collections()
assert status.OK()
assert len(result) == 0
# TODO: enable
@pytest.mark.level(2)
def _test_show_collections_multiprocessing(self, connect, args):
def _test_list_collections_multiprocessing(self, connect, args):
'''
target: test show collections is correct or not with processes
method: create collection, assert the value returned by show_collections method is equal to 0
method: create collection, assert the value returned by list_collections method is equal to 0
expected: collection_name in show collections
'''
collection_name = gen_unique_str("test_collection")
@ -756,7 +756,7 @@ class TestCollection:
'metric_type': MetricType.L2}
connect.create_collection(param)
def showcollections(milvus):
status, result = milvus.show_collections()
status, result = milvus.list_collections()
assert status.OK()
assert collection_name in result
@ -773,7 +773,7 @@ class TestCollection:
"""
******************************************************************
The following cases are used to test `preload_collection` function
The following cases are used to test `load_collection` function
******************************************************************
"""
@ -793,74 +793,74 @@ class TestCollection:
return request.param
@pytest.mark.level(1)
def test_preload_collection(self, connect, collection, get_simple_index):
def test_load_collection(self, connect, collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
status = connect.preload_collection(collection)
status = connect.load_collection(collection)
assert status.OK()
@pytest.mark.level(1)
def test_preload_collection_ip(self, connect, ip_collection, get_simple_index):
def test_load_collection_ip(self, connect, ip_collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
status, ids = connect.add_vectors(ip_collection, vectors)
status = connect.create_index(ip_collection, index_type, index_param)
status = connect.preload_collection(ip_collection)
status = connect.load_collection(ip_collection)
assert status.OK()
@pytest.mark.level(1)
def test_preload_collection_jaccard(self, connect, jac_collection, get_simple_index):
def test_load_collection_jaccard(self, connect, jac_collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
status, ids = connect.add_vectors(jac_collection, vectors)
status = connect.create_index(jac_collection, index_type, index_param)
status = connect.preload_collection(jac_collection)
status = connect.load_collection(jac_collection)
assert status.OK()
@pytest.mark.level(1)
def test_preload_collection_hamming(self, connect, ham_collection, get_simple_index):
def test_load_collection_hamming(self, connect, ham_collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
status, ids = connect.add_vectors(ham_collection, vectors)
status = connect.create_index(ham_collection, index_type, index_param)
status = connect.preload_collection(ham_collection)
status = connect.load_collection(ham_collection)
assert status.OK()
@pytest.mark.level(2)
def test_preload_collection_not_existed(self, connect, collection, get_simple_index):
def test_load_collection_not_existed(self, connect, collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
collection_name = gen_unique_str()
status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
status = connect.preload_collection(collection_name)
status = connect.load_collection(collection_name)
assert not status.OK()
@pytest.mark.level(2)
def test_preload_collection_not_existed_ip(self, connect, ip_collection, get_simple_index):
def test_load_collection_not_existed_ip(self, connect, ip_collection, get_simple_index):
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
collection_name = gen_unique_str()
status, ids = connect.add_vectors(ip_collection, vectors)
status = connect.create_index(ip_collection, index_type, index_param)
status = connect.preload_collection(collection_name)
status = connect.load_collection(collection_name)
assert not status.OK()
@pytest.mark.level(1)
def test_preload_collection_no_vectors(self, connect, collection):
status = connect.preload_collection(collection)
def test_load_collection_no_vectors(self, connect, collection):
status = connect.load_collection(collection)
assert status.OK()
@pytest.mark.level(2)
def test_preload_collection_no_vectors_ip(self, connect, ip_collection):
status = connect.preload_collection(ip_collection)
def test_load_collection_no_vectors_ip(self, connect, ip_collection):
status = connect.load_collection(ip_collection)
assert status.OK()
# TODO: psutils get memory usage
@pytest.mark.level(1)
def test_preload_collection_memory_usage(self, connect, collection):
def test_load_collection_memory_usage(self, connect, collection):
pass
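
The load tests above all follow one pattern after the rename; a minimal sketch, assuming the connect/collection fixtures and the get_simple_index parametrization used in this file.

# Insert vectors, build the parametrized index, then bring the collection
# into memory with the renamed load_collection (formerly preload_collection).
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
status = connect.load_collection(collection)
assert status.OK()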
@ -894,10 +894,10 @@ class TestCollectionInvalid(object):
with pytest.raises(Exception) as e:
status = connect.create_collection(param)
def test_preload_collection_with_invalid_collectionname(self, connect):
def test_load_collection_with_invalid_collectionname(self, connect):
collection_name = ''
with pytest.raises(Exception) as e:
status = connect.preload_collection(collection_name)
status = connect.load_collection(collection_name)
class TestCreateCollectionDimInvalid(object):
@ -993,8 +993,8 @@ def search_collection(connect, **params):
params={"nprobe": params["nprobe"]})
return status
def preload_collection(connect, **params):
status = connect.preload_collection(params["collection_name"])
def load_collection(connect, **params):
status = connect.load_collection(params["collection_name"])
return status
def has(connect, **params):
@ -1002,7 +1002,7 @@ def has(connect, **params):
return status
def show(connect, **params):
status, result = connect.show_collections()
status, result = connect.list_collections()
return status
def delete(connect, **params):
@ -1010,11 +1010,11 @@ def delete(connect, **params):
return status
def describe(connect, **params):
status, result = connect.describe_collection(params["collection_name"])
status, result = connect.get_collection_info(params["collection_name"])
return status
def rowcount(connect, **params):
status, result = connect.count_collection(params["collection_name"])
status, result = connect.count_entities(params["collection_name"])
return status
def create_index(connect, **params):
@ -1028,7 +1028,7 @@ func_map = {
11:describe,
12:rowcount,
13:search_collection,
14:preload_collection,
14:load_collection,
15:create_index,
30:delete
}
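
The helpers above are dispatched through func_map by numeric key in the stability runs; a hedged usage sketch (the collection name is hypothetical, and both helpers shown read only collection_name from their keyword arguments).

# Key 14 now resolves to load_collection and key 12 to rowcount, which wraps
# the renamed count_entities; the collection is assumed to already exist.
name = "example_collection"
status = func_map[14](connect, collection_name=name)   # load_collection
assert status.OK()
status = func_map[12](connect, collection_name=name)   # rowcount / count_entities
assert status.OK()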


@ -48,21 +48,21 @@ class TestCollectionCount:
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
vectors = gen_vectors(nb, dim)
res = connect.add_vectors(collection_name=collection, records=vectors)
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb
def test_collection_rows_count_partition(self, connect, collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection, create partition and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
@ -71,14 +71,14 @@ class TestCollectionCount:
assert status.OK()
res = connect.add_vectors(collection_name=collection, records=vectors, partition_tag=tag)
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb
def test_collection_rows_count_multi_partitions_A(self, connect, collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
new_tag = "new_tag"
@ -89,14 +89,14 @@ class TestCollectionCount:
assert status.OK()
res = connect.add_vectors(collection_name=collection, records=vectors)
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb
def test_collection_rows_count_multi_partitions_B(self, connect, collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add vectors in one of the partitions,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
new_tag = "new_tag"
@ -107,14 +107,14 @@ class TestCollectionCount:
assert status.OK()
res = connect.add_vectors(collection_name=collection, records=vectors, partition_tag=tag)
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb
def test_collection_rows_count_multi_partitions_C(self, connect, collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection, create partitions and add vectors in one of the partitions,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the collection count is equal to the length of vectors
'''
new_tag = "new_tag"
@ -126,14 +126,14 @@ class TestCollectionCount:
res = connect.add_vectors(collection_name=collection, records=vectors, partition_tag=tag)
res = connect.add_vectors(collection_name=collection, records=vectors, partition_tag=new_tag)
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb * 2
def test_collection_rows_count_after_index_created(self, connect, collection, get_simple_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
@ -142,24 +142,24 @@ class TestCollectionCount:
res = connect.add_vectors(collection_name=collection, records=vectors)
connect.flush([collection])
connect.create_index(collection, index_type, index_param)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nb
# @pytest.mark.level(2)
# def test_count_without_connection(self, collection, dis_connect):
# '''
# target: test count_collection, without connection
# method: calling count_collection with correct params, with a disconnected instance
# expected: count_collection raise exception
# target: test count_entities, without connection
# method: calling count_entities with correct params, with a disconnected instance
# expected: count_entities raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.count_collection(collection)
# status = dis_connect.count_entities(collection)
def test_collection_rows_count_no_vectors(self, connect, collection):
'''
target: test collection rows_count is correct or not, if collection is empty
method: create collection and no vectors in it,
assert the value returned by count_collection method is equal to 0
assert the value returned by count_entities method is equal to 0
expected: the count is equal to 0
'''
collection_name = gen_unique_str()
@ -167,7 +167,7 @@ class TestCollectionCount:
'dimension': dim,
'index_file_size': index_file_size}
connect.create_collection(param)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == 0
# TODO: enable
@ -177,7 +177,7 @@ class TestCollectionCount:
'''
target: test collection rows_count is correct or not with multiprocess
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 2
@ -186,7 +186,7 @@ class TestCollectionCount:
time.sleep(add_time_interval)
def rows_count(milvus):
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
logging.getLogger().info(status)
assert res == nq
@ -205,7 +205,7 @@ class TestCollectionCount:
'''
target: test collection rows_count is correct or not with multiple collections of L2
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 100
@ -222,7 +222,7 @@ class TestCollectionCount:
res = connect.add_vectors(collection_name=collection_name, records=vectors)
connect.flush(collection_list)
for i in range(20):
status, res = connect.count_collection(collection_list[i])
status, res = connect.count_entities(collection_list[i])
assert status.OK()
assert res == nq
@ -263,21 +263,21 @@ class TestCollectionCountIP:
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
vectors = gen_vectors(nb, dim)
res = connect.add_vectors(collection_name=ip_collection, records=vectors)
connect.flush([ip_collection])
status, res = connect.count_collection(ip_collection)
status, res = connect.count_entities(ip_collection)
assert res == nb
def test_collection_rows_count_after_index_created(self, connect, ip_collection, get_simple_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
index_param = get_simple_index["index_param"]
index_type = get_simple_index["index_type"]
@ -286,24 +286,24 @@ class TestCollectionCountIP:
res = connect.add_vectors(collection_name=ip_collection, records=vectors)
connect.flush([ip_collection])
connect.create_index(ip_collection, index_type, index_param)
status, res = connect.count_collection(ip_collection)
status, res = connect.count_entities(ip_collection)
assert res == nb
# @pytest.mark.level(2)
# def test_count_without_connection(self, ip_collection, dis_connect):
# '''
# target: test count_collection, without connection
# method: calling count_collection with correct params, with a disconnected instance
# expected: count_collection raise exception
# target: test count_entities, without connection
# method: calling count_entities with correct params, with a disconnected instance
# expected: count_entities raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.count_collection(ip_collection)
# status = dis_connect.count_entities(ip_collection)
def test_collection_rows_count_no_vectors(self, connect, ip_collection):
'''
target: test collection rows_count is correct or not, if collection is empty
method: create collection and no vectors in it,
assert the value returned by count_collection method is equal to 0
assert the value returned by count_entities method is equal to 0
expected: the count is equal to 0
'''
collection_name = gen_unique_str("test_collection")
@ -311,7 +311,7 @@ class TestCollectionCountIP:
'dimension': dim,
'index_file_size': index_file_size}
connect.create_collection(param)
status, res = connect.count_collection(ip_collection)
status, res = connect.count_entities(ip_collection)
assert res == 0
# TODO: enable
@ -320,7 +320,7 @@ class TestCollectionCountIP:
'''
target: test collection rows_count is correct or not with multiprocess
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 2
@ -329,7 +329,7 @@ class TestCollectionCountIP:
time.sleep(add_time_interval)
def rows_count(milvus):
status, res = milvus.count_collection(ip_collection)
status, res = milvus.count_entities(ip_collection)
logging.getLogger().info(status)
assert res == nq
@ -348,7 +348,7 @@ class TestCollectionCountIP:
'''
target: test collection rows_count is correct or not with multiple collections of IP
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 100
@ -365,7 +365,7 @@ class TestCollectionCountIP:
res = connect.add_vectors(collection_name=collection_name, records=vectors)
connect.flush(collection_list)
for i in range(20):
status, res = connect.count_collection(collection_list[i])
status, res = connect.count_entities(collection_list[i])
assert status.OK()
assert res == nq
@ -405,21 +405,21 @@ class TestCollectionCountJAC:
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
tmp, vectors = gen_binary_vectors(nb, dim)
res = connect.add_vectors(collection_name=jac_collection, records=vectors)
connect.flush([jac_collection])
status, res = connect.count_collection(jac_collection)
status, res = connect.count_entities(jac_collection)
assert res == nb
def test_collection_rows_count_after_index_created(self, connect, jac_collection, get_jaccard_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
nb = 100
index_param = get_jaccard_index["index_param"]
@ -428,24 +428,24 @@ class TestCollectionCountJAC:
res = connect.add_vectors(collection_name=jac_collection, records=vectors)
connect.flush([jac_collection])
connect.create_index(jac_collection, index_type, index_param)
status, res = connect.count_collection(jac_collection)
status, res = connect.count_entities(jac_collection)
assert res == nb
# @pytest.mark.level(2)
# def test_count_without_connection(self, jac_collection, dis_connect):
# '''
# target: test count_collection, without connection
# method: calling count_collection with correct params, with a disconnected instance
# expected: count_collection raise exception
# target: test count_entities, without connection
# method: calling count_entities with correct params, with a disconnected instance
# expected: count_entities raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.count_collection(jac_collection)
# status = dis_connect.count_entities(jac_collection)
def test_collection_rows_count_no_vectors(self, connect, jac_collection):
'''
target: test collection rows_count is correct or not, if collection is empty
method: create collection and no vectors in it,
assert the value returned by count_collection method is equal to 0
assert the value returned by count_entities method is equal to 0
expected: the count is equal to 0
'''
collection_name = gen_unique_str("test_collection")
@ -453,14 +453,14 @@ class TestCollectionCountJAC:
'dimension': dim,
'index_file_size': index_file_size}
connect.create_collection(param)
status, res = connect.count_collection(jac_collection)
status, res = connect.count_entities(jac_collection)
assert res == 0
def test_collection_rows_count_multi_collections(self, connect):
'''
target: test collection rows_count is correct or not with multiple collections of IP
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 100
@ -477,7 +477,7 @@ class TestCollectionCountJAC:
res = connect.add_vectors(collection_name=collection_name, records=vectors)
connect.flush(collection_list)
for i in range(20):
status, res = connect.count_collection(collection_list[i])
status, res = connect.count_entities(collection_list[i])
assert status.OK()
assert res == nq
@ -538,49 +538,49 @@ class TestCollectionCountBinary:
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
tmp, vectors = gen_binary_vectors(nb, dim)
res = connect.add_vectors(collection_name=ham_collection, records=vectors)
connect.flush([ham_collection])
status, res = connect.count_collection(ham_collection)
status, res = connect.count_entities(ham_collection)
assert res == nb
def test_collection_rows_count_substructure(self, connect, substructure_collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
tmp, vectors = gen_binary_vectors(nb, dim)
res = connect.add_vectors(collection_name=substructure_collection, records=vectors)
connect.flush([substructure_collection])
status, res = connect.count_collection(substructure_collection)
status, res = connect.count_entities(substructure_collection)
assert res == nb
def test_collection_rows_count_superstructure(self, connect, superstructure_collection, add_vectors_nb):
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
tmp, vectors = gen_binary_vectors(nb, dim)
res = connect.add_vectors(collection_name=superstructure_collection, records=vectors)
connect.flush([superstructure_collection])
status, res = connect.count_collection(superstructure_collection)
status, res = connect.count_entities(superstructure_collection)
assert res == nb
def test_collection_rows_count_after_index_created(self, connect, ham_collection, get_hamming_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
nb = 100
index_type = get_hamming_index["index_type"]
@ -589,14 +589,14 @@ class TestCollectionCountBinary:
res = connect.add_vectors(collection_name=ham_collection, records=vectors)
connect.flush([ham_collection])
connect.create_index(ham_collection, index_type, index_param)
status, res = connect.count_collection(ham_collection)
status, res = connect.count_entities(ham_collection)
assert res == nb
def test_collection_rows_count_after_index_created_substructure(self, connect, substructure_collection, get_substructure_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
nb = 100
index_type = get_substructure_index["index_type"]
@ -605,14 +605,14 @@ class TestCollectionCountBinary:
res = connect.add_vectors(collection_name=substructure_collection, records=vectors)
connect.flush([substructure_collection])
connect.create_index(substructure_collection, index_type, index_param)
status, res = connect.count_collection(substructure_collection)
status, res = connect.count_entities(substructure_collection)
assert res == nb
def test_collection_rows_count_after_index_created_superstructure(self, connect, superstructure_collection, get_superstructure_index):
'''
target: test count_collection, after index have been created
method: add vectors in db, and create index, then calling count_collection with correct params
expected: count_collection raise exception
target: test count_entities, after index have been created
method: add vectors in db, and create index, then calling count_entities with correct params
expected: count_entities raise exception
'''
nb = 100
index_type = get_superstructure_index["index_type"]
@ -621,24 +621,24 @@ class TestCollectionCountBinary:
res = connect.add_vectors(collection_name=superstructure_collection, records=vectors)
connect.flush([superstructure_collection])
connect.create_index(superstructure_collection, index_type, index_param)
status, res = connect.count_collection(superstructure_collection)
status, res = connect.count_entities(superstructure_collection)
assert res == nb
# @pytest.mark.level(2)
# def test_count_without_connection(self, ham_collection, dis_connect):
# '''
# target: test count_collection, without connection
# method: calling count_collection with correct params, with a disconnected instance
# expected: count_collection raise exception
# target: test count_entities, without connection
# method: calling count_entities with correct params, with a disconnected instance
# expected: count_entities raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.count_collection(ham_collection)
# status = dis_connect.count_entities(ham_collection)
def test_collection_rows_count_no_vectors(self, connect, ham_collection):
'''
target: test collection rows_count is correct or not, if collection is empty
method: create collection and no vectors in it,
assert the value returned by count_collection method is equal to 0
assert the value returned by count_entities method is equal to 0
expected: the count is equal to 0
'''
collection_name = gen_unique_str("test_collection")
@ -646,14 +646,14 @@ class TestCollectionCountBinary:
'dimension': dim,
'index_file_size': index_file_size}
connect.create_collection(param)
status, res = connect.count_collection(ham_collection)
status, res = connect.count_entities(ham_collection)
assert res == 0
def test_collection_rows_count_multi_collections(self, connect):
'''
target: test collection rows_count is correct or not with multiple collections of IP
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nq = 100
@ -670,7 +670,7 @@ class TestCollectionCountBinary:
res = connect.add_vectors(collection_name=collection_name, records=vectors)
connect.flush(collection_list)
for i in range(20):
status, res = connect.count_collection(collection_list[i])
status, res = connect.count_entities(collection_list[i])
assert status.OK()
assert res == nq
@ -710,13 +710,13 @@ class TestCollectionCountTANIMOTO:
'''
target: test collection rows_count is correct or not
method: create collection and add vectors in it,
assert the value returned by count_collection method is equal to length of vectors
assert the value returned by count_entities method is equal to length of vectors
expected: the count is equal to the length of vectors
'''
nb = add_vectors_nb
tmp, vectors = gen_binary_vectors(nb, dim)
res = connect.add_vectors(collection_name=tanimoto_collection, records=vectors)
connect.flush([tanimoto_collection])
status, res = connect.count_collection(tanimoto_collection)
status, res = connect.count_entities(tanimoto_collection)
assert status.OK()
assert res == nb


@ -42,7 +42,7 @@ class TestCollectionInfoBase:
'''
collection_name = None
with pytest.raises(Exception) as e:
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
@pytest.mark.timeout(INFO_TIMEOUT)
def test_get_collection_info_name_not_existed(self, connect, collection):
@ -52,7 +52,7 @@ class TestCollectionInfoBase:
expected: status not ok
'''
collection_name = gen_unique_str("not_existed_collection")
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
assert not status.OK()
@pytest.fixture(
@ -70,7 +70,7 @@ class TestCollectionInfoBase:
expected: status not ok
'''
collection_name = get_collection_name
status, info = connect.collection_info(collection_name)
status, info = connect.get_collection_stats(collection_name)
assert not status.OK()
@pytest.mark.timeout(INFO_TIMEOUT)
@ -85,16 +85,16 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb
# delete a few vectors
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb - 2
@ -110,7 +110,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
assert len(info["partitions"]) == 1
@ -131,7 +131,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
assert len(info["partitions"]) == 2
@ -155,7 +155,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
for partition in info["partitions"]:
@ -183,7 +183,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["row_count"] == nb * 2
for partition in info["partitions"]:
@ -222,7 +222,7 @@ class TestCollectionInfoBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
@ -248,7 +248,7 @@ class TestCollectionInfoBase:
status = connect.create_index(collection, index_type, index_param)
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
@ -272,7 +272,7 @@ class TestCollectionInfoBase:
for index_type in [IndexType.FLAT, IndexType.IVFLAT, IndexType.IVF_SQ8]:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
index_string = info["partitions"][0]["segments"][0]["index_name"]
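
The dictionary returned by the renamed get_collection_stats is navigated the same way throughout these tests; a compact sketch of the fields the assertions above rely on (field names are taken from the hunks, nothing else is assumed).

# Fields of the get_collection_stats result used by the tests above.
status, info = connect.get_collection_stats(collection)
assert status.OK()
row_count = info["row_count"]                          # total number of rows
segment = info["partitions"][0]["segments"][0]         # per-segment statistics
data_size = segment["data_size"]                       # segment size on disk
index_name = segment["index_name"]                     # index built on the segment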


@ -75,14 +75,14 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info)
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -100,13 +100,13 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -124,12 +124,12 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_before = info["partitions"][0]["segments"][0]["data_size"]
@ -137,7 +137,7 @@ class TestCompactBase:
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_after = info["partitions"][0]["segments"][0]["data_size"]
@ -156,17 +156,17 @@ class TestCompactBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
assert not info["partitions"][0]["segments"]
@ -202,19 +202,19 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
logging.getLogger().info(info["partitions"])
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_after = info["partitions"][0]["segments"][0]["data_size"]
@ -233,7 +233,7 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)
@ -241,14 +241,14 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
status = connect.compact(collection)
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -266,25 +266,25 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before >= size_after)
status = connect.compact(collection)
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -328,13 +328,13 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -354,7 +354,7 @@ class TestCompactBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids[:10])
status = connect.delete_entity_by_id(collection, ids[:10])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
@ -364,7 +364,7 @@ class TestCompactBase:
index_type = get_simple_index["index_type"]
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
assert result._collection_name == collection
assert result._index_type == index_type
@ -384,7 +384,7 @@ class TestCompactBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
@ -421,7 +421,7 @@ class TestCompactBase:
status = connect.flush([collection])
assert status.OK()
delete_ids = ids[0:1000]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
@ -431,7 +431,7 @@ class TestCompactBase:
# pdb.set_trace()
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["partitions"][0].count == nb * 100 - 1000
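For orientation, the pattern these compact cases exercise can be summarized in a minimal sketch using the renamed interfaces. This is illustrative only and not part of the commit: it assumes `connect` is a connected client and `collection` an existing collection, as provided by the test fixtures, with `nb` and `dim` taken from the module constants.
# illustrative sketch only, not part of the commit
vectors = gen_vectors(nb, dim)
status, ids = connect.add_vectors(collection, vectors)
status = connect.flush([collection])
status = connect.delete_entity_by_id(collection, ids[:10])        # formerly delete_by_id
status = connect.flush([collection])
status, info = connect.get_collection_stats(collection)           # formerly collection_info
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(collection)                               # reclaim space held by deleted entities
status, info = connect.get_collection_stats(collection)
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert size_before >= size_after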
@ -455,13 +455,13 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -479,13 +479,13 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -503,12 +503,12 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_before = info["partitions"][0]["segments"][0]["data_size"]
@ -516,7 +516,7 @@ class TestCompactJAC:
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_after = info["partitions"][0]["segments"][0]["data_size"]
@ -535,17 +535,17 @@ class TestCompactJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
assert not info["partitions"][0]["segments"]
@ -563,20 +563,20 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -594,25 +594,25 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before >= size_after)
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -640,7 +640,7 @@ class TestCompactJAC:
for i in range(num_collections):
status, ids = connect.add_vectors(collection_name=collection_list[i], records=vectors)
assert status.OK()
status = connect.delete_by_id(collection_list[i], [ids[0], ids[-1]])
status = connect.delete_entity_by_id(collection_list[i], [ids[0], ids[-1]])
assert status.OK()
status = connect.flush([collection_list[i]])
assert status.OK()
@ -660,13 +660,13 @@ class TestCompactJAC:
status = connect.flush([jac_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(jac_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -690,7 +690,7 @@ class TestCompactJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
@ -734,7 +734,7 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(ip_collection)
@ -742,7 +742,7 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -760,13 +760,13 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -784,12 +784,12 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(ip_collection, delete_ids)
status = connect.delete_entity_by_id(ip_collection, delete_ids)
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_before = info["partitions"][0]["segments"][0]["data_size"]
@ -797,7 +797,7 @@ class TestCompactIP:
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
size_after = info["partitions"][0]["segments"][0]["data_size"]
@ -816,17 +816,17 @@ class TestCompactIP:
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
status = connect.delete_by_id(ip_collection, ids)
status = connect.delete_entity_by_id(ip_collection, ids)
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
logging.getLogger().info(info["partitions"])
assert not info["partitions"][0]["segments"]
@ -844,20 +844,20 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -875,12 +875,12 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
delete_ids = [ids[0], ids[-1]]
status = connect.delete_by_id(ip_collection, delete_ids)
status = connect.delete_entity_by_id(ip_collection, delete_ids)
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(ip_collection)
@ -888,7 +888,7 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before >= size_after)
@ -897,7 +897,7 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info after compact twice
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
@ -941,13 +941,13 @@ class TestCompactIP:
status = connect.flush([ip_collection])
assert status.OK()
# get collection info before compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_before = info["partitions"][0]["segments"][0]["data_size"]
status = connect.compact(ip_collection)
assert status.OK()
# get collection info after compact
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
@ -969,7 +969,7 @@ class TestCompactIP:
assert status.OK()
status = connect.compact(ip_collection)
assert status.OK()
status = connect.delete_by_id(ip_collection, ids)
status = connect.delete_entity_by_id(ip_collection, ids)
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()

View File

@ -208,7 +208,7 @@ class TestConnect:
b. data_set not too large, in case disconnection happens while data is still being prepared
c. data_set not too small, in case disconnection happens after data has already been transferred
d. make sure disconnection happens while data is in transit
Expected: Failure, count_collection == 0
Expected: Failure, count_entities == 0
'''
pass

View File

@ -22,7 +22,7 @@ nb = 6000
class TestDeleteBase:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
@ -52,7 +52,7 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -76,7 +76,7 @@ class TestDeleteBase:
status = connect.flush([collection])
# Bloom filter error
assert status.OK()
status = connect.delete_by_id(collection, [1])
status = connect.delete_entity_by_id(collection, [1])
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -96,10 +96,10 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
@ -116,10 +116,10 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
time.sleep(2)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
@ -136,7 +136,7 @@ class TestDeleteBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -156,7 +156,7 @@ class TestDeleteBase:
status = connect.flush([collection])
assert status.OK()
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
assert not status.OK()
def test_add_vectors_delete_vector(self, connect, collection, get_simple_index):
@ -173,7 +173,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -199,7 +199,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
status = connect.create_index(collection, index_type, index_param)
@ -231,7 +231,7 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
status, tmp_ids = connect.add_vectors(collection, [vectors[0], vectors[-1]])
@ -259,11 +259,11 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
for i in range(10):
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
def test_delete_no_flush_multiable_times(self, connect, collection):
@ -278,10 +278,10 @@ class TestDeleteBase:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
for i in range(10):
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
assert status.OK()
@ -289,7 +289,7 @@ class TestDeleteBase:
class TestDeleteIndexedVectors:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
@ -320,7 +320,7 @@ class TestDeleteIndexedVectors:
assert status.OK()
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -347,7 +347,7 @@ class TestDeleteIndexedVectors:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
assert status.OK()
status = connect.flush([collection])
search_param = get_search_param(index_type)
@ -365,7 +365,7 @@ class TestDeleteIndexedVectors:
class TestDeleteBinary:
"""
******************************************************************
The following cases are used to test `delete_by_id` function
The following cases are used to test `delete_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
@ -392,7 +392,7 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
search_param = get_search_param(index_type)
@ -415,10 +415,10 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, ids)
status = connect.delete_entity_by_id(jac_collection, ids)
assert status.OK()
status = connect.flush([jac_collection])
status, res = connect.count_collection(jac_collection)
status, res = connect.count_entities(jac_collection)
assert status.OK()
assert res == 0
@ -435,7 +435,7 @@ class TestDeleteBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status = connect.delete_by_id(jac_collection, [0])
status = connect.delete_entity_by_id(jac_collection, [0])
assert status.OK()
status = connect.flush([jac_collection])
status = connect.flush([jac_collection])
@ -456,9 +456,9 @@ class TestDeleteBinary:
status = connect.flush([jac_collection])
assert status.OK()
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, [0])
status = connect.delete_entity_by_id(collection_new, [0])
assert not status.OK()
def test_add_vectors_delete_vector(self, connect, jac_collection, get_simple_index):
@ -475,7 +475,7 @@ class TestDeleteBinary:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
search_param = get_search_param(index_type)
@ -500,7 +500,7 @@ class TestDeleteBinary:
assert status.OK()
delete_ids = [ids[0], ids[-1]]
query_vecs = [vectors[0], vectors[1], vectors[-1]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
assert status.OK()
status = connect.flush([jac_collection])
status, tmp_ids = connect.add_vectors(jac_collection, [vectors[0], vectors[-1]])
@ -533,13 +533,13 @@ class TestDeleteIdsIngalid(object):
def test_delete_vector_id_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.delete_by_id(collection, [invalid_id])
status = connect.delete_entity_by_id(collection, [invalid_id])
@pytest.mark.level(2)
def test_delete_vector_ids_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.delete_by_id(collection, [1, invalid_id])
status = connect.delete_entity_by_id(collection, [1, invalid_id])
class TestCollectionNameInvalid(object):
@ -556,6 +556,6 @@ class TestCollectionNameInvalid(object):
@pytest.mark.level(2)
def test_delete_vectors_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
status = connect.delete_by_id(collection_name, [1])
status = connect.delete_entity_by_id(collection_name, [1])
assert not status.OK()

View File

@ -65,13 +65,13 @@ class TestFlushBase:
ids = [i for i in range(nb)]
status, ids = connect.insert(collection, vectors, ids)
status = connect.flush([collection])
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == nb
status, ids = connect.insert(collection, vectors, ids, partition_tag=tag)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == 2 * nb
def test_add_partitions_flush(self, connect, collection):
@ -91,7 +91,7 @@ class TestFlushBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == 2 * nb
def test_add_collections_flush(self, connect, collection):
@ -116,9 +116,9 @@ class TestFlushBase:
status = connect.flush([collection])
status = connect.flush([collection_new])
assert status.OK()
result, res = connect.count_collection(collection)
result, res = connect.count_entities(collection)
assert res == nb
result, res = connect.count_collection(collection_new)
result, res = connect.count_entities(collection_new)
assert res == nb
def test_add_flush_multiable_times(self, connect, collection):
@ -150,7 +150,7 @@ class TestFlushBase:
start_time = time.time()
while (time.time()-start_time < timeout):
time.sleep(1)
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
if res == nb:
assert status.OK()
break
@ -180,7 +180,7 @@ class TestFlushBase:
status, ids = connect.add_vectors(collection, vectors, ids)
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
@ -192,7 +192,7 @@ class TestFlushBase:
vectors = gen_vectors(nb, dim)
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
status = connect.delete_by_id(collection, [ids[-1]])
status = connect.delete_entity_by_id(collection, [ids[-1]])
assert status.OK()
for i in range(10):
status = connect.flush([collection])
@ -204,7 +204,7 @@ class TestFlushBase:
# TODO: CI fail, LOCAL pass
def _test_collection_count_during_flush(self, connect, args):
'''
method: flush collection at background, call `count_collection`
method: flush collection at background, call `count_entities`
expected: status ok
'''
collection = gen_unique_str()
@ -218,16 +218,16 @@ class TestFlushBase:
status, ids = milvus.add_vectors(collection, vectors, ids=[i for i in range(nb)])
def flush(collection_name):
milvus = get_milvus(args["ip"], args["port"], handler=args["handler"])
status = milvus.delete_by_id(collection_name, [i for i in range(nb)])
status = milvus.delete_entity_by_id(collection_name, [i for i in range(nb)])
assert status.OK()
status = milvus.flush([collection_name])
assert status.OK()
p = Process(target=flush, args=(collection, ))
p.start()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert status.OK()
p.join()
status, res = milvus.count_collection(collection)
status, res = milvus.count_entities(collection)
assert status.OK()
logging.getLogger().info(res)
assert res == 0

View File

@ -25,37 +25,37 @@ class TestGetVectorIdsBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
return info["partitions"][0]["segments"][0]["name"]
"""
******************************************************************
The following cases are used to test `get_vector_ids` function
The following cases are used to test `list_id_in_segment` function
******************************************************************
"""
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_collection_name_None(self, connect, collection):
def test_list_id_in_segment_collection_name_None(self, connect, collection):
'''
target: get vector ids where collection name is None
method: call get_vector_ids with the collection_name: None
method: call list_id_in_segment with the collection_name: None
expected: exception raised
'''
collection_name = None
name = self.get_valid_name(connect, collection)
with pytest.raises(Exception) as e:
status, vector_ids = connect.get_vector_ids(collection_name, name)
status, vector_ids = connect.list_id_in_segment(collection_name, name)
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_collection_name_not_existed(self, connect, collection):
def test_list_id_in_segment_collection_name_not_existed(self, connect, collection):
'''
target: get vector ids where collection name does not exist
method: call get_vector_ids with a random collection_name, which is not in db
method: call list_id_in_segment with a random collection_name, which is not in db
expected: status not ok
'''
collection_name = gen_unique_str("not_existed_collection")
name = self.get_valid_name(connect, collection)
status, vector_ids = connect.get_vector_ids(collection_name, name)
status, vector_ids = connect.list_id_in_segment(collection_name, name)
assert not status.OK()
@pytest.fixture(
@ -66,47 +66,47 @@ class TestGetVectorIdsBase:
yield request.param
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_collection_name_invalid(self, connect, collection, get_collection_name):
def test_list_id_in_segment_collection_name_invalid(self, connect, collection, get_collection_name):
'''
target: get vector ids where collection name is invalid
method: call get_vector_ids with invalid collection_name
method: call list_id_in_segment with invalid collection_name
expected: status not ok
'''
collection_name = get_collection_name
name = self.get_valid_name(connect, collection)
status, vector_ids = connect.get_vector_ids(collection_name, name)
status, vector_ids = connect.list_id_in_segment(collection_name, name)
assert not status.OK()
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_name_None(self, connect, collection):
def test_list_id_in_segment_name_None(self, connect, collection):
'''
target: get vector ids where segment name is None
method: call get_vector_ids with the name: None
method: call list_id_in_segment with the name: None
expected: exception raised
'''
valid_name = self.get_valid_name(connect, collection)
segment = None
with pytest.raises(Exception) as e:
status, vector_ids = connect.get_vector_ids(collection, segment)
status, vector_ids = connect.list_id_in_segment(collection, segment)
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_name_not_existed(self, connect, collection):
def test_list_id_in_segment_name_not_existed(self, connect, collection):
'''
target: get vector ids where segment name does not exist
method: call get_vector_ids with a random segment name
method: call list_id_in_segment with a random segment name
expected: status not ok
'''
valid_name = self.get_valid_name(connect, collection)
segment = gen_unique_str("not_existed_segment")
status, vector_ids = connect.get_vector_ids(collection, segment)
status, vector_ids = connect.list_id_in_segment(collection, segment)
logging.getLogger().info(vector_ids)
assert not status.OK()
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_A(self, connect, collection):
def test_list_id_in_segment_without_index_A(self, connect, collection):
'''
target: get vector ids when there is no index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
vectors = gen_vector(10, dim)
@ -114,9 +114,9 @@ class TestGetVectorIdsBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
@ -124,10 +124,10 @@ class TestGetVectorIdsBase:
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_B(self, connect, collection):
def test_list_id_in_segment_without_index_B(self, connect, collection):
'''
target: get vector ids when there is no index but with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(collection, tag)
@ -137,10 +137,10 @@ class TestGetVectorIdsBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
@ -159,10 +159,10 @@ class TestGetVectorIdsBase:
return request.param
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_A(self, connect, collection, get_simple_index):
def test_list_id_in_segment_with_index_A(self, connect, collection, get_simple_index):
'''
target: get vector ids when there is index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
index_param = get_simple_index["index_param"]
@ -174,19 +174,19 @@ class TestGetVectorIdsBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_B(self, connect, collection, get_simple_index):
def test_list_id_in_segment_with_index_B(self, connect, collection, get_simple_index):
'''
target: get vector ids when there is index and with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(collection, tag)
@ -200,32 +200,32 @@ class TestGetVectorIdsBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_after_delete_vectors(self, connect, collection):
def test_list_id_in_segment_after_delete_vectors(self, connect, collection):
'''
target: get vector ids after vectors are deleted
method: add vectors and delete a few, call get_vector_ids
method: add vectors and delete a few, call list_id_in_segment
expected: status ok, vector_ids decreased after vectors deleted
'''
vectors = gen_vector(2, dim)
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
delete_ids = [ids[0]]
status = connect.delete_by_id(collection, delete_ids)
status = connect.delete_entity_by_id(collection, delete_ids)
status = connect.flush([collection])
assert status.OK()
status, info = connect.collection_info(collection)
status, info = connect.get_collection_stats(collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(collection, info["partitions"][0]["segments"][0]["name"])
assert len(vector_ids) == 1
assert vector_ids[0] == ids[1]
@ -233,14 +233,14 @@ class TestGetVectorIdsBase:
class TestGetVectorIdsIP:
"""
******************************************************************
The following cases are used to test `get_vector_ids` function
The following cases are used to test `list_id_in_segment` function
******************************************************************
"""
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_A(self, connect, ip_collection):
def test_list_id_in_segment_without_index_A(self, connect, ip_collection):
'''
target: get vector ids when there is no index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
vectors = gen_vector(10, dim)
@ -248,9 +248,9 @@ class TestGetVectorIdsIP:
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(ip_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(ip_collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
@ -258,10 +258,10 @@ class TestGetVectorIdsIP:
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_B(self, connect, ip_collection):
def test_list_id_in_segment_without_index_B(self, connect, ip_collection):
'''
target: get vector ids when there is no index but with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(ip_collection, tag)
@ -271,10 +271,10 @@ class TestGetVectorIdsIP:
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(ip_collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(ip_collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
@ -293,10 +293,10 @@ class TestGetVectorIdsIP:
return request.param
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_A(self, connect, ip_collection, get_simple_index):
def test_list_id_in_segment_with_index_A(self, connect, ip_collection, get_simple_index):
'''
target: get vector ids when there is index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
index_param = get_simple_index["index_param"]
@ -308,19 +308,19 @@ class TestGetVectorIdsIP:
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(ip_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(ip_collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_B(self, connect, ip_collection, get_simple_index):
def test_list_id_in_segment_with_index_B(self, connect, ip_collection, get_simple_index):
'''
target: get vector ids when there is index and with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(ip_collection, tag)
@ -334,32 +334,32 @@ class TestGetVectorIdsIP:
assert status.OK()
status = connect.flush([ip_collection])
assert status.OK()
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(ip_collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(ip_collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_after_delete_vectors(self, connect, ip_collection):
def test_list_id_in_segment_after_delete_vectors(self, connect, ip_collection):
'''
target: get vector ids after vectors are deleted
method: add vectors and delete a few, call get_vector_ids
method: add vectors and delete a few, call list_id_in_segment
expected: status ok, vector_ids decreased after vectors deleted
'''
vectors = gen_vector(2, dim)
status, ids = connect.add_vectors(ip_collection, vectors)
assert status.OK()
delete_ids = [ids[0]]
status = connect.delete_by_id(ip_collection, delete_ids)
status = connect.delete_entity_by_id(ip_collection, delete_ids)
status = connect.flush([ip_collection])
assert status.OK()
status, info = connect.collection_info(ip_collection)
status, info = connect.get_collection_stats(ip_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(ip_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(ip_collection, info["partitions"][0]["segments"][0]["name"])
assert len(vector_ids) == 1
assert vector_ids[0] == ids[1]
@ -367,14 +367,14 @@ class TestGetVectorIdsIP:
class TestGetVectorIdsJAC:
"""
******************************************************************
The following cases are used to test `get_vector_ids` function
The following cases are used to test `list_id_in_segment` function
******************************************************************
"""
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_A(self, connect, jac_collection):
def test_list_id_in_segment_without_index_A(self, connect, jac_collection):
'''
target: get vector ids when there is no index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
tmp, vectors = gen_binary_vectors(10, dim)
@ -382,19 +382,19 @@ class TestGetVectorIdsJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(jac_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(jac_collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_without_index_B(self, connect, jac_collection):
def test_list_id_in_segment_without_index_B(self, connect, jac_collection):
'''
target: get vector ids when there is no index but with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(jac_collection, tag)
@ -404,10 +404,10 @@ class TestGetVectorIdsJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(jac_collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(jac_collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
@ -425,10 +425,10 @@ class TestGetVectorIdsJAC:
pytest.skip("Skip index Temporary")
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_A(self, connect, jac_collection, get_jaccard_index):
def test_list_id_in_segment_with_index_A(self, connect, jac_collection, get_jaccard_index):
'''
target: get vector ids when there is index
method: call get_vector_ids and check if the segment contains vectors
method: call list_id_in_segment and check if the segment contains vectors
expected: status ok
'''
index_param = get_jaccard_index["index_param"]
@ -440,19 +440,19 @@ class TestGetVectorIdsJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(jac_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(jac_collection, info["partitions"][0]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_with_index_B(self, connect, jac_collection, get_jaccard_index):
def test_list_id_in_segment_with_index_B(self, connect, jac_collection, get_jaccard_index):
'''
target: get vector ids when there is index and with partition
method: create partition, add vectors to it and call get_vector_ids, check if the segment contains vectors
method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
expected: status ok
'''
status = connect.create_partition(jac_collection, tag)
@ -466,31 +466,31 @@ class TestGetVectorIdsJAC:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
assert info["partitions"][1]["tag"] == tag
status, vector_ids = connect.get_vector_ids(jac_collection, info["partitions"][1]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(jac_collection, info["partitions"][1]["segments"][0]["name"])
# vector_ids should match ids
assert len(vector_ids) == 10
for i in range(10):
assert vector_ids[i] == ids[i]
@pytest.mark.timeout(GET_TIMEOUT)
def test_get_vector_ids_after_delete_vectors(self, connect, jac_collection):
def test_list_id_in_segment_after_delete_vectors(self, connect, jac_collection):
'''
target: get vector ids after vectors are deleted
method: add vectors and delete a few, call get_vector_ids
method: add vectors and delete a few, call list_id_in_segment
expected: status ok, vector_ids decreased after vectors deleted
'''
tmp, vectors = gen_binary_vectors(2, dim)
status, ids = connect.add_vectors(jac_collection, vectors)
assert status.OK()
delete_ids = [ids[0]]
status = connect.delete_by_id(jac_collection, delete_ids)
status = connect.delete_entity_by_id(jac_collection, delete_ids)
status = connect.flush([jac_collection])
assert status.OK()
status, info = connect.collection_info(jac_collection)
status, info = connect.get_collection_stats(jac_collection)
assert status.OK()
status, vector_ids = connect.get_vector_ids(jac_collection, info["partitions"][0]["segments"][0]["name"])
status, vector_ids = connect.list_id_in_segment(jac_collection, info["partitions"][0]["segments"][0]["name"])
assert len(vector_ids) == 1
assert vector_ids[0] == ids[1]

View File

@ -11,7 +11,7 @@ from utils import *
dim = 128
index_file_size = 10
collection_id = "get_vectors_by_ids"
collection_id = "get_entity_by_id"
DELETE_TIMEOUT = 60
nprobe = 1
tag = "1970-01-01"
@ -22,12 +22,12 @@ tag = "tag"
class TestGetBase:
"""
******************************************************************
The following cases are used to test `get_vectors_by_ids` function
The following cases are used to test `get_entity_by_id` function
******************************************************************
"""
def test_get_vector_A(self, connect, collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -36,13 +36,13 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, ids)
status, res = connect.get_entity_by_id(collection, ids)
assert status.OK()
assert_equal_vector(res[0], vector[0])
def test_get_vector_B(self, connect, collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -52,14 +52,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
length = 100
status, res = connect.get_vectors_by_ids(collection, ids[:length])
status, res = connect.get_entity_by_id(collection, ids[:length])
assert status.OK()
for i in range(length):
assert_equal_vector(res[i], vectors[i])
def test_get_vector_C_limit(self, connect, collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get, limit > 1000
expected: status ok, vector returned
'''
@ -68,12 +68,12 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, ids)
status, res = connect.get_entity_by_id(collection, ids)
assert not status.OK()
def test_get_vector_partition(self, connect, collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -85,14 +85,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
length = 100
status, res = connect.get_vectors_by_ids(collection, ids[:length])
status, res = connect.get_entity_by_id(collection, ids[:length])
assert status.OK()
for i in range(length):
assert_equal_vector(res[i], vectors[i])
def test_get_vector_multi_same_ids(self, connect, collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vectors, with the same id, get vector by the given id
expected: status ok, get one vector
'''
@ -102,7 +102,7 @@ class TestGetBase:
status, ids = connect.add_vectors(collection, vectors, ids=ids)
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [0])
status, res = connect.get_entity_by_id(collection, [0])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
@ -121,7 +121,7 @@ class TestGetBase:
def test_get_vector_after_delete(self, connect, collection, get_id):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vectors, and delete, get vector by the given id
expected: status ok, get one vector
'''
@ -131,17 +131,17 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
def test_get_vector_after_delete_with_partition(self, connect, collection, get_id):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vectors into partition, and delete, get vector by the given id
expected: status ok, get one vector
'''
@ -152,11 +152,11 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
@ -171,7 +171,7 @@ class TestGetBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [1])
status, res = connect.get_entity_by_id(collection, [1])
assert status.OK()
assert not len(res[0])
@ -187,14 +187,14 @@ class TestGetBase:
status = connect.flush([collection])
assert status.OK()
collection_new = gen_unique_str()
status, res = connect.get_vectors_by_ids(collection_new, [1])
status, res = connect.get_entity_by_id(collection_new, [1])
assert not status.OK()
class TestGetIndexedVectors:
"""
******************************************************************
The following cases are used to test `get_vectors_by_ids` function
The following cases are used to test `get_entity_by_id` function
******************************************************************
"""
@pytest.fixture(
@ -240,13 +240,13 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert_equal_vector(res[0], vectors[id])
def test_get_vector_after_delete(self, connect, collection, get_simple_index, get_id):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vectors, and delete, get vector by the given id
expected: status ok, get one vector
'''
@ -260,17 +260,17 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status = connect.delete_by_id(collection, [ids[id]])
status = connect.delete_entity_by_id(collection, [ids[id]])
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert not len(res[0])
def test_get_vector_partition(self, connect, collection, get_simple_index, get_id):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -286,7 +286,7 @@ class TestGetIndexedVectors:
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
id = get_id
status, res = connect.get_vectors_by_ids(collection, [ids[id]])
status, res = connect.get_entity_by_id(collection, [ids[id]])
assert status.OK()
assert_equal_vector(res[0], vectors[id])
@ -294,12 +294,12 @@ class TestGetIndexedVectors:
class TestGetBinary:
"""
******************************************************************
The following cases are used to test `get_vectors_by_ids` function
The following cases are used to test `get_entity_by_id` function
******************************************************************
"""
def test_get_vector_A(self, connect, jac_collection):
'''
target: test get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -308,13 +308,13 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vector[0])
def test_get_vector_B(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -323,13 +323,13 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
def test_get_vector_multi_same_ids(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test get_entity_by_id
method: add vectors, with the same id, get vector by the given id
expected: status ok, get one vector
'''
@ -339,7 +339,7 @@ class TestGetBinary:
status, ids = connect.add_vectors(jac_collection, vectors, ids=ids)
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [0])
status, res = connect.get_entity_by_id(jac_collection, [0])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
@ -354,7 +354,7 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [1])
status, res = connect.get_entity_by_id(jac_collection, [1])
assert status.OK()
assert not len(res[0])
@ -370,12 +370,12 @@ class TestGetBinary:
status = connect.flush([jac_collection])
assert status.OK()
collection_new = gen_unique_str()
status, res = connect.get_vectors_by_ids(collection_new, [1])
status, res = connect.get_entity_by_id(collection_new, [1])
assert not status.OK()
def test_get_vector_partition(self, connect, jac_collection):
'''
target: test.get_vectors_by_ids
target: test get_entity_by_id
method: add vector, and get
expected: status ok, vector returned
'''
@ -385,7 +385,7 @@ class TestGetBinary:
assert status.OK()
status = connect.flush([jac_collection])
assert status.OK()
status, res = connect.get_vectors_by_ids(jac_collection, [ids[0]])
status, res = connect.get_entity_by_id(jac_collection, [ids[0]])
assert status.OK()
assert_equal_vector(res[0], vectors[0])
@ -407,7 +407,7 @@ class TestGetVectorIdIngalid(object):
def test_get_vector_id_invalid(self, connect, collection, gen_invalid_id):
invalid_id = gen_invalid_id
with pytest.raises(Exception) as e:
status = connect.get_vectors_by_ids(collection, [invalid_id])
status = connect.get_entity_by_id(collection, [invalid_id])
class TestCollectionNameInvalid(object):
@ -425,5 +425,5 @@ class TestCollectionNameInvalid(object):
def test_get_vectors_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
vectors = gen_vectors(1, dim)
status, result = connect.get_vectors_by_ids(collection_name, [1])
status, result = connect.get_entity_by_id(collection_name, [1])
assert not status.OK()
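Both failure modes above survive the rename: a malformed id is expected to raise on the client side (as in test_get_vector_id_invalid), while an unknown collection name comes back as a non-OK status. A sketch under the same fixtures; the literal invalid id is only an example of what the `gen_invalid_id` fixture produces:
def check_get_invalid_inputs(connect, collection):
    # malformed ids are expected to raise before reaching the server
    with pytest.raises(Exception):
        connect.get_entity_by_id(collection, ["not_an_int"])
    # an unknown collection name fails with a non-OK status instead of raising
    status, res = connect.get_entity_by_id(gen_unique_str(), [1])
    assert not status.OK()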

View File

@ -1,5 +1,5 @@
"""
For testing index operations, including `create_index`, `describe_index` and `drop_index` interfaces
For testing index operations, including `create_index`, `get_index_info` and `drop_index` interfaces
"""
import logging
import pytest
@ -141,7 +141,7 @@ class TestIndexBase:
logging.getLogger().info(get_simple_index)
status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(collection))
logging.getLogger().info(connect.get_index_info(collection))
query_vecs = [vectors[0], vectors[1], vectors[2]]
top_k = 5
search_param = get_search_param(index_type)
@ -239,7 +239,7 @@ class TestIndexBase:
status = connect.create_index(collection, IndexType.IVFLAT, {"nlist": NLIST})
assert status.OK()
def count(connect):
status, count = connect.count_collection(collection)
status, count = connect.count_entities(collection)
assert status.OK()
assert count == nb
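`count_entities` keeps the `(status, count)` return shape of the old `count_collection`, which is what the concurrent check above relies on. A minimal sequential sketch under the usual fixtures:
def check_count_after_flush(connect, collection):
    vectors = gen_vectors(nb, dim)
    status, ids = connect.add_vectors(collection, vectors)
    assert status.OK()
    assert connect.flush([collection]).OK()
    # the renamed counter still returns (status, count)
    status, count = connect.count_entities(collection)
    assert status.OK()
    assert count == nb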
@ -415,18 +415,18 @@ class TestIndexBase:
for index in indexs:
status = connect.create_index(collection, index["index_type"], index["index_param"])
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
assert result._params["nlist"] == nlist
assert result._collection_name == collection
assert result._index_type == index_type_2
"""
******************************************************************
The following cases are used to test `describe_index` function
The following cases are used to test `get_index_info` function
******************************************************************
"""
def test_describe_index(self, connect, collection, get_index):
def test_get_index_info(self, connect, collection, get_index):
'''
target: test describe index interface
method: create collection and add vectors in it, create index, call describe index
@ -438,7 +438,7 @@ class TestIndexBase:
# status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
if status.OK():
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == collection
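`get_index_info` keeps `describe_index`'s shape: a status plus an index-info object whose private fields the cases read directly. A sketch assuming an IVF_SQ8 index with an illustrative `nlist`:
def check_index_info_fields(connect, collection):
    index_param = {"nlist": 128}  # illustrative value only
    assert connect.create_index(collection, IndexType.IVF_SQ8, index_param).OK()
    status, info = connect.get_index_info(collection)
    assert status.OK()
    # the cases assert on these private attributes
    assert info._collection_name == collection
    assert info._index_type == IndexType.IVF_SQ8
    assert info._params == index_param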
@ -469,7 +469,7 @@ class TestIndexBase:
assert status.OK()
for i in range(10):
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == collection_list[i]
@ -478,22 +478,22 @@ class TestIndexBase:
for i in range(10):
status = connect.drop_index(collection_list[i])
assert status.OK()
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
logging.getLogger().info(result)
assert result._collection_name == collection_list[i]
assert result._index_type == IndexType.FLAT
# @pytest.mark.level(2)
# def test_describe_index_without_connect(self, dis_connect, collection):
# def test_get_index_info_without_connect(self, dis_connect, collection):
# '''
# target: test describe index without connection
# method: describe index, and check if describe successfully
# expected: raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.describe_index(collection)
# status = dis_connect.get_index_info(collection)
def test_describe_index_collection_not_existed(self, connect):
def test_get_index_info_collection_not_existed(self, connect):
'''
target: test describe index interface when collection name not existed
method: create collection and add vectors in it, create index
@ -501,10 +501,10 @@ class TestIndexBase:
expected: return code not equals to 0, describe index failed
'''
collection_name = gen_unique_str(self.__class__.__name__)
status, result = connect.describe_index(collection_name)
status, result = connect.get_index_info(collection_name)
assert not status.OK()
def test_describe_index_collection_None(self, connect):
def test_get_index_info_collection_None(self, connect):
'''
target: test describe index interface when collection name is None
method: create collection and add vectors in it, create index with an collection_name: None
@ -512,9 +512,9 @@ class TestIndexBase:
'''
collection_name = None
with pytest.raises(Exception) as e:
status = connect.describe_index(collection_name)
status = connect.get_index_info(collection_name)
def test_describe_index_not_create(self, connect, collection):
def test_get_index_info_not_create(self, connect, collection):
'''
target: test describe index interface when index not created
method: create collection and add vectors in it, create index
@ -522,7 +522,7 @@ class TestIndexBase:
expected: return code not equals to 0, describe index failed
'''
status, ids = connect.add_vectors(collection, vectors)
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert status.OK()
# assert result._params["nlist"] == index_params["nlist"]
@ -546,11 +546,11 @@ class TestIndexBase:
# status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
status = connect.drop_index(collection)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._collection_name == collection
assert result._index_type == IndexType.FLAT
@ -567,13 +567,13 @@ class TestIndexBase:
# status, ids = connect.add_vectors(collection, vectors)
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
status = connect.drop_index(collection)
assert status.OK()
status = connect.drop_index(collection)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._collection_name == collection
assert result._index_type == IndexType.FLAT
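Dropping an index, even twice in a row, is expected to stay OK, after which `get_index_info` reports the default FLAT index; that is the invariant the FLAT assertions above check. A sketch under the same fixtures, with an illustrative index choice:
def check_drop_index_resets_to_flat(connect, collection):
    assert connect.create_index(collection, IndexType.IVFLAT, {"nlist": 128}).OK()
    # dropping repeatedly is tolerated
    assert connect.drop_index(collection).OK()
    assert connect.drop_index(collection).OK()
    status, info = connect.get_index_info(collection)
    assert status.OK()
    # with no explicit index left, the collection falls back to FLAT
    assert info._index_type == IndexType.FLAT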
@ -616,7 +616,7 @@ class TestIndexBase:
expected: return code not equals to 0, drop index failed
'''
status, ids = connect.add_vectors(collection, vectors)
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
# no create index
status = connect.drop_index(collection)
@ -636,11 +636,11 @@ class TestIndexBase:
for i in range(2):
status = connect.create_index(collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
status = connect.drop_index(collection)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._collection_name == collection
assert result._index_type == IndexType.FLAT
@ -657,11 +657,11 @@ class TestIndexBase:
for i in range(2):
status = connect.create_index(collection, indexs[i]["index_type"], indexs[i]["index_param"])
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
status = connect.drop_index(collection)
assert status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._collection_name == collection
assert result._index_type == IndexType.FLAT
@ -757,7 +757,7 @@ class TestIndexIP:
logging.getLogger().info(get_simple_index)
status, ids = connect.add_vectors(ip_collection, vectors)
status = connect.create_index(ip_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ip_collection))
logging.getLogger().info(connect.get_index_info(ip_collection))
query_vecs = [vectors[0], vectors[1], vectors[2]]
top_k = 5
search_param = get_search_param(index_type)
@ -911,18 +911,18 @@ class TestIndexIP:
for index in indexs:
status = connect.create_index(ip_collection, index["index_type"], index["index_param"])
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
assert result._params["nlist"] == nlist
assert result._collection_name == ip_collection
assert result._index_type == index_type_2
"""
******************************************************************
The following cases are used to test `describe_index` function
The following cases are used to test `get_index_info` function
******************************************************************
"""
def test_describe_index(self, connect, ip_collection, get_simple_index):
def test_get_index_info(self, connect, ip_collection, get_simple_index):
'''
target: test describe index interface
method: create collection and add vectors in it, create index, call describe index
@ -933,7 +933,7 @@ class TestIndexIP:
logging.getLogger().info(get_simple_index)
# status, ids = connect.add_vectors(ip_collection, vectors[:5000])
status = connect.create_index(ip_collection, index_type, index_param)
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
status, mode = connect._cmd("mode")
@ -944,7 +944,7 @@ class TestIndexIP:
assert result._index_type == index_type
assert result._params == index_param
def test_describe_index_partition(self, connect, ip_collection, get_simple_index):
def test_get_index_info_partition(self, connect, ip_collection, get_simple_index):
'''
target: test describe index interface
method: create collection, create partition and add vectors in it, create index, call describe index
@ -956,13 +956,13 @@ class TestIndexIP:
status = connect.create_partition(ip_collection, tag)
status, ids = connect.add_vectors(ip_collection, vectors, partition_tag=tag)
status = connect.create_index(ip_collection, index_type, index_param)
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == ip_collection
assert result._index_type == index_type
def test_describe_index_partition_A(self, connect, ip_collection, get_simple_index):
def test_get_index_info_partition_A(self, connect, ip_collection, get_simple_index):
'''
target: test describe index interface
method: create collection, create partitions and add vectors in it, create index on partitions, call describe index
@ -977,7 +977,7 @@ class TestIndexIP:
# status, ids = connect.add_vectors(ip_collection, vectors, partition_tag=tag)
# status, ids = connect.add_vectors(ip_collection, vectors, partition_tag=new_tag)
status = connect.create_index(ip_collection, index_type, index_param)
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == ip_collection
@ -1007,7 +1007,7 @@ class TestIndexIP:
status = connect.create_index(collection_name, index_type, index_param)
assert status.OK()
for i in range(10):
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == collection_list[i]
@ -1015,22 +1015,22 @@ class TestIndexIP:
for i in range(10):
status = connect.drop_index(collection_list[i])
assert status.OK()
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
logging.getLogger().info(result)
assert result._collection_name == collection_list[i]
assert result._index_type == IndexType.FLAT
# @pytest.mark.level(2)
# def test_describe_index_without_connect(self, dis_connect, ip_collection):
# def test_get_index_info_without_connect(self, dis_connect, ip_collection):
# '''
# target: test describe index without connection
# method: describe index, and check if describe successfully
# expected: raise exception
# '''
# with pytest.raises(Exception) as e:
# status = dis_connect.describe_index(ip_collection)
# status = dis_connect.get_index_info(ip_collection)
def test_describe_index_not_create(self, connect, ip_collection):
def test_get_index_info_not_create(self, connect, ip_collection):
'''
target: test describe index interface when index not created
method: create collection and add vectors in it, create index
@ -1038,7 +1038,7 @@ class TestIndexIP:
expected: return code not equals to 0, describe index failed
'''
status, ids = connect.add_vectors(ip_collection, vectors)
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert status.OK()
# assert result._params["nlist"] == index_params["nlist"]
@ -1067,11 +1067,11 @@ class TestIndexIP:
assert not status.OK()
else:
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1088,11 +1088,11 @@ class TestIndexIP:
status, ids = connect.add_vectors(ip_collection, vectors, partition_tag=tag)
status = connect.create_index(ip_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1113,7 +1113,7 @@ class TestIndexIP:
assert status.OK()
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1136,13 +1136,13 @@ class TestIndexIP:
assert not status.OK()
else:
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
status = connect.drop_index(ip_collection)
assert status.OK()
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1167,7 +1167,7 @@ class TestIndexIP:
expected: return code not equals to 0, drop index failed
'''
status, ids = connect.add_vectors(ip_collection, vectors)
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
# no create index
status = connect.drop_index(ip_collection)
@ -1187,11 +1187,11 @@ class TestIndexIP:
for i in range(2):
status = connect.create_index(ip_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1208,15 +1208,15 @@ class TestIndexIP:
for i in range(2):
status = connect.create_index(ip_collection, indexs[i]["index_type"], indexs[i]["index_param"])
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
assert result._params == indexs[i]["index_param"]
assert result._collection_name == ip_collection
assert result._index_type == indexs[i]["index_type"]
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
status = connect.drop_index(ip_collection)
assert status.OK()
status, result = connect.describe_index(ip_collection)
status, result = connect.get_index_info(ip_collection)
logging.getLogger().info(result)
assert result._collection_name == ip_collection
assert result._index_type == IndexType.FLAT
@ -1323,7 +1323,7 @@ class TestIndexJAC:
logging.getLogger().info(get_jaccard_index)
status, ids = connect.add_vectors(jac_collection, self.vectors)
status = connect.create_index(jac_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(jac_collection))
logging.getLogger().info(connect.get_index_info(jac_collection))
query_vecs = [self.vectors[0], self.vectors[1], self.vectors[2]]
top_k = 5
search_param = get_search_param(index_type)
@ -1334,11 +1334,11 @@ class TestIndexJAC:
"""
******************************************************************
The following cases are used to test `describe_index` function
The following cases are used to test `get_index_info` function
******************************************************************
"""
def test_describe_index(self, connect, jac_collection, get_jaccard_index):
def test_get_index_info(self, connect, jac_collection, get_jaccard_index):
'''
target: test describe index interface
method: create collection and add vectors in it, create index, call describe index
@ -1349,13 +1349,13 @@ class TestIndexJAC:
logging.getLogger().info(get_jaccard_index)
# status, ids = connect.add_vectors(jac_collection, vectors[:5000])
status = connect.create_index(jac_collection, index_type, index_param)
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
assert result._collection_name == jac_collection
assert result._index_type == index_type
assert result._params == index_param
def test_describe_index_partition(self, connect, jac_collection, get_jaccard_index):
def test_get_index_info_partition(self, connect, jac_collection, get_jaccard_index):
'''
target: test describe index interface
method: create collection, create partition and add vectors in it, create index, call describe index
@ -1367,7 +1367,7 @@ class TestIndexJAC:
status = connect.create_partition(jac_collection, tag)
status, ids = connect.add_vectors(jac_collection, vectors, partition_tag=tag)
status = connect.create_index(jac_collection, index_type, index_param)
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == jac_collection
@ -1392,11 +1392,11 @@ class TestIndexJAC:
# status, ids = connect.add_vectors(ip_collection, vectors)
status = connect.create_index(jac_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
status = connect.drop_index(jac_collection)
assert status.OK()
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
assert result._collection_name == jac_collection
assert result._index_type == IndexType.FLAT
@ -1413,11 +1413,11 @@ class TestIndexJAC:
status, ids = connect.add_vectors(jac_collection, vectors, partition_tag=tag)
status = connect.create_index(jac_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
status = connect.drop_index(jac_collection)
assert status.OK()
status, result = connect.describe_index(jac_collection)
status, result = connect.get_index_info(jac_collection)
logging.getLogger().info(result)
assert result._collection_name == jac_collection
assert result._index_type == IndexType.FLAT
@ -1519,7 +1519,7 @@ class TestIndexBinary:
status, ids = connect.add_vectors(ham_collection, self.vectors, partition_tag=tag)
status = connect.create_index(ham_collection, index_type, index_param)
assert status.OK()
status, res = connect.count_collection(ham_collection)
status, res = connect.count_entities(ham_collection)
assert res == len(self.vectors)
@pytest.mark.timeout(BUILD_TIMEOUT)
@ -1536,7 +1536,7 @@ class TestIndexBinary:
status, ids = connect.add_vectors(substructure_collection, self.vectors, partition_tag=tag)
status = connect.create_index(substructure_collection, index_type, index_param)
assert status.OK()
status, res = connect.count_collection(substructure_collection,)
status, res = connect.count_entities(substructure_collection)
assert res == len(self.vectors)
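The same count check applies to binary collections. A sketch assuming the `jac_collection` fixture; building the vectors from `os.urandom` with one byte per eight dimensions is an assumption about the binary encoding, the real cases use the suite's own binary generators:
import os

def check_count_binary(connect, jac_collection):
    # ten binary vectors, dim bits -> dim // 8 bytes each (assumed layout)
    vectors = [os.urandom(dim // 8) for _ in range(10)]
    status, ids = connect.add_vectors(jac_collection, vectors)
    assert status.OK()
    assert connect.flush([jac_collection]).OK()
    status, count = connect.count_entities(jac_collection)
    assert status.OK()
    assert count == 10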
# @pytest.mark.level(2)
@ -1563,7 +1563,7 @@ class TestIndexBinary:
logging.getLogger().info(get_hamming_index)
status, ids = connect.add_vectors(ham_collection, self.vectors)
status = connect.create_index(ham_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ham_collection))
logging.getLogger().info(connect.get_index_info(ham_collection))
query_vecs = [self.vectors[0], self.vectors[1], self.vectors[2]]
top_k = 5
search_param = get_search_param(index_type)
@ -1584,7 +1584,7 @@ class TestIndexBinary:
logging.getLogger().info(get_superstructure_index)
status, ids = connect.add_vectors(superstructure_collection, self.vectors)
status = connect.create_index(superstructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(superstructure_collection))
logging.getLogger().info(connect.get_index_info(superstructure_collection))
query_vecs = [self.vectors[0], self.vectors[1], self.vectors[2]]
top_k = 5
search_param = get_search_param(index_type)
@ -1595,11 +1595,11 @@ class TestIndexBinary:
"""
******************************************************************
The following cases are used to test `describe_index` function
The following cases are used to test `get_index_info` function
******************************************************************
"""
def test_describe_index(self, connect, ham_collection, get_hamming_index):
def test_get_index_info(self, connect, ham_collection, get_hamming_index):
'''
target: test describe index interface
method: create collection and add vectors in it, create index, call describe index
@ -1610,13 +1610,13 @@ class TestIndexBinary:
logging.getLogger().info(get_hamming_index)
# status, ids = connect.add_vectors(jac_collection, vectors[:5000])
status = connect.create_index(ham_collection, index_type, index_param)
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
assert result._collection_name == ham_collection
assert result._index_type == index_type
assert result._params == index_param
def test_describe_index_partition(self, connect, ham_collection, get_hamming_index):
def test_get_index_info_partition(self, connect, ham_collection, get_hamming_index):
'''
target: test describe index interface
method: create collection, create partition and add vectors in it, create index, call describe index
@ -1628,13 +1628,13 @@ class TestIndexBinary:
status = connect.create_partition(ham_collection, tag)
status, ids = connect.add_vectors(ham_collection, vectors, partition_tag=tag)
status = connect.create_index(ham_collection, index_type, index_param)
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == ham_collection
assert result._index_type == index_type
def test_describe_index_partition_superstructrue(self, connect, superstructure_collection, get_superstructure_index):
def test_get_index_info_partition_superstructrue(self, connect, superstructure_collection, get_superstructure_index):
'''
target: test describe index interface
method: create collection, create partition and add vectors in it, create index, call describe index
@ -1646,7 +1646,7 @@ class TestIndexBinary:
status = connect.create_partition(superstructure_collection, tag)
status, ids = connect.add_vectors(superstructure_collection, vectors, partition_tag=tag)
status = connect.create_index(superstructure_collection, index_type, index_param)
status, result = connect.describe_index(superstructure_collection)
status, result = connect.get_index_info(superstructure_collection)
logging.getLogger().info(result)
assert result._params == index_param
assert result._collection_name == superstructure_collection
@ -1671,11 +1671,11 @@ class TestIndexBinary:
# status, ids = connect.add_vectors(ip_collection, vectors)
status = connect.create_index(ham_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
status = connect.drop_index(ham_collection)
assert status.OK()
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
assert result._collection_name == ham_collection
assert result._index_type == IndexType.FLAT
@ -1692,11 +1692,11 @@ class TestIndexBinary:
assert status.OK()
status = connect.create_index(substructure_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(substructure_collection)
status, result = connect.get_index_info(substructure_collection)
logging.getLogger().info(result)
status = connect.drop_index(substructure_collection)
assert status.OK()
status, result = connect.describe_index(substructure_collection)
status, result = connect.get_index_info(substructure_collection)
logging.getLogger().info(result)
assert result._collection_name == substructure_collection
assert result._index_type == IndexType.FLAT
@ -1713,11 +1713,11 @@ class TestIndexBinary:
status, ids = connect.add_vectors(ham_collection, vectors, partition_tag=tag)
status = connect.create_index(ham_collection, index_type, index_param)
assert status.OK()
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
status = connect.drop_index(ham_collection)
assert status.OK()
status, result = connect.describe_index(ham_collection)
status, result = connect.get_index_info(ham_collection)
logging.getLogger().info(result)
assert result._collection_name == ham_collection
assert result._index_type == IndexType.FLAT
@ -1742,9 +1742,9 @@ class TestIndexCollectionInvalid(object):
assert not status.OK()
@pytest.mark.level(1)
def test_describe_index_with_invalid_collectionname(self, connect, get_collection_name):
def test_get_index_info_with_invalid_collectionname(self, connect, get_collection_name):
collection_name = get_collection_name
status, result = connect.describe_index(collection_name)
status, result = connect.get_index_info(collection_name)
assert not status.OK()
@pytest.mark.level(1)
@ -1802,7 +1802,7 @@ class TestCreateIndexParamsInvalid(object):
status = connect.create_index(collection, get_index_type, {})
if get_index_type != IndexType.FLAT :
assert not status.OK()
status, result = connect.describe_index(collection)
status, result = connect.get_index_info(collection)
logging.getLogger().info(result)
assert result._collection_name == collection
assert result._index_type == IndexType.FLAT

View File

@ -48,7 +48,7 @@ class TestMixBase:
logging.getLogger().info("In create index")
status = milvus_instance.create_index(collection, index_params)
logging.getLogger().info(status)
status, result = milvus_instance.describe_index(collection)
status, result = milvus_instance.get_index_info(collection)
logging.getLogger().info(result)
def add_vectors(milvus_instance):
logging.getLogger().info("In add vectors")
@ -130,17 +130,17 @@ class TestMixBase:
#describe index
for i in range(10):
status, result = connect.describe_index(collection_list[i])
status, result = connect.get_index_info(collection_list[i])
assert result._index_type == IndexType.FLAT
status, result = connect.describe_index(collection_list[10 + i])
status, result = connect.get_index_info(collection_list[10 + i])
assert result._index_type == IndexType.IVFLAT
status, result = connect.describe_index(collection_list[20 + i])
status, result = connect.get_index_info(collection_list[20 + i])
assert result._index_type == IndexType.IVF_SQ8
status, result = connect.describe_index(collection_list[30 + i])
status, result = connect.get_index_info(collection_list[30 + i])
assert result._index_type == IndexType.FLAT
status, result = connect.describe_index(collection_list[40 + i])
status, result = connect.get_index_info(collection_list[40 + i])
assert result._index_type == IndexType.IVFLAT
status, result = connect.describe_index(collection_list[50 + i])
status, result = connect.get_index_info(collection_list[50 + i])
assert result._index_type == IndexType.IVF_SQ8
#search
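The loop above spot-checks one index type per block of ten collections; written out as a small helper it reads as below (the offsets and types mirror the assertions above, the helper name is illustrative):
def check_index_types(connect, collection_list, expected_types):
    # expected_types maps a block's starting offset to the IndexType built for it,
    # e.g. {0: IndexType.FLAT, 10: IndexType.IVFLAT, 20: IndexType.IVF_SQ8}
    for offset, expected in expected_types.items():
        for i in range(10):
            status, info = connect.get_index_info(collection_list[offset + i])
            assert status.OK()
            assert info._index_type == expected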

View File

@ -88,7 +88,7 @@ class TestCreateBase:
tag_name = gen_unique_str()
status = connect.create_partition(collection, tag_name)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
tag_list = []
for item in res:
@ -157,7 +157,7 @@ class TestCreateBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nq * 2
def test_create_partition_insert_same_tags_two_collections(self, connect, collection):
@ -183,9 +183,9 @@ class TestCreateBase:
status, ids = connect.insert(collection_new, vectors, ids, partition_tag=tag)
status = connect.flush([collection, collection_new])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert res == nq
status, res = connect.count_collection(collection_new)
status, res = connect.count_entities(collection_new)
assert res == nq
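The same partition tag can live in two collections without interfering, and `count_entities` stays scoped to its own collection; a sketch of that check, assuming both collections already exist and using the module-level `nq`/`tag`:
def check_same_tag_two_collections(connect, collection, collection_new):
    assert connect.create_partition(collection, tag).OK()
    assert connect.create_partition(collection_new, tag).OK()
    vectors = gen_vectors(nq, dim)
    status, ids = connect.insert(collection, vectors, partition_tag=tag)
    status, ids = connect.insert(collection_new, vectors, partition_tag=tag)
    assert connect.flush([collection, collection_new]).OK()
    # each collection counts only its own entities
    status, res = connect.count_entities(collection)
    assert res == nq
    status, res = connect.count_entities(collection_new)
    assert res == nq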
@ -193,38 +193,38 @@ class TestShowBase:
"""
******************************************************************
The following cases are used to test `show_partitions` function
The following cases are used to test `list_partitions` function
******************************************************************
"""
def test_show_partitions(self, connect, collection):
def test_list_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
method: create partition first, then call function: show_partitions
method: create partition first, then call function: list_partitions
expected: status ok, partition correct
'''
status = connect.create_partition(collection, tag)
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
def test_show_partitions_no_partition(self, connect, collection):
def test_list_partitions_no_partition(self, connect, collection):
'''
target: test show partitions with collection name, check status and partitions returned
method: call function: show_partitions
method: call function: list_partitions
expected: status ok, partitions correct
'''
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
def test_show_multi_partitions(self, connect, collection):
'''
target: test show partitions, check status and partitions returned
method: create partitions first, then call function: show_partitions
method: create partitions first, then call function: list_partitions
expected: status ok, partitions correct
'''
tag_new = gen_unique_str()
status = connect.create_partition(collection, tag)
status = connect.create_partition(collection, tag_new)
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
assert status.OK()
@ -316,7 +316,7 @@ class TestDropBase:
status = connect.create_partition(collection, tag)
status = connect.drop_partition(collection, tag)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
@ -356,7 +356,7 @@ class TestDropBase:
status = connect.drop_partition(collection, tag)
time.sleep(2)
assert not status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
@ -373,7 +373,7 @@ class TestDropBase:
time.sleep(2)
status = connect.create_partition(collection, tag)
assert status.OK()
status, res = connect.show_partitions(collection)
status, res = connect.list_partitions(collection)
tag_list = []
for item in res:
tag_list.append(item.tag)
@ -417,13 +417,13 @@ class TestNameInvalid(object):
status = connect.drop_partition(collection, tag_name)
assert not status.OK()
def test_show_partitions_with_invalid_collection_name(self, connect, collection, get_collection_name):
def test_list_partitions_with_invalid_collection_name(self, connect, collection, get_collection_name):
'''
target: test show partitions, with invalid collection name, check status returned
method: call function: show_partitions
method: call function: list_partitions
expected: status not ok
'''
collection_name = get_collection_name
status = connect.create_partition(collection, tag)
status, res = connect.show_partitions(collection_name)
status, res = connect.list_partitions(collection_name)
assert not status.OK()
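`list_partitions` returns partition objects whose `.tag` field the drop cases collect; the create/drop round trip above reduces to this sketch (same fixtures, module-level `tag`):
def check_partition_round_trip(connect, collection):
    assert connect.create_partition(collection, tag).OK()
    status, partitions = connect.list_partitions(collection)
    assert status.OK()
    assert tag in [p.tag for p in partitions]
    # after dropping, the tag disappears from the listing
    assert connect.drop_partition(collection, tag).OK()
    status, partitions = connect.list_partitions(collection)
    assert tag not in [p.tag for p in partitions]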

View File

@ -220,7 +220,7 @@ class TestSearchBase:
query_ids = non_exist_id
logging.getLogger().info(query_ids)
logging.getLogger().info(collection)
logging.getLogger().info(connect.describe_collection(collection))
logging.getLogger().info(connect.get_collection_info(collection))
status, result = connect.search_by_ids(collection, query_ids, top_k, params={})
assert not status.OK()
@ -295,7 +295,7 @@ class TestSearchBase:
def test_search_index_delete(self, connect, collection):
vectors, ids = self.init_data(connect, collection)
query_ids = ids[0:nq]
status = connect.delete_by_id(collection, [query_ids[0]])
status = connect.delete_entity_by_id(collection, [query_ids[0]])
assert status.OK()
status = connect.flush([collection])
status, result = connect.search_by_ids(collection, query_ids, top_k, params={})
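Deleting one of the query ids before `search_by_ids` is what the case above sets up; a compact sketch, assuming init_data-style setup handed in as `ids` and the module-level `nq`/`top_k` (the hunk cuts off before the result assertions, so only the statuses are checked here):
def check_search_by_ids_after_delete(connect, collection, ids):
    query_ids = ids[:nq]
    # delete the first query id, flush, then search by the original id list
    assert connect.delete_entity_by_id(collection, [query_ids[0]]).OK()
    assert connect.flush([collection]).OK()
    status, result = connect.search_by_ids(collection, query_ids, top_k, params={})
    assert status.OK()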

View File

@ -585,7 +585,7 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(ip_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ip_collection))
logging.getLogger().info(connect.get_index_info(ip_collection))
query_vecs = [[0.50 for i in range(dim)]]
distance_0 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[0]))
distance_1 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[1]))
@ -607,8 +607,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(jac_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(jac_collection))
logging.getLogger().info(connect.describe_index(jac_collection))
logging.getLogger().info(connect.get_collection_info(jac_collection))
logging.getLogger().info(connect.get_index_info(jac_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, jac_collection, nb=1, insert=False)
distance_0 = jaccard(query_int_vectors[0], int_vectors[0])
distance_1 = jaccard(query_int_vectors[0], int_vectors[1])
@ -632,8 +632,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(ham_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(ham_collection))
logging.getLogger().info(connect.describe_index(ham_collection))
logging.getLogger().info(connect.get_collection_info(ham_collection))
logging.getLogger().info(connect.get_index_info(ham_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, ham_collection, nb=1, insert=False)
distance_0 = hamming(query_int_vectors[0], int_vectors[0])
distance_1 = hamming(query_int_vectors[0], int_vectors[1])
@ -657,8 +657,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(substructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(substructure_collection))
logging.getLogger().info(connect.describe_index(substructure_collection))
logging.getLogger().info(connect.get_collection_info(substructure_collection))
logging.getLogger().info(connect.get_index_info(substructure_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, substructure_collection, nb=1, insert=False)
distance_0 = substructure(query_int_vectors[0], int_vectors[0])
distance_1 = substructure(query_int_vectors[0], int_vectors[1])
@ -683,8 +683,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(substructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(substructure_collection))
logging.getLogger().info(connect.describe_index(substructure_collection))
logging.getLogger().info(connect.get_collection_info(substructure_collection))
logging.getLogger().info(connect.get_index_info(substructure_collection))
query_int_vectors, query_vecs = gen_binary_sub_vectors(int_vectors, 2)
search_param = get_search_param(index_type)
status, result = connect.search_vectors(substructure_collection, top_k, query_vecs, params=search_param)
@ -711,8 +711,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(superstructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(superstructure_collection))
logging.getLogger().info(connect.describe_index(superstructure_collection))
logging.getLogger().info(connect.get_collection_info(superstructure_collection))
logging.getLogger().info(connect.get_index_info(superstructure_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, superstructure_collection, nb=1, insert=False)
distance_0 = superstructure(query_int_vectors[0], int_vectors[0])
distance_1 = superstructure(query_int_vectors[0], int_vectors[1])
@ -737,8 +737,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(superstructure_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(superstructure_collection))
logging.getLogger().info(connect.describe_index(superstructure_collection))
logging.getLogger().info(connect.get_collection_info(superstructure_collection))
logging.getLogger().info(connect.get_index_info(superstructure_collection))
query_int_vectors, query_vecs = gen_binary_super_vectors(int_vectors, 2)
search_param = get_search_param(index_type)
status, result = connect.search_vectors(superstructure_collection, top_k, query_vecs, params=search_param)
@ -765,8 +765,8 @@ class TestSearchBase:
"nlist": 16384
}
connect.create_index(tanimoto_collection, index_type, index_param)
logging.getLogger().info(connect.describe_collection(tanimoto_collection))
logging.getLogger().info(connect.describe_index(tanimoto_collection))
logging.getLogger().info(connect.get_collection_info(tanimoto_collection))
logging.getLogger().info(connect.get_index_info(tanimoto_collection))
query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, tanimoto_collection, nb=1, insert=False)
distance_0 = tanimoto(query_int_vectors[0], int_vectors[0])
distance_1 = tanimoto(query_int_vectors[0], int_vectors[1])
@ -790,7 +790,7 @@ class TestSearchBase:
pytest.skip("rnsg not support in ip")
vectors, ids = self.init_data(connect, ip_collection, nb=2)
connect.create_index(ip_collection, index_type, index_param)
logging.getLogger().info(connect.describe_index(ip_collection))
logging.getLogger().info(connect.get_index_info(ip_collection))
query_vecs = [[0.50 for i in range(dim)]]
search_param = get_search_param(index_type)
status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
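The IP search pattern repeated above, condensed into one callable sketch (the index choice comes from the caller; `get_search_param` and the module-level `dim`/`top_k` are the suite's own helpers):
def check_ip_search(connect, ip_collection, index_type, index_param):
    assert connect.create_index(ip_collection, index_type, index_param).OK()
    logging.getLogger().info(connect.get_index_info(ip_collection))
    query_vecs = [[0.50 for _ in range(dim)]]
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
    assert status.OK()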

View File

@ -32,10 +32,10 @@ class TestWalBase:
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
status, res = connect.get_vectors_by_ids(collection, [ids[0]])
status, res = connect.get_entity_by_id(collection, [ids[0]])
logging.getLogger().info(res)
assert status.OK()
assert_equal_vector(res[0], vectors[0])
@ -51,13 +51,13 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
status = connect.delete_by_id(collection, ids)
status = connect.delete_entity_by_id(collection, ids)
assert status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 0
@ -72,10 +72,10 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vector)
assert status.OK()
connect.flush([collection])
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 1
@ -90,14 +90,14 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vectors)
assert status.OK()
status = connect.flush([collection])
status = connect.delete_by_id(collection, [0])
status = connect.delete_entity_by_id(collection, [0])
connect.flush([collection])
collection_new = gen_unique_str()
status = connect.delete_by_id(collection_new, ids)
status = connect.delete_entity_by_id(collection_new, ids)
assert not status.OK()
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == nb
@ -112,7 +112,7 @@ class TestWalBase:
status, ids = connect.add_vectors(collection, vector)
assert status.OK()
status = connect.flush([collection])
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
logging.getLogger().info(res) # should be 0 because no auto flush
logging.getLogger().info("Stop server and restart")
@ -120,10 +120,10 @@ class TestWalBase:
# time.sleep(15)
status = connect.flush([collection])
assert status.OK()
status, res = connect.count_collection(collection)
status, res = connect.count_entities(collection)
assert status.OK()
assert res == 1
status, res = connect.get_vectors_by_ids(collection, [ids[0]])
status, res = connect.get_entity_by_id(collection, [ids[0]])
logging.getLogger().info(res)
assert status.OK()
assert_equal_vector(res[0], vector[0])
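All of the WAL cases follow the same flush-then-verify rhythm with the renamed interfaces; a minimal sketch of that rhythm, under the usual fixtures and helpers:
def check_wal_flush_visibility(connect, collection):
    vectors = gen_vectors(nb, dim)
    status, ids = connect.add_vectors(collection, vectors)
    assert status.OK()
    # nothing is guaranteed to be visible until the explicit flush
    assert connect.flush([collection]).OK()
    status, count = connect.count_entities(collection)
    assert status.OK()
    assert count == nb
    status, res = connect.get_entity_by_id(collection, [ids[0]])
    assert status.OK()
    assert_equal_vector(res[0], vectors[0])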