Mirror of https://gitee.com/milvus-io/milvus.git, synced 2024-11-30 02:48:45 +08:00
commit aad3d47a06, parent 86e9f1818f
@@ -207,16 +207,17 @@ class ApiCollectionWrapper:
         return res, check_result

     @trace()
-    def query_iterator(self, batch_size, limit=-1, expr=None, output_fields=None, partition_names=None, timeout=None, check_task=None,
-                       check_items=None, **kwargs):
+    def query_iterator(self, batch_size=1000, limit=-1, expr=None, output_fields=None, partition_names=None, timeout=None,
+                       check_task=None, check_items=None, **kwargs):
         # time.sleep(5)
         timeout = TIMEOUT if timeout is None else timeout

         func_name = sys._getframe().f_code.co_name
-        res, check = api_request([self.collection.query_iterator, batch_size, limit, expr, output_fields, partition_names, timeout], **kwargs)
+        res, check = api_request([self.collection.query_iterator, batch_size, limit, expr, output_fields, partition_names,
+                                  timeout], **kwargs)
         check_result = ResponseChecker(res, func_name, check_task, check_items, check,
-                                       expression=expr, partition_names=partition_names,
-                                       output_fields=output_fields,
+                                       batch_size=batch_size, limit=limit, expression=expr,
+                                       output_fields=output_fields, partition_names=partition_names,
                                        timeout=timeout, **kwargs).run()
         return res, check_result

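For orientation, here is a small self-contained sketch (not the real pymilvus API; the iterator class and helper below are stand-ins) of why giving batch_size a default of 1000 in the wrapper signature lets callers omit the argument entirely, as the new test_query_iterator_using_default_batch_size test does later in this diff:

# Hypothetical stand-in for a query iterator; it only illustrates the
# batch_size=1000 default added to the wrapper signature above.
class FakeQueryIterator:
    def __init__(self, rows, batch_size):
        self._rows = rows
        self._batch_size = batch_size
        self._pos = 0

    def next(self):
        batch = self._rows[self._pos:self._pos + self._batch_size]
        self._pos += len(batch)
        return batch


def query_iterator(rows, batch_size=1000, limit=-1):
    # limit=-1 is treated as "no limit", mirroring the wrapper's default
    data = rows if limit == -1 else rows[:limit]
    return FakeQueryIterator(data, batch_size)


it = query_iterator(list(range(2500)))  # batch_size falls back to 1000
sizes = []
while True:
    batch = it.next()
    if not batch:
        break
    sizes.append(len(batch))
print(sizes)  # [1000, 1000, 500]
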
@@ -449,6 +449,7 @@ class ResponseChecker:
         assert len(res) <= check_items["limit"]
         assert len(pk_list) == len(set(pk_list))
         if check_items.get("count", None):
             log.info(len(pk_list))
             assert len(pk_list) == check_items["count"]
         if check_items.get("exp_ids", None):
             assert pk_list == check_items["exp_ids"]
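The assertions above boil down to: the primary keys collected across all iterator batches must stay within the limit, must be unique, and may optionally be checked against an expected count or an exact id list. A minimal self-contained sketch of that logic (check_pk_list and its check_items dict are illustrative names, not the actual ResponseChecker interface):

def check_pk_list(pk_list, check_items):
    # Results must never exceed the requested limit, if one was given.
    if check_items.get("limit") is not None:
        assert len(pk_list) <= check_items["limit"]
    # No primary key may be returned twice across batches.
    assert len(pk_list) == len(set(pk_list))
    # Optional exact-count and exact-id checks, as in the hunk above.
    if check_items.get("count", None):
        assert len(pk_list) == check_items["count"]
    if check_items.get("exp_ids", None):
        assert pk_list == check_items["exp_ids"]


check_pk_list(list(range(10)), {"limit": 10, "count": 10, "exp_ids": list(range(10))})
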
@@ -11,6 +11,7 @@ default_nb_medium = 5000
 default_top_k = 10
 default_nq = 2
 default_limit = 10
+default_batch_size = 1000
 max_limit = 16384
 default_search_params = {"metric_type": "COSINE", "params": {"nprobe": 10}}
 default_search_ip_params = {"metric_type": "IP", "params": {"nprobe": 10}}

@@ -1688,6 +1688,7 @@ class TestCreateCollection(TestcaseBase):
             assert item in self.utility_wrap.list_collections()[0]

     @pytest.mark.tags(CaseLabel.L1)
+    @pytest.mark.skip("not support default_value now")
     def test_create_collection_using_default_value(self, auto_id):
         """
         target: test create collection with default_value

@@ -1761,6 +1762,7 @@ class TestCreateCollectionInvalid(TestcaseBase):
                                  check_items={"schema": schema})

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     @pytest.mark.parametrize("default_value", ["abc", 9.09, 1, False])
     def test_create_collection_with_invalid_default_value_float(self, default_value):
         """

@@ -1779,6 +1781,7 @@ class TestCreateCollectionInvalid(TestcaseBase):
                                          ct.err_msg: "default value type mismatches field schema type"})

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     @pytest.mark.parametrize("default_value", ["abc", 9.09, 1, False])
     def test_create_collection_with_invalid_default_value_int8(self, default_value):
         """

@@ -1797,6 +1800,7 @@ class TestCreateCollectionInvalid(TestcaseBase):
                                          ct.err_msg: "default value type mismatches field schema type"})

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     def test_create_collection_with_pk_field_using_default_value(self):
         """
         target: test create collection with pk field using default value

@@ -1827,6 +1831,7 @@ class TestCreateCollectionInvalid(TestcaseBase):
                                          ct.err_msg: "pk field schema can not set default value"})

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     def test_create_collection_with_json_field_using_default_value(self):
         """
         target: test create collection with json field using default value

@@ -1157,7 +1157,7 @@ class TestNewIndexBinary(TestcaseBase):
                                          ct.err_msg: "Invalid metric_type: L2, which does not match the index type: BIN_IVF_FLAT"})

     @pytest.mark.tags(CaseLabel.L2)
-    @pytest.mark.parametrize("metric_type", ["L2", "IP", "COSINE", "JACCARD", "HAMMING", "TANIMOTO"])
+    @pytest.mark.parametrize("metric_type", ["L2", "IP", "COSINE", "JACCARD", "HAMMING"])
     def test_create_binary_index_HNSW(self, metric_type):
         """
         target: test create binary index hnsw

@@ -635,7 +635,7 @@ class TestPartitionOperations(TestcaseBase):
         assert collection_w.has_partition(partition_name)[0]

     @pytest.mark.tags(CaseLabel.L2)
-    @pytest.mark.skip(reason="skip temporarily for debug")
+    # @pytest.mark.skip(reason="skip temporarily for debug")
     def test_partition_maximum_partitions(self):
         """
         target: verify create maximum partitions

@@ -661,6 +661,7 @@ class TestPartitionOperations(TestcaseBase):
         for t in threads:
             t.join()
         p_name = cf.gen_unique_str()
+        log.info(f"partitions: {len(collection_w.partitions)}")
         self.partition_wrap.init_partition(
             collection_w.collection, p_name,
             check_task=CheckTasks.err_res,

@@ -672,7 +672,7 @@ class TestQueryParams(TestcaseBase):
         # 3. query
         collection_w.load()
         # test for int
-        _id = random.randint(0, ct.default_nb)
+        _id = random.randint(limit, ct.default_nb - limit)
         ids = [i for i in range(_id, _id + limit)]
         expression = f"{expr_prefix}({json_field}['listInt'], {ids})"
         res = collection_w.query(expression)[0]

@@ -736,7 +736,7 @@ class TestQueryParams(TestcaseBase):
         collection_w.load()

         # test for int
-        _id = random.randint(0, ct.default_nb)
+        _id = random.randint(limit, ct.default_nb - limit)
         ids = [i for i in range(_id, _id + limit)]
         expression = f"{expr_prefix}(listInt, {ids})"
         res = collection_w.query(expression)[0]

@@ -2200,6 +2200,7 @@ class TestQueryOperation(TestcaseBase):
                            check_task=CheckTasks.check_query_results, check_items={exp_res: res})

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     def test_query_using_all_types_of_default_value(self):
         """
         target: test create collection with default_value

@@ -3311,6 +3312,21 @@ class TestQueryIterator(TestcaseBase):
                                     check_items={"count": ct.default_nb,
                                                  "batch_size": batch_size})

+    @pytest.mark.tags(CaseLabel.L1)
+    def test_query_iterator_using_default_batch_size(self):
+        """
+        target: test query iterator normal
+        method: 1. query iterator
+                2. check the result, expect pk
+        expected: query successfully
+        """
+        # 1. initialize with data
+        collection_w = self.init_collection_general(prefix, True)[0]
+        # 2. query iterator
+        collection_w.query_iterator(check_task=CheckTasks.check_query_iterator,
+                                    check_items={"count": ct.default_nb,
+                                                 "batch_size": ct.default_batch_size})
+
     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("offset", [500, 1000, 1777])
     def test_query_iterator_with_offset(self, offset):
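As a rough illustration of what a check such as CheckTasks.check_query_iterator presumably verifies for the test above (an assumption about its behavior, sketched here with a generic iterator, not the framework's actual checker): every drained batch should contain exactly batch_size rows except possibly the last one, and the total must equal the expected count.

def check_iterator_batches(iterator, expected_count, batch_size):
    # Drain the iterator and record the size of every batch it returns.
    lengths = []
    while True:
        batch = iterator.next()
        if not batch:
            break
        lengths.append(len(batch))
    # Every batch except possibly the last must be a full batch.
    assert all(n == batch_size for n in lengths[:-1])
    if lengths:
        assert 0 < lengths[-1] <= batch_size
    assert sum(lengths) == expected_count

With the FakeQueryIterator sketch shown earlier, check_iterator_batches(query_iterator(list(range(2500))), 2500, 1000) would pass.
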
@@ -3321,7 +3337,7 @@ class TestQueryIterator(TestcaseBase):
         expected: query successfully
         """
         # 1. initialize with data
-        batch_size = 100
+        batch_size = 300
         collection_w = self.init_collection_general(prefix, True, is_index=False)[0]
         collection_w.create_index(ct.default_float_vec_field_name, {"metric_type": "L2"})
         collection_w.load()

@@ -3334,7 +3350,7 @@ class TestQueryIterator(TestcaseBase):

     @pytest.mark.tags(CaseLabel.L1)
     @pytest.mark.parametrize("batch_size", [10, 100, 777, 2000])
-    def test_query_iterator_with_different_limit(self, batch_size):
+    def test_query_iterator_with_different_batch_size(self, batch_size):
         """
         target: test query iterator normal
         method: 1. query iterator

@@ -3348,14 +3364,33 @@ class TestQueryIterator(TestcaseBase):
         collection_w.load()
         # 2. search iterator
         expr = "int64 >= 0"
-        collection_w.query_iterator(batch_size, expr=expr, offset=offset,
+        collection_w.query_iterator(batch_size=batch_size, expr=expr, offset=offset,
                                     check_task=CheckTasks.check_query_iterator,
                                     check_items={"count": ct.default_nb - offset,
                                                  "batch_size": batch_size})

     @pytest.mark.tags(CaseLabel.L2)
-    @pytest.mark.skip("issue #26397")
-    def test_query_iterator_invalid_limit_offset(self):
+    @pytest.mark.skip("issue #26767")
+    @pytest.mark.parametrize("offset", [0, 10, 100, 1000])
+    @pytest.mark.parametrize("limit", [0, 100, 1500, 2000, 10000])
+    def test_query_iterator_with_different_limit(self, limit, offset):
+        """
+        target: test query iterator normal
+        method: 1. query iterator
+                2. check the result, expect pk
+        expected: query successfully
+        """
+        # 1. initialize with data
+        collection_w = self.init_collection_general(prefix, True)[0]
+        # 2. query iterator
+        Count = limit - offset if limit <= ct.default_nb else ct.default_nb - offset
+        collection_w.query_iterator(limit=limit, expr="", offset=offset,
+                                    check_task=CheckTasks.check_query_iterator,
+                                    check_items={"count": max(Count, 0),
+                                                 "batch_size": ct.default_batch_size})
+
+    @pytest.mark.tags(CaseLabel.L2)
+    def test_query_iterator_invalid_batch_size(self):
         """
         target: test query iterator invalid limit and offset
         method: query iterator using invalid limit and offset
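The count expected by test_query_iterator_with_different_limit follows directly from the arithmetic in the hunk above: with ct.default_nb rows available, the offset skips rows, the limit caps them, and the result is clamped at zero. A small worked sketch (the default_nb value here is only an assumed placeholder, not necessarily the real ct.default_nb):

default_nb = 3000  # assumed placeholder for ct.default_nb in this sketch

def expected_count(limit, offset):
    # Mirrors: Count = limit - offset if limit <= ct.default_nb else ct.default_nb - offset
    count = limit - offset if limit <= default_nb else default_nb - offset
    return max(count, 0)

print(expected_count(100, 1000))   # 0     -> limit smaller than offset, clamp to zero
print(expected_count(1500, 100))   # 1400  -> limit fits within default_nb
print(expected_count(10000, 10))   # 2990  -> limit exceeds default_nb, so it is capped
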
@@ -3366,8 +3401,8 @@ class TestQueryIterator(TestcaseBase):
         collection_w = self.init_collection_general(prefix, True, nb=nb)[0]
         # 2. search iterator
         expr = "int64 >= 0"
-        error = {"err_code": 1, "err_msg": "invalid max query result window, limit [-1] is invalid, should be greater than 0"}
-        collection_w.query_iterator(-1, expr=expr, check_task=CheckTasks.err_res, check_items=error)
+        error = {"err_code": 1, "err_msg": "batch size cannot be less than zero"}
+        collection_w.query_iterator(batch_size=-1, expr=expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L0)
     @pytest.mark.parametrize("batch_size", [100, 500])

@@ -3382,7 +3417,7 @@ class TestQueryIterator(TestcaseBase):
         collection_w, _, _, insert_ids = self.init_collection_general(prefix, True, auto_id=auto_id)[0:4]

         # 2. query with limit
-        collection_w.query_iterator(batch_size,
+        collection_w.query_iterator(batch_size=batch_size,
                                     check_task=CheckTasks.check_query_iterator,
                                     check_items={"batch_size": batch_size,
                                                  "count": ct.default_nb,

@@ -1220,10 +1220,8 @@ class TestCollectionSearch(TestcaseBase):
     def _async(self, request):
         yield request.param

-    @pytest.fixture(scope="function", params=["JACCARD", "HAMMING", "TANIMOTO"])
+    @pytest.fixture(scope="function", params=["JACCARD", "HAMMING"])
     def metrics(self, request):
-        if request.param == "TANIMOTO":
-            pytest.skip("TANIMOTO not supported now")
         yield request.param

     @pytest.fixture(scope="function", params=[False, True])

@@ -3992,6 +3990,7 @@ class TestCollectionSearch(TestcaseBase):
         assert [res1[i].ids for i in range(nq)] == [res2[i].ids for i in range(nq)]

     @pytest.mark.tags(CaseLabel.L2)
+    @pytest.mark.skip("not support default_value now")
     def test_search_using_all_types_of_default_value(self, auto_id):
         """
         target: test create collection with default_value

@@ -8258,7 +8257,7 @@ class TestCollectionSearchJSON(TestcaseBase):
     def _async(self, request):
         yield request.param

-    @pytest.fixture(scope="function", params=["JACCARD", "HAMMING", "TANIMOTO"])
+    @pytest.fixture(scope="function", params=["JACCARD", "HAMMING"])
     def metrics(self, request):
         yield request.param