Enable test cases after code fix (#7239)
Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>
parent 58b2089692
commit e782ee944d
@@ -78,7 +78,7 @@ pipeline {
--install-extra-arg "--set etcd.enabled=false --set externalEtcd.enabled=true --set externalEtcd.endpoints={\$KRTE_POD_IP:2379}" \
--skip-export-logs \
--skip-cleanup \
--test-extra-arg "--tags smoke L0 L1 L2"
--test-extra-arg "--tags smoke L0 L1 L2" \
--test-timeout ${e2e_timeout_seconds}
"""
// } else if ("${MILVUS_CLIENT}" == "pymilvus-orm") {
@@ -47,6 +47,8 @@ spec:
env:
- name: ETCD_LOG_LEVEL
value: debug
- name: ETCD_QUOTA_BACKEND_BYTES
value: "4294967296"
command:
- etcd
- -advertise-client-urls=http://127.0.0.1:2379
@@ -54,7 +56,7 @@ spec:
resources:
limits:
cpu: "2"
memory: "4Gi"
memory: "6Gi"
ports:
- containerPort: 2379
name: client
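An aside on the numbers above (not part of the commit): the new ETCD_QUOTA_BACKEND_BYTES value of 4294967296 is exactly 4 GiB, and the container memory limit moves from 4Gi to 6Gi, which would leave headroom above that backend quota. A quick sanity check of the arithmetic:

```python
# Illustrative arithmetic only; the variable names below are not from the manifest.
quota_backend_bytes = 4294967296
assert quota_backend_bytes == 4 * 1024**3             # 4 GiB etcd backend quota
new_memory_limit_bytes = 6 * 1024**3                  # the raised "6Gi" pod limit
assert new_memory_limit_bytes > quota_backend_bytes   # limit now exceeds the quota
```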
@@ -385,6 +385,10 @@ def tanimoto(x, y):
y = np.asarray(y, np.bool)
return -np.log2(np.double(np.bitwise_and(x, y).sum()) / np.double(np.bitwise_or(x, y).sum()))

def tanimoto_calc(x, y):
x = np.asarray(x, np.bool)
y = np.asarray(y, np.bool)
return np.double((len(x) - np.bitwise_xor(x, y).sum())) / (len(y) + np.bitwise_xor(x, y).sum())

def substructure(x, y):
x = np.asarray(x, np.bool)
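An illustrative aside (not part of the commit): the original tanimoto helper returns a log-scaled intersection-over-union score, while the new tanimoto_calc scores bit agreements against mismatches, so the two formulas generally produce different values. The sketch below, using made-up toy vectors and plain NumPy, simply evaluates both expressions side by side:

```python
import numpy as np

# Toy bit vectors chosen only for illustration.
x = np.asarray([1, 0, 1, 1, 0, 0, 1, 0], dtype=bool)
y = np.asarray([1, 1, 1, 0, 0, 0, 0, 0], dtype=bool)

inter = np.bitwise_and(x, y).sum()      # bits set in both vectors
union = np.bitwise_or(x, y).sum()       # bits set in either vector
mismatch = np.bitwise_xor(x, y).sum()   # bits that differ

old_score = -np.log2(inter / union)                    # tanimoto(): -log2 of intersection/union
new_score = (len(x) - mismatch) / (len(y) + mismatch)  # tanimoto_calc(): agreements vs. mismatches
print(old_score, new_score)  # ~1.32 vs. ~0.45 for these vectors
```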
@@ -409,7 +413,7 @@ def compare_distance_2d_vector(x, y, distance, metric, sqrt):
elif metric == "HAMMING":
distance_i = hamming(x[i], y[j])
elif metric == "TANIMOTO":
distance_i = tanimoto(x[i], y[j])
distance_i = tanimoto_calc(x[i], y[j])
elif metric == "JACCARD":
distance_i = jaccard(x[i], y[j])
else:
@@ -12,7 +12,7 @@ pytest-print==0.2.1
pytest-level==0.1.1
pytest-xdist==2.2.1
# pytest-parallel
pymilvus==2.0.0rc5.dev18
pymilvus==2.0.0rc5.dev20
pytest-rerunfailures==9.1.1
git+https://github.com/Projectplace/pytest-tags
ndg-httpsclient
@@ -398,6 +398,7 @@ class TestPartitionOperations(TestcaseBase):
assert collection_w.has_partition(partition_name)[0]

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="skip for memory issue check")
def test_partition_maximum_partitions(self):
"""
target: verify create maximum partitions
@@ -1512,9 +1512,9 @@ class TestCollectionSearch(TestcaseBase):
assert abs(res[0]._distances[0] - min(distance_0, distance_1)) <= epsilon

@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(expression="500 <= int64 < 1000",reason="issue:7142")
@pytest.mark.parametrize("expression",cf.gen_normal_expressions())
def test_search_with_expression(self, dim, expression , _async):
@pytest.mark.xfail(expression="500 <= int64 < 1000", reason="issue:7142")
@pytest.mark.parametrize("expression", cf.gen_normal_expressions())
def test_search_with_expression(self, dim, expression, _async):
"""
target: test search with different expressions
method: test search with different expressions
@@ -1523,14 +1523,14 @@ class TestCollectionSearch(TestcaseBase):
# 1. initialize with data
nb = 1000
collection_w, _vectors, _, insert_ids = self.init_collection_general(prefix, True,
nb, dim=dim,
is_index=True)
nb, dim=dim,
is_index=True)

# filter result with expression in colllection
_vectors = _vectors[0]
expression = expression.replace("&&", "and").replace("||", "or")
filter_ids = []
for i,_id in enumerate(insert_ids):
for i, _id in enumerate(insert_ids):
int64 = _vectors.int64[i]
float = _vectors.float[i]
if not expression or eval(expression):
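For context, a standalone sketch (made-up data, not the test code itself) of the filtering step above: the Milvus-style boolean operators are rewritten into Python and the expression is evaluated per row to precompute which ids a filtered search is expected to return.

```python
# Hypothetical rows standing in for the inserted entities; field names are illustrative.
rows = [{"id": i, "int64": i, "float": float(i)} for i in range(1000)]

expression = "int64 >= 500 && int64 < 1000"
expression = expression.replace("&&", "and").replace("||", "or")  # make it Python-evaluable

filter_ids = [row["id"] for row in rows
              if eval(expression, {}, {"int64": row["int64"], "float": row["float"]})]
print(len(filter_ids))  # 500 rows satisfy the filter
```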
@@ -1545,13 +1545,13 @@ class TestCollectionSearch(TestcaseBase):
log.info("test_search_with_expression: searching with expression: %s" % expression)
vectors = [[random.random() for _ in range(dim)] for _ in range(default_nq)]
search_res, _ = collection_w.search(vectors[:default_nq], default_search_field,
default_search_params, nb, expression,
_async=_async,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb,len(filter_ids)),
"_async": _async})
default_search_params, nb, expression,
_async=_async,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb, len(filter_ids)),
"_async": _async})
if _async:
search_res.done()
search_res = search_res.result()
@@ -1562,8 +1562,8 @@ class TestCollectionSearch(TestcaseBase):
assert set(ids).issubset(filter_ids_set)

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(expression=f"500 <= {default_float_field_name} <= 1000",reason="issue:7142")
@pytest.mark.parametrize("expression",cf.gen_normal_expressions_field(default_float_field_name))
@pytest.mark.xfail(expression=f"500 <= {default_float_field_name} <= 1000", reason="issue:7142")
@pytest.mark.parametrize("expression", cf.gen_normal_expressions_field(default_float_field_name))
def test_search_with_expression_auto_id(self, dim, expression, _async):
"""
target: test search with different expressions
@@ -1573,14 +1573,14 @@ class TestCollectionSearch(TestcaseBase):
# 1. initialize with data
nb = 1000
collection_w, _vectors, _, insert_ids = self.init_collection_general(prefix, True, nb,
auto_id=True,
dim=dim,
is_index=True)
auto_id=True,
dim=dim,
is_index=True)


# filter result with expression in colllection
_vectors = _vectors[0]
expression = expression.replace("&&", "and").replace("||", "or")
expression = expression.replace("&&", "and").replace("||", "or")
filter_ids = []
for i, _id in enumerate(insert_ids):
exec(f"{default_float_field_name} = _vectors.{default_float_field_name}[i]")
@@ -1597,13 +1597,13 @@ class TestCollectionSearch(TestcaseBase):
log.info("test_search_with_expression: searching with expression: %s" % expression)
vectors = [[random.random() for _ in range(dim)] for _ in range(default_nq)]
search_res, _ = collection_w.search(vectors[:default_nq], default_search_field,
default_search_params, nb, expression,
_async=_async,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb,len(filter_ids)),
"_async": _async})
default_search_params, nb, expression,
_async=_async,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb, len(filter_ids)),
"_async": _async})
if _async:
search_res.done()
search_res = search_res.result()
@@ -22,6 +22,20 @@ class TestUtilityParams(TestcaseBase):
def get_invalid_metric_type(self, request):
if request.param == [] or request.param == "":
pytest.skip("metric empty is valid for distance calculation")
if isinstance(request.param, str):
pytest.skip("string is valid type for metric")
yield request.param

@pytest.fixture(scope="function", params=ct.get_invalid_strs)
def get_invalid_metric_value(self, request):
if request.param == [] or request.param == "":
pytest.skip("metric empty is valid for distance calculation")
if not isinstance(request.param, str):
pytest.skip("Skip invalid type for metric")
yield request.param

@pytest.fixture(scope="function", params=["JACCARD", "Superstructure", "Substructure"])
def get_not_support_metric(self, request):
yield request.param

"""
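As an aside (an assumed, self-contained example rather than the Milvus code): the two new fixtures above split one shared list of invalid inputs by skipping the params they are not responsible for, so invalid metric types and invalid metric string values are exercised by separate tests. A minimal sketch of that pattern:

```python
# Hypothetical pytest module; the input list and test body are placeholders.
import pytest

INVALID_INPUTS = [None, 1, [], "", "not_a_metric"]

@pytest.fixture(params=INVALID_INPUTS)
def invalid_metric_value(request):
    if request.param in ([], ""):
        pytest.skip("empty metric is treated as valid")
    if not isinstance(request.param, str):
        pytest.skip("non-string params belong to the type-checking fixture")
    yield request.param

def test_rejects_bad_metric_value(invalid_metric_value):
    # Stand-in assertion; the real tests call calc_distance and expect an error.
    assert isinstance(invalid_metric_value, str)
```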
@@ -203,6 +217,7 @@ class TestUtilityParams(TestcaseBase):
"is illegal".format(invalid_vector)})

@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="issue 7038")
def test_calc_distance_right_vector_invalid_value(self, get_invalid_vector_dict):
"""
target: test calculated distance with invalid vectors
@@ -221,8 +236,7 @@ class TestUtilityParams(TestcaseBase):
"vectors with different dimension"})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7086")
def test_calc_distance_invalid_metric(self, get_invalid_metric_type):
def test_calc_distance_invalid_metric_type(self, get_invalid_metric_type):
"""
target: test calculated distance with invalid metric
method: input invalid metric
@@ -235,6 +249,45 @@ class TestUtilityParams(TestcaseBase):
op_r = {"float_vectors": vectors_r}
metric = get_invalid_metric_type
params = {"metric": metric}
self.utility_wrap.calc_distance(op_l, op_r, params,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "params value {} "
"is illegal".format(params)})

@pytest.mark.tags(CaseLabel.L2)
def test_calc_distance_invalid_metric_value(self, get_invalid_metric_value):
"""
target: test calculated distance with invalid metric
method: input invalid metric
expected: raise exception
"""
self._connect()
vectors_l = cf.gen_vectors(default_nb, default_dim)
vectors_r = cf.gen_vectors(default_nb, default_dim)
op_l = {"float_vectors": vectors_l}
op_r = {"float_vectors": vectors_r}
metric = get_invalid_metric_value
params = {"metric": metric}
self.utility_wrap.calc_distance(op_l, op_r, params,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "Invalid metric type"})

@pytest.mark.tags(CaseLabel.L2)
def test_calc_distance_not_support_metric(self, get_not_support_metric):
"""
target: test calculated distance with invalid metric
method: input invalid metric
expected: raise exception
"""
self._connect()
vectors_l = cf.gen_vectors(default_nb, default_dim)
vectors_r = cf.gen_vectors(default_nb, default_dim)
op_l = {"float_vectors": vectors_l}
op_r = {"float_vectors": vectors_r}
metric = get_not_support_metric
params = {"metric": metric}
self.utility_wrap.calc_distance(op_l, op_r, params,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
@@ -279,7 +332,6 @@ class TestUtilityParams(TestcaseBase):
"vectors with different dimension"})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7036")
def test_calc_distance_collection_before_load(self):
"""
target: test calculated distance when entities is not ready
@@ -299,7 +351,8 @@ class TestUtilityParams(TestcaseBase):
self.utility_wrap.calc_distance(op_l, op_r, params,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "Failed to fetch vectors"})
"err_msg": "collection {} was not "
"loaded into memory)".format(collection_w.name)})

class TestUtilityBase(TestcaseBase):
""" Test case of index interface """
@@ -312,7 +365,7 @@ class TestUtilityBase(TestcaseBase):
def metric(self, request):
yield request.param

@pytest.fixture(scope="function", params=["HAMMING", "TANIMOTO", "JACCARD"])
@pytest.fixture(scope="function", params=["HAMMING", "TANIMOTO"])
def metric_binary(self, request):
yield request.param
@@ -596,6 +649,7 @@ class TestUtilityBase(TestcaseBase):
"metric": metric})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7217")
def test_calc_distance_default_metric(self, sqrt):
"""
target: test calculated distance with default param
@@ -615,7 +669,6 @@ class TestUtilityBase(TestcaseBase):
"sqrt": sqrt})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7064")
def test_calc_distance_binary_metric(self, metric_binary):
"""
target: test calculate distance with binary vectors
@@ -629,9 +682,8 @@ class TestUtilityBase(TestcaseBase):
op_l = {"bin_vectors": vectors_l}
op_r = {"bin_vectors": vectors_r}
params = {"metric": metric_binary}
if metric_binary == "HAMMING":
vectors_l = raw_vectors_l
vectors_r = raw_vectors_r
vectors_l = raw_vectors_l
vectors_r = raw_vectors_r
self.utility_wrap.calc_distance(op_l, op_r, params,
check_task=CheckTasks.check_distance,
check_items={"vectors_l": vectors_l,
@@ -745,7 +797,6 @@ class TestUtilityBase(TestcaseBase):
"sqrt": sqrt})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7046")
def test_calc_distance_from_partition_ids(self, metric, sqrt):
"""
target: test calculated distance from one partition entities
@@ -773,7 +824,6 @@ class TestUtilityBase(TestcaseBase):
"sqrt": sqrt})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7046")
def test_calc_distance_from_partitions(self, metric, sqrt):
"""
target: test calculated distance between entities from partitions
@@ -800,7 +850,6 @@ class TestUtilityBase(TestcaseBase):
"sqrt": sqrt})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7046")
def test_calc_distance_left_vectors_and_partition_ids(self, metric, sqrt):
"""
target: test calculated distance between vectors and partition entities
@@ -827,7 +876,6 @@ class TestUtilityBase(TestcaseBase):
"sqrt": sqrt})

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 7046")
def test_calc_distance_right_vectors_and_partition_ids(self, metric, sqrt):
"""
target: test calculated distance between vectors and partition entities