Mirror of https://gitee.com/milvus-io/milvus.git, synced 2024-11-30 10:59:32 +08:00
Unified python test log usage (#13619)
Signed-off-by: ThreadDao <yufen.zong@zilliz.com>
This commit is contained in:
parent cc057bb115
commit c4e3b3a19a
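
The change below swaps every ad-hoc `logging.getLogger()` call for the suite-wide logger that `utils/util_log.py` exports as `test_log`, imported in each test module as `log`. For context, here is a minimal sketch of the shape such a module could take — an illustration only, assuming a stdout handler and a `ci_test` logger name; the repo's actual handlers, format string, and any log-file setup may differ:

# utils/util_log.py -- hypothetical minimal sketch, not the repo's actual code
import logging
import sys


def _build_test_log(name="ci_test"):  # the logger name here is an assumption
    """Build the shared logger that test modules import as `log`."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:  # avoid stacking duplicate handlers on re-import
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter(
            "[%(asctime)s - %(levelname)s - %(name)s]: %(message)s"))
        logger.addHandler(handler)
    return logger


test_log = _build_test_log()

With that in place, each test file needs only the one-line import the hunks below add, and every call site shrinks from `logging.getLogger().info(...)` to `log.info(...)`.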
@@ -1,5 +1,4 @@
 import copy
-import logging
 import threading

 import pytest
@@ -8,6 +7,7 @@ from utils import utils as ut
 from common.constants import default_entity, default_entities, default_binary_entity, default_binary_entities, \
     default_fields
 from common.common_type import CaseLabel
+from utils.util_log import test_log as log

 ADD_TIMEOUT = 60
 uid = "test_insert"
@@ -38,7 +38,7 @@ class TestInsertBase:
     def get_simple_index(self, request, connect):
         if request.param["index_type"] in ut.index_cpu_not_support():
             pytest.skip("CPU not support index_type: ivf_sq8h")
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         return request.param

     @pytest.fixture(
@@ -253,7 +253,7 @@ class TestInsertBase:
         connect.create_collection(collection_name, fields)
         ids = [i for i in range(nb)]
         entities = ut.gen_entities_by_fields(fields["fields"], nb, ut.default_dim, ids)
-        logging.getLogger().info(entities)
+        log.info(entities)
         result = connect.insert(collection_name, entities)
         assert result.primary_keys == ids
         connect.flush([collection_name])
@@ -313,7 +313,7 @@ class TestInsertBase:
         expected: raise exception
         """
         ids = [i for i in range(1, default_nb)]
-        logging.getLogger().info(len(ids))
+        log.info(len(ids))
         entities = copy.deepcopy(default_entities)
         entities[0]["values"] = ids
         with pytest.raises(Exception) as e:
@@ -328,7 +328,7 @@ class TestInsertBase:
         expected: raise exception
         """
         ids = [i for i in range(1, default_nb)]
-        logging.getLogger().info(len(ids))
+        log.info(len(ids))
         entity = copy.deepcopy(default_entity)
         entity[0]["values"] = ids
         with pytest.raises(Exception) as e:
@@ -365,7 +365,7 @@ class TestInsertBase:
         entities[0]["values"] = ids
         result = connect.insert(id_collection, entities, partition_name=default_tag)
         assert result.primary_keys == ids
-        logging.getLogger().info(connect.describe_collection(id_collection))
+        log.info(connect.describe_collection(id_collection))

     @pytest.mark.timeout(ADD_TIMEOUT)
     @pytest.mark.tags(CaseLabel.L1)
@@ -551,7 +551,7 @@ class TestInsertBase:
         milvus = ut.get_milvus(host=args["ip"], port=args["port"], handler=args["handler"], try_connect=False)

         def insert(thread_i):
-            logging.getLogger().info("In thread-%d" % thread_i)
+            log.info("In thread-%d" % thread_i)
             result = milvus.insert(collection, default_entities)
             milvus.flush([collection])

@@ -676,7 +676,7 @@ class TestInsertBinary:
                                                    ut.default_top_k, 1, metric_type="JACCARD")
         connect.load_collection(binary_collection)
         res = connect.search(binary_collection, **query)
-        logging.getLogger().debug(res)
+        log.debug(res)
         assert len(res[0]) == ut.default_top_k


@@ -697,11 +697,11 @@ class TestInsertAsync:
         yield request.param

     def check_status(self, result):
-        logging.getLogger().info("In callback check status")
+        log.info("In callback check status")
         assert not result

     def check_result(self, result):
-        logging.getLogger().info("In callback check results")
+        log.info("In callback check results")
         assert result

     @pytest.mark.tags(CaseLabel.L0)
@@ -755,7 +755,7 @@ class TestInsertAsync:
         assert len(result.primary_keys) == nb
         connect.flush([collection])
         stats = connect.get_collection_stats(collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         assert stats[row_count] == nb

     @pytest.mark.tags(CaseLabel.L2)
@@ -810,7 +810,7 @@ class TestInsertMultiCollections:
         params=ut.gen_simple_index()
     )
     def get_simple_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         # if str(connect._cmd("mode")) == "CPU":
         #     if request.param["index_type"] in index_cpu_not_support():
         #         pytest.skip("sq8h not support in CPU mode")
@@ -1662,7 +1662,7 @@ class TestGetCollectionStats:
         params=gen_binary_index()
     )
     def get_jaccard_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         if request.param["index_type"] in binary_support():
             request.param["metric_type"] = "JACCARD"
         return request.param
@@ -1782,12 +1782,12 @@ class TestGetCollectionStats:
         connect.delete_entity_by_id(collection, delete_ids)
         connect.flush([collection])
         stats = connect.get_collection_stats(collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         assert stats["row_count"] == default_nb - delete_length
         compact_before = stats["partitions"][0]["segments"][0]["data_size"]
         connect.compact(collection)
         stats = connect.get_collection_stats(collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         compact_after = stats["partitions"][0]["segments"][0]["data_size"]
         assert compact_before == compact_after

@@ -1804,11 +1804,11 @@ class TestGetCollectionStats:
         connect.delete_entity_by_id(collection, delete_ids)
         connect.flush([collection])
         stats = connect.get_collection_stats(collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         compact_before = stats["partitions"][0]["row_count"]
         connect.compact(collection)
         stats = connect.get_collection_stats(collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         compact_after = stats["partitions"][0]["row_count"]
         # pdb.set_trace()
         assert compact_before == compact_after
@@ -2183,7 +2183,7 @@ class TestCreateCollectionInvalid(object):
         fields.pop("segment_row_limit")
         connect.create_collection(collection_name, fields)
         res = connect.get_collection_info(collection_name)
-        logging.getLogger().info(res)
+        log.info(res)
         assert res["segment_row_limit"] == default_server_segment_row_limit

     # TODO: assert exception
@@ -2232,7 +2232,7 @@ class TestDescribeCollection:
         params=gen_simple_index()
     )
     def get_simple_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         # if str(connect._cmd("mode")) == "CPU":
         #     if request.param["index_type"] in index_cpu_not_support():
         #         pytest.skip("sq8h not support in CPU mode")
@@ -3166,7 +3166,7 @@ class TestLoadPartition:
         params=gen_binary_index()
     )
     def get_binary_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         if request.param["index_type"] in binary_support():
             return request.param
         else:
@@ -3184,7 +3184,7 @@ class TestLoadPartition:
         assert len(result.primary_keys) == default_nb
         connect.flush([binary_collection])
         for metric_type in binary_metrics():
-            logging.getLogger().info(metric_type)
+            log.info(metric_type)
             get_binary_index["metric_type"] = metric_type
             if get_binary_index["index_type"] == "BIN_IVF_FLAT" and metric_type in structure_metrics():
                 with pytest.raises(Exception) as e:
@@ -2,6 +2,7 @@ import pytest
 from utils.utils import *
 from common.constants import default_entities
 from common.common_type import CaseLabel
+from utils.util_log import test_log as log

 DELETE_TIMEOUT = 60
 default_single_query = {
@@ -170,7 +171,7 @@ class TestFlushBase:
         assert res["row_count"] == len(result.primary_keys)
         connect.load_collection(collection)
         res = connect.search(collection, **default_single_query)
-        logging.getLogger().debug(res)
+        log.debug(res)
         assert len(res) == 1
         assert len(res[0].ids) == 10
         assert len(res[0].distances) == 10
@@ -237,7 +238,7 @@ class TestFlushBase:
         assert len(res) == 1
         assert len(res[0].ids) == 10
         assert len(res[0].distances) == 10
-        logging.getLogger().debug(res)
+        log.debug(res)
         # assert res

     # TODO: unable to set config
@@ -307,7 +308,7 @@ class TestFlushAsync:
     """

     def check_status(self):
-        logging.getLogger().info("In callback check status")
+        log.info("In callback check status")

     @pytest.mark.tags(CaseLabel.L2)
     def test_flush_empty_collection(self, connect, collection):
@@ -344,7 +345,7 @@ class TestFlushAsync:
         assert len(result.primary_keys) == default_nb
         future = connect.flush([collection], _async=True)
         assert future.result() is None
-        logging.getLogger().info("DROP")
+        log.info("DROP")
         res = connect.drop_collection(collection)
         assert res is None

@@ -356,9 +357,9 @@ class TestFlushAsync:
         expected: status ok
         """
         connect.insert(collection, default_entities)
-        logging.getLogger().info("before")
+        log.info("before")
         future = connect.flush([collection], _async=True, _callback=self.check_status)
-        logging.getLogger().info("after")
+        log.info("after")
         future.done()
         status = future.result()
         assert status is None
@@ -382,7 +382,7 @@ class TestIndexBase:
         params=gen_simple_index()
     )
     def get_simple_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         # if str(connect._cmd("mode")) == "CPU":
         #     if request.param["index_type"] in index_cpu_not_support():
         #         pytest.skip("sq8h not support in CPU mode")
@@ -513,7 +513,7 @@ class TestIndexBase:
         result = connect.insert(collection, default_entities)
         connect.flush([collection])
         connect.create_index(collection, field_name, get_simple_index)
-        logging.getLogger().info(connect.describe_index(collection, ""))
+        log.info(connect.describe_index(collection, ""))
         nq = get_nq
         index_type = get_simple_index["index_type"]
         search_param = get_search_param(index_type)
@@ -714,7 +714,7 @@ class TestIndexBase:
         get_simple_index["metric_type"] = metric_type
         connect.create_index(collection, field_name, get_simple_index)
         connect.load_collection(collection)
-        logging.getLogger().info(connect.describe_index(collection, ""))
+        log.info(connect.describe_index(collection, ""))
         nq = get_nq
         index_type = get_simple_index["index_type"]
         search_param = get_search_param(index_type)
@@ -1066,7 +1066,7 @@ class TestIndexBinary:
         search_param = get_search_param(get_jaccard_index["index_type"], metric_type="JACCARD")
         params, _ = gen_search_vectors_params(binary_field_name, default_binary_entities, default_top_k, nq,
                                               search_params=search_param, metric_type="JACCARD")
-        logging.getLogger().info(params)
+        log.info(params)
         res = connect.search(binary_collection, **params)
         assert len(res) == nq

@@ -1122,7 +1122,7 @@ class TestIndexBinary:
         connect.flush([binary_collection])
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         stats = connect.get_collection_stats(binary_collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         assert stats["row_count"] == default_nb
         assert len(stats["partitions"]) == 2
         for partition in stats["partitions"]:
@@ -1148,7 +1148,7 @@ class TestIndexBinary:
         """
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         stats = connect.get_collection_stats(binary_collection)
-        logging.getLogger().info(stats)
+        log.info(stats)
         connect.drop_index(binary_collection, binary_field_name)
         binary_index = connect.describe_index(binary_collection, "")
         assert not binary_index
@@ -1218,7 +1218,7 @@ class TestIndexInvalid(object):
         method: create index with invalid index params
         expected: raise exception
         """
-        logging.getLogger().info(get_index)
+        log.info(get_index)
         with pytest.raises(Exception) as e:
             connect.create_index(collection, field_name, get_index)

@@ -1246,8 +1246,8 @@ class TestIndexAsync:
         return copy.deepcopy(request.param)

     def check_result(self, res):
-        logging.getLogger().info("In callback check search result")
-        logging.getLogger().info(res)
+        log.info("In callback check search result")
+        log.info(res)

     """
     ******************************************************************
@@ -1263,12 +1263,12 @@ class TestIndexAsync:
         expected: return search success
         """
         result = connect.insert(collection, default_entities)
-        logging.getLogger().info("start index")
+        log.info("start index")
         future = connect.create_index(collection, field_name, get_simple_index, _async=True)
-        logging.getLogger().info("before result")
+        log.info("before result")
         res = future.result()
         # TODO:
-        logging.getLogger().info(res)
+        log.info(res)

     @pytest.mark.tags(CaseLabel.L0)
     @pytest.mark.timeout(BUILD_TIMEOUT)
@@ -1300,10 +1300,10 @@ class TestIndexAsync:
         expected: return search success
         """
         result = connect.insert(collection, default_entities)
-        logging.getLogger().info("start index")
+        log.info("start index")
         future = connect.create_index(collection, field_name, get_simple_index, _async=True,
                                       _callback=self.check_result)
-        logging.getLogger().info("before result")
+        log.info("before result")
         res = future.result()
         # TODO:
-        logging.getLogger().info(res)
+        log.info(res)
@@ -1,11 +1,11 @@
 import threading
-import logging
 import time
 from multiprocessing import Pool, Process
 import pytest
 from utils import utils as ut
 from common.constants import default_entities, default_fields
 from common.common_type import CaseLabel
+from utils.util_log import test_log as log

 TIMEOUT = 120
 default_nb = ut.default_nb
@@ -264,7 +264,7 @@ class TestHasBase:
         """
         connect.create_partition(collection, default_tag)
         res = connect.has_partition(collection, default_tag)
-        logging.getLogger().info(res)
+        log.info(res)
         assert res

     @pytest.mark.tags(CaseLabel.L0)
@@ -288,7 +288,7 @@ class TestHasBase:
         expected: status ok, result empty
         """
         res = connect.has_partition(collection, default_tag)
-        logging.getLogger().info(res)
+        log.info(res)
         assert not res

     @pytest.mark.tags(CaseLabel.L2)
@@ -2242,7 +2242,7 @@ class TestSearchBase:
         params=gen_binary_index()
     )
     def get_jaccard_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         if request.param["index_type"] in binary_support():
             return request.param
         # else:
@@ -2253,7 +2253,7 @@ class TestSearchBase:
         params=gen_binary_index()
     )
     def get_hamming_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         if request.param["index_type"] in binary_support():
             return request.param
         # else:
@@ -2264,7 +2264,7 @@ class TestSearchBase:
         params=gen_binary_index()
     )
     def get_structure_index(self, request, connect):
-        logging.getLogger().info(request.param)
+        log.info(request.param)
         if request.param["index_type"] == "FLAT":
             return request.param
         # else:
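
Every hunk above applies the same mechanical substitution, so adopting the unified logger in any remaining test module follows one pattern (illustrative lines with a stand-in payload, not part of the diff):

from utils.util_log import test_log as log  # assumes the repo's util_log module is on the path

stats = {"row_count": 128}  # stand-in payload for illustration

# before: unconfigured root logger; output depends on whatever logging
# config happens to be active at the time
# logging.getLogger().info(stats)

# after: one shared logger, configured once in utils/util_log.py
log.info(stats)

Centralizing on a single module-level logger means formatting and destinations are decided once in `utils/util_log.py` rather than at each of the dozens of call sites this commit touches.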