from enum import Enum
from random import randint
import time
from time import sleep

from delayed_assert import expect

from base.collection_wrapper import ApiCollectionWrapper
from base.utility_wrapper import ApiUtilityWrapper
from chaos import constants
from common import common_func as cf
from common import common_type as ct
from common.common_type import CheckTasks
from utils.util_log import test_log as log


class Op(Enum):
    create = 'create'
    insert = 'insert'
    flush = 'flush'
    index = 'index'
    search = 'search'
    query = 'query'
    unknown = 'unknown'


timeout = 20
enable_traceback = False


class Checker:
    """
    A base class of Milvus operation checkers, used to
       a. check whether Milvus is still in service
       b. count operations and the success rate
    """

    def __init__(self, collection_name=None, shards_num=2):
        self._succ = 0
        self._fail = 0
        self.rsp_times = []
        self.average_time = 0
        self.c_wrap = ApiCollectionWrapper()
        c_name = collection_name if collection_name is not None else cf.gen_unique_str('Checker_')
        self.c_wrap.init_collection(name=c_name,
                                    schema=cf.gen_default_collection_schema(),
                                    shards_num=shards_num,
                                    timeout=timeout,
                                    enable_traceback=enable_traceback)
        self.c_wrap.insert(data=cf.gen_default_list_data(nb=constants.ENTITIES_FOR_SEARCH),
                           timeout=timeout,
                           enable_traceback=enable_traceback)
        self.initial_entities = self.c_wrap.num_entities  # reading num_entities triggers a flush

    def total(self):
        return self._succ + self._fail

    def succ_rate(self):
        return self._succ / self.total() if self.total() != 0 else 0

    def check_result(self):
        succ_rate = self.succ_rate()
        total = self.total()
        rsp_times = self.rsp_times
        average_time = 0 if len(rsp_times) == 0 else sum(rsp_times) / len(rsp_times)
        max_time = 0 if len(rsp_times) == 0 else max(rsp_times)
        min_time = 0 if len(rsp_times) == 0 else min(rsp_times)
        checkers_result = f"succ_rate: {succ_rate:.2f}, total: {total:03d}, " \
                          f"average_time: {average_time:.4f}, max_time: {max_time:.4f}, min_time: {min_time:.4f}"
        return checkers_result

    def reset(self):
        self._succ = 0
        self._fail = 0
        self.rsp_times = []
        self.average_time = 0


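# Usage sketch (illustrative only, not part of this module's API): a checker
# is typically driven from a daemon thread by the chaos test harness, e.g.
#
#     import threading
#     checker = SearchChecker()
#     threading.Thread(target=checker.keep_running, daemon=True).start()
#     ...  # inject chaos, then read checker.check_result()

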
class SearchChecker(Checker):
    """check search operations in an independent thread"""

    def __init__(self, collection_name=None, shards_num=2, replica_number=1):
        super().__init__(collection_name=collection_name, shards_num=shards_num)
        self.c_wrap.load(replica_number=replica_number)  # load before search

    def keep_running(self):
        while True:
            search_vec = cf.gen_vectors(5, ct.default_dim)
            t0 = time.time()
            _, result = self.c_wrap.search(
                data=search_vec,
                anns_field=ct.default_float_vec_field_name,
                param={"nprobe": 32},
                limit=1, timeout=timeout,
                enable_traceback=enable_traceback,
                check_task=CheckTasks.check_nothing
            )
            t1 = time.time()
            if result:
                self.rsp_times.append(t1 - t0)
                self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                self._succ += 1
                log.debug(f"search success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
            else:
                self._fail += 1
            sleep(constants.WAIT_PER_OP / 10)


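# Note: the checkers maintain average_time as a streaming mean,
#     new_avg = (sample + old_avg * n) / (n + 1)
# with n = self._succ before the increment, which keeps it equal to
# sum(rsp_times) / len(rsp_times) without re-scanning the list on every pass.

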
class InsertFlushChecker(Checker):
    """check insert and flush operations in an independent thread"""

    def __init__(self, collection_name=None, flush=False, shards_num=2):
        super().__init__(collection_name=collection_name, shards_num=shards_num)
        self._flush = flush
        self.initial_entities = self.c_wrap.num_entities

    def keep_running(self):
        while True:
            t0 = time.time()
            _, insert_result = \
                self.c_wrap.insert(data=cf.gen_default_list_data(nb=constants.DELTA_PER_INS),
                                   timeout=timeout,
                                   enable_traceback=enable_traceback,
                                   check_task=CheckTasks.check_nothing)
            t1 = time.time()
            if not self._flush:
                if insert_result:
                    self.rsp_times.append(t1 - t0)
                    self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                    self._succ += 1
                    log.debug(f"insert success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
                else:
                    self._fail += 1
                sleep(constants.WAIT_PER_OP / 10)
            else:
                # reading the num_entities property triggers a flush
                t0 = time.time()
                num_entities = self.c_wrap.num_entities
                t1 = time.time()
                if num_entities == (self.initial_entities + constants.DELTA_PER_INS):
                    self.rsp_times.append(t1 - t0)
                    self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                    self._succ += 1
                    log.debug(f"flush success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
                    self.initial_entities += constants.DELTA_PER_INS
                else:
                    self._fail += 1


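# Note: the flush branch above counts success only when num_entities grew by
# exactly DELTA_PER_INS, which assumes this checker is the sole writer to its
# collection while the test runs.

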
class CreateChecker(Checker):
    """check create operations in an independent thread"""

    def __init__(self):
        super().__init__()

    def keep_running(self):
        while True:
            t0 = time.time()
            _, result = self.c_wrap.init_collection(
                name=cf.gen_unique_str("CreateChecker_"),
                schema=cf.gen_default_collection_schema(),
                timeout=timeout,
                enable_traceback=enable_traceback,
                check_task=CheckTasks.check_nothing)
            t1 = time.time()
            if result:
                self.rsp_times.append(t1 - t0)
                self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                self._succ += 1
                log.debug(f"create success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
                self.c_wrap.drop(timeout=timeout)  # drop so collections don't accumulate
            else:
                self._fail += 1
            sleep(constants.WAIT_PER_OP / 10)


class IndexChecker(Checker):
    """check create index operations in an independent thread"""

    def __init__(self):
        super().__init__()
        self.c_wrap.insert(data=cf.gen_default_list_data(nb=5 * constants.ENTITIES_FOR_SEARCH),
                           timeout=timeout, enable_traceback=enable_traceback)
        log.debug(f"Index ready entities: {self.c_wrap.num_entities}")  # flush before indexing

    def keep_running(self):
        while True:
            t0 = time.time()
            _, result = self.c_wrap.create_index(ct.default_float_vec_field_name,
                                                 constants.DEFAULT_INDEX_PARAM,
                                                 name=cf.gen_unique_str('index_'),
                                                 timeout=timeout,
                                                 enable_traceback=enable_traceback,
                                                 check_task=CheckTasks.check_nothing)
            t1 = time.time()
            if result:
                self.rsp_times.append(t1 - t0)
                self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                self._succ += 1
                log.debug(f"index success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
                self.c_wrap.drop_index(timeout=timeout)  # drop so the next pass rebuilds it
            else:
                self._fail += 1


class QueryChecker(Checker):
    """check query operations in an independent thread"""

    def __init__(self, collection_name=None, shards_num=2, replica_number=1):
        super().__init__(collection_name=collection_name, shards_num=shards_num)
        self.c_wrap.load(replica_number=replica_number)  # load before query

    def keep_running(self):
        while True:
            int_values = []
            for _ in range(5):
                int_values.append(randint(0, constants.ENTITIES_FOR_SEARCH))
            term_expr = f'{ct.default_int64_field_name} in {int_values}'
            t0 = time.time()
            _, result = self.c_wrap.query(term_expr, timeout=timeout,
                                          enable_traceback=enable_traceback,
                                          check_task=CheckTasks.check_nothing)
            t1 = time.time()
            if result:
                self.rsp_times.append(t1 - t0)
                self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                self._succ += 1
                log.debug(f"query success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
            else:
                self._fail += 1
            sleep(constants.WAIT_PER_OP / 10)


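# Note: term_expr above leans on Python's list repr (e.g. "[3, 17, 42]")
# matching Milvus' boolean-expression syntax, yielding expressions such as
# "int64 in [3, 17, 42]".

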
class BulkLoadChecker(Checker):
    """check bulk load operations in an independent thread"""

    def __init__(self):
        super().__init__()
        self.utility_wrap = ApiUtilityWrapper()
        self.schema = cf.gen_default_collection_schema()
        self.files = ["/tmp/test_data.json"]
        self.failed_tasks = []

    def update(self, files=None, schema=None):
        if files:
            self.files = files
        if schema:
            self.schema = schema

    def keep_running(self):
        while True:
            c_name = cf.gen_unique_str("BulkLoadChecker_")
            self.c_wrap.init_collection(name=c_name, schema=self.schema)
            # import data
            t0 = time.time()
            task_ids, res_1 = self.utility_wrap.bulk_load(collection_name=c_name,
                                                          partition_name='',
                                                          row_based=True,
                                                          files=self.files)
            log.info(f"bulk load task ids: {task_ids}")
            completed, res_2 = self.utility_wrap.wait_for_bulk_load_tasks_completed(task_ids=task_ids,
                                                                                    timeout=30)
            t1 = time.time()
            if completed and res_1 and res_2:
                self.rsp_times.append(t1 - t0)
                self.average_time = ((t1 - t0) + self.average_time * self._succ) / (self._succ + 1)
                self._succ += 1
                log.debug(f"bulk load success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}")
            else:
                self._fail += 1
                self.failed_tasks.append(c_name)
            sleep(constants.WAIT_PER_OP / 10)


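# Usage sketch for BulkLoadChecker.update() (the file path is hypothetical):
#
#     checker = BulkLoadChecker()
#     checker.update(files=["/tmp/another_batch.json"],
#                    schema=cf.gen_default_collection_schema())
#     # later keep_running passes then import the new files

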
def assert_statistic(checkers, expectations=None):
    expectations = expectations if expectations is not None else {}
    for k in checkers.keys():
        # expect success if no expectation is set for this checker
        succ_rate = checkers[k].succ_rate()
        total = checkers[k].total()
        checker_result = checkers[k].check_result()

        if expectations.get(k, '') == constants.FAIL:
            log.info(f"Expect Fail: {str(k)} {checker_result}")
            expect(succ_rate < 0.49 or total < 2,
                   f"Expect Fail: {str(k)} {checker_result}")
        else:
            log.info(f"Expect Succ: {str(k)} {checker_result}")
            expect(succ_rate > 0.90 or total > 2,
                   f"Expect Succ: {str(k)} {checker_result}")
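

# Usage sketch (assumed harness conventions; the Op keys and FAIL marker come
# from this module and chaos.constants):
#
#     checkers = {Op.search: SearchChecker(), Op.create: CreateChecker()}
#     ...  # start keep_running threads, inject chaos, stop the workload
#     assert_statistic(checkers, expectations={Op.create: constants.FAIL})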