test: update restful v2 testcases (#31404)

* update restful v2 test cases
* add case to ci

---------

Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>
zhuwenxing 2024-03-21 15:31:09 +08:00 committed by GitHub
parent c13c96e321
commit f2c2877609
13 changed files with 2922 additions and 84 deletions


@@ -5,7 +5,7 @@ import uuid
from utils.util_log import test_log as logger
from minio import Minio
from minio.error import S3Error
from minio.commonconfig import REPLACE, CopySource
def logger_request_response(response, url, tt, headers, data, str_data, str_response, method):
if len(data) > 2000:
@@ -136,7 +136,59 @@ class VectorClient(Requests):
return response.json()
def vector_query(self, payload, db_name="default", timeout=10):
def vector_advanced_search(self, payload, db_name="default", timeout=10):
time.sleep(1)
url = f'{self.endpoint}/v2/vectordb/entities/advanced_search'
if self.db_name is not None:
payload["dbName"] = self.db_name
if db_name != "default":
payload["dbName"] = db_name
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if "data" in rsp and len(rsp["data"]) == 0:
t0 = time.time()
while time.time() - t0 < timeout:
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if len(rsp["data"]) > 0:
break
time.sleep(1)
else:
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if "data" in rsp and len(rsp["data"]) == 0:
logger.info(f"after {timeout}s, still no data")
return response.json()
def vector_hybrid_search(self, payload, db_name="default", timeout=10):
time.sleep(1)
url = f'{self.endpoint}/v2/vectordb/entities/hybrid_search'
if self.db_name is not None:
payload["dbName"] = self.db_name
if db_name != "default":
payload["dbName"] = db_name
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if "data" in rsp and len(rsp["data"]) == 0:
t0 = time.time()
while time.time() - t0 < timeout:
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if len(rsp["data"]) > 0:
break
time.sleep(1)
else:
response = self.post(url, headers=self.update_headers(), data=payload)
rsp = response.json()
if "data" in rsp and len(rsp["data"]) == 0:
logger.info(f"after {timeout}s, still no data")
return response.json()
def vector_query(self, payload, db_name="default", timeout=5):
time.sleep(1)
url = f'{self.endpoint}/v2/vectordb/entities/query'
if self.db_name is not None:
@@ -527,6 +579,7 @@ class RoleClient(Requests):
self.api_key = token
self.db_name = None
self.headers = self.update_headers()
self.role_names = []
def update_headers(self):
headers = {
@@ -546,6 +599,8 @@ class RoleClient(Requests):
url = f'{self.endpoint}/v2/vectordb/roles/create'
response = self.post(url, headers=self.update_headers(), data=payload)
res = response.json()
if res["code"] == 200:
self.role_names.append(payload["roleName"])
return res
def role_describe(self, role_name):
@@ -706,36 +761,63 @@ class ImportJobClient(Requests):
return headers
def list_import_jobs(self, payload, db_name="default"):
if self.db_name is not None:
db_name = self.db_name
payload["dbName"] = db_name
data = payload
if db_name is None:
payload.pop("dbName")
url = f'{self.endpoint}/v2/vectordb/jobs/import/list'
response = self.post(url, headers=self.update_headers(), data=data)
res = response.json()
return res
def create_import_jobs(self, payload):
url = f'{self.endpoint}/v2/vectordb/jobs/import/create'
response = self.post(url, headers=self.update_headers(), data=payload)
res = response.json()
return res
def get_import_job_progress(self, task_id):
def create_import_jobs(self, payload, db_name="default"):
if self.db_name is not None:
db_name = self.db_name
url = f'{self.endpoint}/v2/vectordb/jobs/import/create'
payload["dbName"] = db_name
response = self.post(url, headers=self.update_headers(), data=payload)
res = response.json()
return res
def get_import_job_progress(self, job_id, db_name="default"):
if self.db_name is not None:
db_name = self.db_name
payload = {
"taskID": task_id
"dbName": db_name,
"jobID": job_id
}
if db_name is None:
payload.pop("dbName")
if job_id is None:
payload.pop("jobID")
url = f'{self.endpoint}/v2/vectordb/jobs/import/get_progress'
response = self.post(url, headers=self.update_headers(), data=payload)
res = response.json()
return res
def wait_import_job_completed(self, job_id):
finished = False
t0 = time.time()
rsp = self.get_import_job_progress(job_id)
while not finished:
rsp = self.get_import_job_progress(job_id)
if rsp['data']['state'] == "Completed":
finished = True
time.sleep(5)
if time.time() - t0 > 120:
break
return rsp, finished
class StorageClient():
def __init__(self, endpoint, access_key, secret_key, bucket_name):
def __init__(self, endpoint, access_key, secret_key, bucket_name, root_path="file"):
self.endpoint = endpoint
self.access_key = access_key
self.secret_key = secret_key
self.bucket_name = bucket_name
self.root_path = root_path
self.client = Minio(
self.endpoint,
access_key=access_key,
@@ -748,3 +830,37 @@ class StorageClient():
self.client.fput_object(self.bucket_name, object_name, file_path)
except S3Error as exc:
logger.error("fail to copy files to minio", exc)
def copy_file(self, src_bucket, src_object, dst_bucket, dst_object):
try:
# if the dst bucket does not exist, create it
if not self.client.bucket_exists(dst_bucket):
self.client.make_bucket(dst_bucket)
self.client.copy_object(dst_bucket, dst_object, CopySource(src_bucket, src_object))
except S3Error as exc:
logger.error("fail to copy files to minio", exc)
def get_collection_binlog(self, collection_id):
dir_list = [
"delta_log",
"insert_log"
]
binlog_list = []
# list objects under <root_path>/<dir>/<collection_id>/ in the bucket
for dir in dir_list:
prefix = f"{self.root_path}/{dir}/{collection_id}/"
objects = self.client.list_objects(self.bucket_name, prefix=prefix)
for obj in objects:
binlog_list.append(f"{self.bucket_name}/{obj.object_name}")
logger.info(f"binlog files: {binlog_list}")
return binlog_list
if __name__ == "__main__":
sc = StorageClient(
endpoint="10.104.19.57:9000",
access_key="minioadmin",
secret_key="minioadmin",
bucket_name="milvus-bucket"
)
sc.get_collection_binlog("448305293023730313")
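
Taken together, the new helpers support a backup-and-restore style flow in the import tests: list a collection's binlog files from MinIO, feed them to an import job, and poll the job until it completes. A minimal sketch of that wiring, assuming the helpers are importable from this module; the endpoint, credentials, ids, and the exact create_import_jobs payload/response keys are placeholders rather than verified API shapes:

storage = StorageClient(
    endpoint="127.0.0.1:9000",
    access_key="minioadmin",
    secret_key="minioadmin",
    bucket_name="milvus-bucket",
    root_path="file",
)
import_client = ImportJobClient("http://127.0.0.1:19530", "root:Milvus")

# 1. collect the binlog object paths of a source collection
binlogs = storage.get_collection_binlog("448305293023730313")

# 2. create an import job pointing at those files (payload keys assumed)
rsp = import_client.create_import_jobs({"collectionName": "restore_target", "files": [binlogs]})
job_id = rsp["data"]["jobID"]  # assumed response field

# 3. poll get_import_job_progress until "Completed" or the 120 s cap is hit
rsp, finished = import_client.wait_import_job_completed(job_id)
assert finished, f"import job {job_id} did not complete: {rsp}"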


@@ -27,6 +27,7 @@ class Base:
collection_client = None
partition_client = None
index_client = None
alias_client = None
user_client = None
role_client = None
import_job_client = None
@@ -49,7 +50,7 @@ class TestBase(Base):
logger.error(e)
@pytest.fixture(scope="function", autouse=True)
def init_client(self, endpoint, token, minio_host):
def init_client(self, endpoint, token, minio_host, bucket_name, root_path):
self.endpoint = f"{endpoint}"
self.api_key = f"{token}"
self.invalid_api_key = "invalid_token"
@@ -61,14 +62,14 @@ class TestBase(Base):
self.user_client = UserClient(self.endpoint, self.api_key)
self.role_client = RoleClient(self.endpoint, self.api_key)
self.import_job_client = ImportJobClient(self.endpoint, self.api_key)
self.storage_client = StorageClient(f"{minio_host}:9000", "minioadmin", "minioadmin", "milvus-bucket")
self.storage_client = StorageClient(f"{minio_host}:9000", "minioadmin", "minioadmin", bucket_name, root_path)
if token is None:
self.vector_client.api_key = None
self.collection_client.api_key = None
self.partition_client.api_key = None
connections.connect(uri=endpoint, token=token)
def init_collection(self, collection_name, pk_field="id", metric_type="L2", dim=128, nb=100, batch_size=1000):
def init_collection(self, collection_name, pk_field="id", metric_type="L2", dim=128, nb=100, batch_size=1000, return_insert_id=False):
# create collection
schema_payload = {
"collectionName": collection_name,
@@ -85,6 +86,7 @@ class TestBase(Base):
batch = nb // batch_size
remainder = nb % batch_size
data = []
insert_ids = []
for i in range(batch):
nb = batch_size
data = get_data_by_payload(schema_payload, nb)
@@ -96,6 +98,8 @@ class TestBase(Base):
logger.debug(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
if return_insert_id:
insert_ids.extend(rsp['data']['insertIds'])
# insert remainder data
if remainder:
nb = remainder
@@ -106,6 +110,10 @@ class TestBase(Base):
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
if return_insert_id:
insert_ids.extend(rsp['data']['insertIds'])
if return_insert_id:
return schema_payload, data, insert_ids
return schema_payload, data
@@ -131,5 +139,7 @@ class TestBase(Base):
def update_database(self, db_name="default"):
self.create_database(db_name=db_name)
db.using_database(db_name=db_name)
self.collection_client.db_name = db_name
self.vector_client.db_name = db_name
self.import_job_client.db_name = db_name
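
For reference, a short sketch of how a test method might use the new return_insert_id flag together with update_database; the collection name, dimension, and counts are illustrative:

# illustrative usage inside a TestBase subclass
name = "test_collection"
schema_payload, data, insert_ids = self.init_collection(
    name, dim=128, nb=3000, batch_size=1000, return_insert_id=True
)
assert len(insert_ids) == 3000  # one primary key per inserted row

# route subsequent collection/vector/import requests to another database
self.update_database(db_name="test_db")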


@@ -6,6 +6,9 @@ def pytest_addoption(parser):
parser.addoption("--endpoint", action="store", default="http://127.0.0.1:19530", help="endpoint")
parser.addoption("--token", action="store", default="root:Milvus", help="token")
parser.addoption("--minio_host", action="store", default="127.0.0.1", help="minio host")
parser.addoption("--bucket_name", action="store", default="milvus-bucket", help="minio bucket name")
parser.addoption("--root_path", action="store", default="file", help="minio bucket root path")
parser.addoption("--release_name", action="store", default="my-release", help="release name")
@pytest.fixture
@@ -21,3 +24,18 @@ def token(request):
@pytest.fixture
def minio_host(request):
return request.config.getoption("--minio_host")
@pytest.fixture
def bucket_name(request):
return request.config.getoption("--bucket_name")
@pytest.fixture
def root_path(request):
return request.config.getoption("--root_path")
@pytest.fixture
def release_name(request):
return request.config.getoption("--release_name")


@@ -11,4 +11,5 @@ filterwarnings =
markers =
L0 : 'L0 case, high priority'
L1 : 'L1 case, second priority'
L2 : 'L2 case, system level case'


@@ -17,9 +17,8 @@ from pymilvus import (
@pytest.mark.L0
class TestCreateCollection(TestBase):
@pytest.mark.parametrize("metric_type", ["L2", "IP", "COSINE"])
@pytest.mark.parametrize("dim", [128])
def test_create_collections_fast(self, dim, metric_type):
def test_create_collections_quick_setup(self, dim):
"""
target: test create collection
method: create a collection with a simple schema
@@ -31,7 +30,6 @@ class TestCreateCollection(TestBase):
payload = {
"collectionName": name,
"dimension": dim,
"metricType": metric_type
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
@@ -44,6 +42,112 @@ class TestCreateCollection(TestBase):
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['data']['collectionName'] == name
assert rsp['data']['autoId'] is False
assert rsp['data']['enableDynamicField'] is True
assert "COSINE" in str(rsp['data']["indexes"])
@pytest.mark.parametrize("dim", [128])
@pytest.mark.parametrize("metric_type", ["L2", "COSINE", "IP"])
@pytest.mark.parametrize("id_type", ["Int64", "VarChar"])
@pytest.mark.parametrize("primary_field", ["id", "url"])
@pytest.mark.parametrize("vector_field", ["vector", "embedding"])
def test_create_collection_quick_setup_with_custom(self, vector_field, primary_field, dim, id_type, metric_type):
"""
target: create a collection via quick setup with customized field names, id type and metric type
method: pass primaryFieldName/vectorFieldName/idType/metricType in the create payload
expected: create collection success and describe reflects the customization
"""
# create a collection
name = gen_collection_name()
collection_payload = {
"collectionName": name,
"dimension": dim,
"metricType": metric_type,
"primaryFieldName": primary_field,
"vectorFieldName": vector_field,
"idType": id_type,
}
if id_type == "VarChar":
collection_payload["params"] = {"max_length": "256"}
rsp = self.collection_client.collection_create(collection_payload)
assert rsp['code'] == 200
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['data']['collectionName'] == name
fields = [f["name"] for f in rsp['data']['fields']]
assert primary_field in fields
assert vector_field in fields
for f in rsp['data']['fields']:
if f['name'] == primary_field:
assert f['type'] == id_type
assert f['primaryKey'] is True
for index in rsp['data']['indexes']:
assert index['metricType'] == metric_type
def test_create_collections_with_all_params(self):
"""
target: test create collection
method: create a collection with a full schema and all optional params (shards, partitions, consistency level, TTL)
expected: create collection success
"""
name = gen_collection_name()
dim = 128
metric_type = "COSINE"
client = self.collection_client
num_shards = 2
num_partitions = 36
consistency_level = "Strong"
ttl_seconds = 360
payload = {
"collectionName": name,
"enableDynamicField": True,
"params":{
"shardsNum": f"{num_shards}",
"partitionsNum": f"{num_partitions}",
"consistencyLevel": f"{consistency_level}",
"ttlSeconds": f"{ttl_seconds}",
},
"schema": {
"fields": [
{"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
{"fieldName": "word_count", "dataType": "Int64", "isPartitionKey": True, "elementTypeParams": {}},
{"fieldName": "book_describe", "dataType": "VarChar", "elementTypeParams": {"max_length": "256"}},
{"fieldName": "json", "dataType": "JSON", "elementTypeParams": {}},
{"fieldName": "int_array", "dataType": "Array", "elementDataType": "Int64",
"elementTypeParams": {"max_capacity": "1024"}},
{"fieldName": "book_intro", "dataType": "FloatVector", "elementTypeParams": {"dim": f"{dim}"}}
]
},
"indexParams": [
{"fieldName": "book_intro", "indexName": "book_intro_vector", "metricType": f"{metric_type}"}]
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
# describe collection by pymilvus
c = Collection(name)
res = c.describe()
logger.info(f"describe collection: {res}")
# describe collection
time.sleep(10)
rsp = client.collection_describe(name)
logger.info(f"describe collection: {rsp}")
ttl_seconds_actual = None
for d in rsp["data"]["properties"]:
if d["key"] == "collection.ttl.seconds":
ttl_seconds_actual = int(d["value"])
assert rsp['code'] == 200
assert rsp['data']['collectionName'] == name
assert rsp['data']['shardsNum'] == num_shards
assert rsp['data']['partitionsNum'] == num_partitions
assert rsp['data']['consistencyLevel'] == consistency_level
assert ttl_seconds_actual == ttl_seconds
@pytest.mark.parametrize("auto_id", [True, False])
@pytest.mark.parametrize("enable_dynamic_field", [True, False])
@@ -369,7 +473,6 @@ class TestCreateCollection(TestBase):
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['data']['collectionName'] == name
assert f"FloatVector({dim})" in str(rsp['data']['fields'])
def test_create_collections_concurrent_with_different_param(self):
"""
@@ -737,7 +840,7 @@ class TestDescribeCollection(TestBase):
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['data']['collectionName'] == name
assert rsp['data']['autoId'] is True
assert rsp['data']['autoId'] is False
assert rsp['data']['enableDynamicField'] is True
assert len(rsp['data']['indexes']) == 1
@@ -781,7 +884,7 @@ class TestDescribeCollection(TestBase):
for field in rsp['data']['fields']:
if field['name'] == "store_address":
assert field['PartitionKey'] is True
assert field['partitionKey'] is True
if field['name'] == "reviewer_id":
assert field['primaryKey'] is True
assert rsp['data']['autoId'] is False
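
The casing fix above matches the lowerCamelCase flags in the v2 describe response. A trimmed, illustrative fragment of the structure those assertions walk over (field names taken from the test, values made up):

rsp = {
    "code": 200,
    "data": {
        "autoId": False,
        "fields": [
            {"name": "reviewer_id", "primaryKey": True, "partitionKey": False},
            {"name": "store_address", "primaryKey": False, "partitionKey": True},
        ],
    },
}
for field in rsp["data"]["fields"]:
    if field["name"] == "store_address":
        assert field["partitionKey"] is True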


@@ -242,7 +242,7 @@ class TestCreateIndex(TestBase):
assert expected_index[i]['indexConfig']['index_type'] == actual_index[i]['indexType']
@pytest.mark.L0
@pytest.mark.L1
class TestCreateIndexNegative(TestBase):
@pytest.mark.parametrize("index_type", ["BIN_FLAT", "BIN_IVF_FLAT"])

File diff suppressed because it is too large


@@ -15,8 +15,8 @@ class TestRestfulSdkCompatibility(TestBase):
@pytest.mark.parametrize("dim", [128, 256])
@pytest.mark.parametrize("enable_dynamic", [True, False])
@pytest.mark.parametrize("shard_num", [1, 2])
def test_collection_created_by_sdk_describe_by_restful(self, dim, enable_dynamic, shard_num):
@pytest.mark.parametrize("num_shards", [1, 2])
def test_collection_created_by_sdk_describe_by_restful(self, dim, enable_dynamic, num_shards):
"""
"""
# 1. create collection by sdk
@@ -29,7 +29,7 @@ class TestRestfulSdkCompatibility(TestBase):
]
default_schema = CollectionSchema(fields=default_fields, description="test collection",
enable_dynamic_field=enable_dynamic)
collection = Collection(name=name, schema=default_schema, shards_num=shard_num)
collection = Collection(name=name, schema=default_schema, num_shards=num_shards)
logger.info(collection.schema)
# 2. use restful to get collection info
client = self.collection_client
@@ -41,7 +41,7 @@ class TestRestfulSdkCompatibility(TestBase):
assert rsp['data']['collectionName'] == name
assert rsp['data']['enableDynamicField'] == enable_dynamic
assert rsp['data']['load'] == "LoadStateNotLoad"
assert rsp['data']['shardsNum'] == shard_num
assert rsp['data']['shardsNum'] == num_shards
@pytest.mark.parametrize("metric_type", ["L2", "IP", "COSINE"])
@pytest.mark.parametrize("dim", [128])
@@ -131,6 +131,9 @@ class TestRestfulSdkCompatibility(TestBase):
FieldSchema(name="int64", dtype=DataType.INT64, is_primary=True),
FieldSchema(name="float", dtype=DataType.FLOAT),
FieldSchema(name="varchar", dtype=DataType.VARCHAR, max_length=65535),
FieldSchema(name="json", dtype=DataType.JSON),
FieldSchema(name="int_array", dtype=DataType.ARRAY, element_type=DataType.INT64, max_capacity=1024),
FieldSchema(name="varchar_array", dtype=DataType.ARRAY, element_type=DataType.VARCHAR, max_capacity=1024, max_length=65535),
FieldSchema(name="float_vector", dtype=DataType.FLOAT_VECTOR, dim=128)
]
default_schema = CollectionSchema(fields=default_fields, description="test collection",
@@ -142,7 +145,13 @@ class TestRestfulSdkCompatibility(TestBase):
collection.load()
# insert data by restful
data = [
{"int64": i, "float": i, "varchar": str(i), "float_vector": [random.random() for _ in range(dim)], "age": i}
{"int64": i,
"float": i,
"varchar": str(i),
"json": {f"key_{i}": f"value_{i}"},
"int_array": [random.randint(0, 100) for _ in range(10)],
"varchar_array": [str(i) for _ in range(10)],
"float_vector": [random.random() for _ in range(dim)], "age": i}
for i in range(nb)
]
client = self.vector_client
@@ -153,6 +162,7 @@ class TestRestfulSdkCompatibility(TestBase):
rsp = client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['data']['insertCount'] == nb
assert len(rsp['data']["insertIds"]) == nb
def test_collection_create_by_sdk_search_vector_by_restful(self):
"""
@@ -181,7 +191,7 @@ class TestRestfulSdkCompatibility(TestBase):
client = self.vector_client
payload = {
"collectionName": name,
"vector": [random.random() for _ in range(dim)],
"data": [[random.random() for _ in range(dim)]],
"limit": 10
}
# search data by restful
@@ -306,7 +316,7 @@ class TestRestfulSdkCompatibility(TestBase):
pk_id_list.append(item["int64"])
payload = {
"collectionName": name,
"id": pk_id_list
"filter": f"int64 in {pk_id_list}"
}
# delete data by restful
rsp = self.vector_client.vector_delete(payload)
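
The last hunk reflects the v2 API's switch from deleting by a raw "id" list to deleting by a boolean "filter" expression. A sketch of the resulting request payload; the collection name is a placeholder:

pk_id_list = [1, 2, 3]
payload = {
    "collectionName": "test_collection",  # placeholder
    "filter": f"int64 in {pk_id_list}",   # renders as: int64 in [1, 2, 3]
}
rsp = self.vector_client.vector_delete(payload)
assert rsp["code"] == 200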


@@ -1,6 +1,6 @@
from utils.utils import gen_unique_str
from base.testbase import TestBase
import pytest
class TestRoleE2E(TestBase):
@@ -10,7 +10,7 @@ class TestRoleE2E(TestBase):
all_roles = rsp['data']
# delete all roles except default roles
for role in all_roles:
if role.startswith("role"):
if role.startswith("role") and role in self.role_client.role_names:
payload = {
"roleName": role
}
@@ -26,6 +26,7 @@ class TestRoleE2E(TestBase):
self.role_client.role_revoke(payload)
self.role_client.role_drop(payload)
@pytest.mark.L1
def test_role_e2e(self):
# list role before create
@@ -41,6 +42,7 @@ class TestRoleE2E(TestBase):
assert role_name in rsp['data']
# describe role
rsp = self.role_client.role_describe(role_name)
assert rsp['code'] == 200
# grant privilege to role
payload = {
"roleName": role_name,
@@ -49,6 +51,7 @@ class TestRoleE2E(TestBase):
"privilege": "CreateCollection"
}
rsp = self.role_client.role_grant(payload)
assert rsp['code'] == 200
# describe role after grant
rsp = self.role_client.role_describe(role_name)
privileges = []


@@ -4,9 +4,32 @@ from base.testbase import TestBase
from pymilvus import (connections)
@pytest.mark.L0
class TestUserE2E(TestBase):
def teardown_method(self):
# because the number of roles is limited, we need to delete all created roles after each test
rsp = self.role_client.role_list()
all_roles = rsp['data']
# delete all roles except default roles
for role in all_roles:
if role.startswith("role") and role in self.role_client.role_names:
payload = {
"roleName": role
}
# revoke privilege from role
rsp = self.role_client.role_describe(role)
for d in rsp['data']:
payload = {
"roleName": role,
"objectType": d['objectType'],
"objectName": d['objectName'],
"privilege": d['privilege']
}
self.role_client.role_revoke(payload)
self.role_client.role_drop(payload)
@pytest.mark.L0
def test_user_e2e(self):
# list user before create
@@ -43,6 +66,7 @@ class TestUserE2E(TestBase):
rsp = self.user_client.user_list()
assert user_name not in rsp['data']
@pytest.mark.L1
def test_user_binding_role(self):
# create user
user_name = gen_unique_str("user")
@@ -100,7 +124,7 @@ class TestUserE2E(TestBase):
assert rsp['code'] == 200
@pytest.mark.L0
@pytest.mark.L1
class TestUserNegative(TestBase):
def test_create_user_with_short_password(self):

File diff suppressed because it is too large


@@ -115,9 +115,11 @@ def get_random_json_data(uid=None):
def get_data_by_payload(payload, nb=100):
dim = payload.get("dimension", 128)
vector_field = payload.get("vectorField", "vector")
pk_field = payload.get("primaryField", "id")
data = []
if nb == 1:
data = [{
pk_field: int(time.time()*10000),
vector_field: preprocessing.normalize([np.array([random.random() for i in range(dim)])])[0].tolist(),
**get_random_json_data()
@@ -125,6 +127,7 @@ def get_data_by_payload(payload, nb=100):
else:
for i in range(nb):
data.append({
pk_field: int(time.time()*10000),
vector_field: preprocessing.normalize([np.array([random.random() for i in range(dim)])])[0].tolist(),
**get_random_json_data(uid=i)
})
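
With the pk_field change above, every generated row now carries an explicit timestamp-derived primary key next to its normalized vector. A standalone sketch of the row shape, mirroring the defaults in get_data_by_payload:

import random
import time

import numpy as np
from sklearn import preprocessing

dim = 128
row = {
    "id": int(time.time() * 10000),  # timestamp-derived primary key
    "vector": preprocessing.normalize(
        [np.array([random.random() for _ in range(dim)])]
    )[0].tolist(),
    # get_random_json_data() merges extra random attributes into the row
}
assert len(row["vector"]) == dim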


@@ -66,7 +66,7 @@ echo "prepare e2e test"
install_pytest_requirements
# Run restful test
# Run restful test v1
cd ${ROOT}/tests/restful_client
@@ -79,6 +79,18 @@ else
--html=${CI_LOG_PATH}/report_restful.html --self-contained-html
fi
# Run restful test v2
cd ${ROOT}/tests/restful_client_v2
if [[ -n "${TEST_TIMEOUT:-}" ]]; then
timeout "${TEST_TIMEOUT}" pytest testcases --endpoint http://${MILVUS_SERVICE_NAME}:${MILVUS_SERVICE_PORT} -v -x -m L0 -n 6 --timeout 180\
--html=${CI_LOG_PATH}/report_restful.html --self-contained-html
else
pytest testcases --endpoint http://${MILVUS_SERVICE_NAME}:${MILVUS_SERVICE_PORT} -v -x -m L0 -n 6 --timeout 180 \
--html=${CI_LOG_PATH}/report_restful_v2.html --self-contained-html
fi
cd ${ROOT}/tests/python_client