mirror of
https://gitee.com/milvus-io/milvus.git
synced 2024-11-30 02:48:45 +08:00
test: add restful client test in ci (#30036)
add restful client test in ci --------- Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>
This commit is contained in:
parent
c9860e3d2a
commit
cad8bf6c8d
@ -14,15 +14,14 @@ def logger_request_response(response, url, tt, headers, data, str_data, str_resp
|
||||
logger.debug(
|
||||
f"method: {method}, url: {url}, cost time: {tt}, header: {headers}, payload: {str_data}, response: {str_response}")
|
||||
else:
|
||||
logger.error(
|
||||
logger.debug(
|
||||
f"method: {method}, url: {url}, cost time: {tt}, header: {headers}, payload: {data}, response: {response.text}")
|
||||
else:
|
||||
logger.error(
|
||||
logger.debug(
|
||||
f"method: {method}, url: {url}, cost time: {tt}, header: {headers}, payload: {data}, response: {response.text}")
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
logger.error(
|
||||
f"method: {method}, url: {url}, cost time: {tt}, header: {headers}, payload: {data}, response: {response.text}")
|
||||
logger.debug(
|
||||
f"method: {method}, url: {url}, cost time: {tt}, header: {headers}, payload: {data}, response: {response.text}, error: {e}")
|
||||
|
||||
|
||||
class Requests:
|
||||
@ -92,11 +91,11 @@ class Requests:
|
||||
|
||||
|
||||
class VectorClient(Requests):
|
||||
def __init__(self, url, api_key, protocol):
|
||||
super().__init__(url, api_key)
|
||||
self.protocol = protocol
|
||||
self.url = url
|
||||
self.api_key = api_key
|
||||
def __init__(self, endpoint, token):
|
||||
super().__init__(url=endpoint, api_key=token)
|
||||
self.endpoint = endpoint
|
||||
self.token = token
|
||||
self.api_key = token
|
||||
self.db_name = None
|
||||
self.headers = self.update_headers()
|
||||
|
||||
@ -110,7 +109,7 @@ class VectorClient(Requests):
|
||||
|
||||
def vector_search(self, payload, db_name="default", timeout=10):
|
||||
time.sleep(1)
|
||||
url = f'{self.protocol}://{self.url}/vector/search'
|
||||
url = f'{self.endpoint}/vector/search'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -135,7 +134,7 @@ class VectorClient(Requests):
|
||||
|
||||
def vector_query(self, payload, db_name="default", timeout=10):
|
||||
time.sleep(1)
|
||||
url = f'{self.protocol}://{self.url}/vector/query'
|
||||
url = f'{self.endpoint}/vector/query'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -160,7 +159,7 @@ class VectorClient(Requests):
|
||||
|
||||
def vector_get(self, payload, db_name="default"):
|
||||
time.sleep(1)
|
||||
url = f'{self.protocol}://{self.url}/vector/get'
|
||||
url = f'{self.endpoint}/vector/get'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -169,7 +168,7 @@ class VectorClient(Requests):
|
||||
return response.json()
|
||||
|
||||
def vector_delete(self, payload, db_name="default"):
|
||||
url = f'{self.protocol}://{self.url}/vector/delete'
|
||||
url = f'{self.endpoint}/vector/delete'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -178,7 +177,7 @@ class VectorClient(Requests):
|
||||
return response.json()
|
||||
|
||||
def vector_insert(self, payload, db_name="default"):
|
||||
url = f'{self.protocol}://{self.url}/vector/insert'
|
||||
url = f'{self.endpoint}/vector/insert'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -189,11 +188,10 @@ class VectorClient(Requests):
|
||||
|
||||
class CollectionClient(Requests):
|
||||
|
||||
def __init__(self, url, api_key, protocol):
|
||||
super().__init__(url, api_key)
|
||||
self.protocol = protocol
|
||||
self.url = url
|
||||
self.api_key = api_key
|
||||
def __init__(self, endpoint, token):
|
||||
super().__init__(url=endpoint, api_key=token)
|
||||
self.endpoint = endpoint
|
||||
self.api_key = token
|
||||
self.db_name = None
|
||||
self.headers = self.update_headers()
|
||||
|
||||
@ -206,7 +204,7 @@ class CollectionClient(Requests):
|
||||
return headers
|
||||
|
||||
def collection_list(self, db_name="default"):
|
||||
url = f'{self.protocol}://{self.url}/vector/collections'
|
||||
url = f'{self.endpoint}/vector/collections'
|
||||
params = {}
|
||||
if self.db_name is not None:
|
||||
params = {
|
||||
@ -222,7 +220,7 @@ class CollectionClient(Requests):
|
||||
|
||||
def collection_create(self, payload, db_name="default"):
|
||||
time.sleep(1) # wait for collection created and in case of rate limit
|
||||
url = f'{self.protocol}://{self.url}/vector/collections/create'
|
||||
url = f'{self.endpoint}/vector/collections/create'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
@ -231,7 +229,7 @@ class CollectionClient(Requests):
|
||||
return response.json()
|
||||
|
||||
def collection_describe(self, collection_name, db_name="default"):
|
||||
url = f'{self.protocol}://{self.url}/vector/collections/describe'
|
||||
url = f'{self.endpoint}/vector/collections/describe'
|
||||
params = {"collectionName": collection_name}
|
||||
if self.db_name is not None:
|
||||
params = {
|
||||
@ -248,7 +246,7 @@ class CollectionClient(Requests):
|
||||
|
||||
def collection_drop(self, payload, db_name="default"):
|
||||
time.sleep(1) # wait for collection drop and in case of rate limit
|
||||
url = f'{self.protocol}://{self.url}/vector/collections/drop'
|
||||
url = f'{self.endpoint}/vector/collections/drop'
|
||||
if self.db_name is not None:
|
||||
payload["dbName"] = self.db_name
|
||||
if db_name != "default":
|
||||
|
@ -1,6 +1,5 @@
|
||||
import json
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
import time
|
||||
from pymilvus import connections, db
|
||||
@ -43,18 +42,16 @@ class TestBase(Base):
|
||||
logger.error(e)
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def init_client(self, protocol, host, port, username, password):
|
||||
self.protocol = protocol
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.url = f"{host}:{port}/v1"
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.api_key = f"{self.username}:{self.password}"
|
||||
def init_client(self, endpoint, token):
|
||||
self.url = f"{endpoint}/v1"
|
||||
self.api_key = f"{token}"
|
||||
self.invalid_api_key = "invalid_token"
|
||||
self.vector_client = VectorClient(self.url, self.api_key)
|
||||
self.collection_client = CollectionClient(self.url, self.api_key)
|
||||
connections.connect(host=self.host, port=self.port)
|
||||
if token is None:
|
||||
self.vector_client.api_key = None
|
||||
self.collection_client.api_key = None
|
||||
connections.connect(uri=endpoint, token=token)
|
||||
|
||||
def init_collection(self, collection_name, pk_field="id", metric_type="L2", dim=128, nb=100, batch_size=1000):
|
||||
# create collection
|
||||
@ -71,10 +68,7 @@ class TestBase(Base):
|
||||
self.wait_collection_load_completed(collection_name)
|
||||
batch_size = batch_size
|
||||
batch = nb // batch_size
|
||||
# in case of nb < batch_size
|
||||
if batch == 0:
|
||||
batch = 1
|
||||
batch_size = nb
|
||||
remainder = nb % batch_size
|
||||
data = []
|
||||
for i in range(batch):
|
||||
nb = batch_size
|
||||
@ -84,9 +78,20 @@ class TestBase(Base):
|
||||
"data": data
|
||||
}
|
||||
body_size = sys.getsizeof(json.dumps(payload))
|
||||
logger.info(f"body size: {body_size / 1024 / 1024} MB")
|
||||
logger.debug(f"body size: {body_size / 1024 / 1024} MB")
|
||||
rsp = self.vector_client.vector_insert(payload)
|
||||
assert rsp['code'] == 200
|
||||
# insert remainder data
|
||||
if remainder:
|
||||
nb = remainder
|
||||
data = get_data_by_payload(schema_payload, nb)
|
||||
payload = {
|
||||
"collectionName": collection_name,
|
||||
"data": data
|
||||
}
|
||||
rsp = self.vector_client.vector_insert(payload)
|
||||
assert rsp['code'] == 200
|
||||
|
||||
return schema_payload, data
|
||||
|
||||
def wait_collection_load_completed(self, name):
|
||||
@ -100,7 +105,6 @@ class TestBase(Base):
|
||||
time.sleep(5)
|
||||
|
||||
def create_database(self, db_name="default"):
|
||||
connections.connect(host=self.host, port=self.port)
|
||||
all_db = db.list_database()
|
||||
logger.info(f"all database: {all_db}")
|
||||
if db_name not in all_db:
|
||||
|
@ -3,34 +3,17 @@ import yaml
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--protocol", action="store", default="http", help="host")
|
||||
parser.addoption("--host", action="store", default="127.0.0.1", help="host")
|
||||
parser.addoption("--port", action="store", default="19530", help="port")
|
||||
parser.addoption("--username", action="store", default="root", help="email")
|
||||
parser.addoption("--password", action="store", default="Milvus", help="password")
|
||||
parser.addoption("--endpoint", action="store", default="http://127.0.0.1:19530", help="endpoint")
|
||||
parser.addoption("--token", action="store", default="root:Milvus", help="token")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def protocol(request):
|
||||
return request.config.getoption("--protocol")
|
||||
def endpoint(request):
|
||||
return request.config.getoption("--endpoint")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def host(request):
|
||||
return request.config.getoption("--host")
|
||||
def token(request):
|
||||
return request.config.getoption("--token")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def port(request):
|
||||
return request.config.getoption("--port")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def username(request):
|
||||
return request.config.getoption("--username")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def password(request):
|
||||
return request.config.getoption("--password")
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
[pytest]
|
||||
addopts = --strict --host 127.0.0.1 --port 19530 --username root --password Milvus --log-cli-level=INFO --capture=no
|
||||
addopts = --strict --endpoint http://127.0.0.1:19530 --token root:Milvus
|
||||
|
||||
log_format = [%(asctime)s - %(levelname)s - %(name)s]: %(message)s (%(filename)s:%(lineno)s)
|
||||
log_date_format = %Y-%m-%d %H:%M:%S
|
||||
|
@ -1,10 +1,11 @@
|
||||
--extra-index-url https://test.pypi.org/simple/
|
||||
requests==2.31.0
|
||||
urllib3==1.26.18
|
||||
loguru~=0.5.3
|
||||
pytest~=7.2.0
|
||||
pyyaml~=6.0
|
||||
numpy~=1.24.3
|
||||
allure-pytest>=2.8.18
|
||||
Faker==19.2.0
|
||||
pymilvus~=2.2.9
|
||||
pymilvus==2.4.0rc19
|
||||
scikit-learn~=1.1.3
|
||||
pytest-xdist==2.5.0
|
||||
|
@ -1,5 +1,5 @@
|
||||
import datetime
|
||||
import random
|
||||
import logging
|
||||
import time
|
||||
from utils.util_log import test_log as logger
|
||||
from utils.utils import gen_collection_name
|
||||
@ -39,6 +39,7 @@ class TestCreateCollection(TestBase):
|
||||
del payload["primaryField"]
|
||||
if vector_field is None:
|
||||
del payload["vectorField"]
|
||||
logging.info(f"create collection {name} with payload: {payload}")
|
||||
rsp = client.collection_create(payload)
|
||||
assert rsp['code'] == 200
|
||||
rsp = client.collection_list()
|
||||
@ -67,6 +68,7 @@ class TestCreateCollection(TestBase):
|
||||
rsp = client.collection_create(collection_payload)
|
||||
concurrent_rsp.append(rsp)
|
||||
logger.info(rsp)
|
||||
|
||||
name = gen_collection_name()
|
||||
dim = 128
|
||||
metric_type = "L2"
|
||||
@ -112,6 +114,7 @@ class TestCreateCollection(TestBase):
|
||||
rsp = client.collection_create(collection_payload)
|
||||
concurrent_rsp.append(rsp)
|
||||
logger.info(rsp)
|
||||
|
||||
name = gen_collection_name()
|
||||
dim = 128
|
||||
client = self.collection_client
|
||||
@ -141,6 +144,10 @@ class TestCreateCollection(TestBase):
|
||||
assert rsp['code'] == 200
|
||||
assert rsp['data']['collectionName'] == name
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestCreateCollectionNegative(TestBase):
|
||||
|
||||
def test_create_collections_with_invalid_api_key(self):
|
||||
"""
|
||||
target: test create collection with invalid api key(wrong username and password)
|
||||
@ -158,7 +165,8 @@ class TestCreateCollection(TestBase):
|
||||
rsp = client.collection_create(payload)
|
||||
assert rsp['code'] == 1800
|
||||
|
||||
@pytest.mark.parametrize("name", [" ", "test_collection_" * 100, "test collection", "test/collection", "test\collection"])
|
||||
@pytest.mark.parametrize("name",
|
||||
[" ", "test_collection_" * 100, "test collection", "test/collection", "test\collection"])
|
||||
def test_create_collections_with_invalid_collection_name(self, name):
|
||||
"""
|
||||
target: test create collection with invalid collection name
|
||||
@ -202,6 +210,9 @@ class TestListCollections(TestBase):
|
||||
for name in name_list:
|
||||
assert name in all_collections
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestListCollectionsNegative(TestBase):
|
||||
def test_list_collections_with_invalid_api_key(self):
|
||||
"""
|
||||
target: test list collection with an invalid api key
|
||||
@ -230,7 +241,6 @@ class TestListCollections(TestBase):
|
||||
@pytest.mark.L0
|
||||
class TestDescribeCollection(TestBase):
|
||||
|
||||
|
||||
def test_describe_collections_default(self):
|
||||
"""
|
||||
target: test describe collection with a simple schema
|
||||
@ -255,6 +265,9 @@ class TestDescribeCollection(TestBase):
|
||||
assert rsp['data']['collectionName'] == name
|
||||
assert f"FloatVector({dim})" in str(rsp['data']['fields'])
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestDescribeCollectionNegative(TestBase):
|
||||
def test_describe_collections_with_invalid_api_key(self):
|
||||
"""
|
||||
target: test describe collection with invalid api key
|
||||
@ -274,7 +287,7 @@ class TestDescribeCollection(TestBase):
|
||||
all_collections = rsp['data']
|
||||
assert name in all_collections
|
||||
# describe collection
|
||||
illegal_client = CollectionClient(self.url, "illegal_api_key", self.protocol)
|
||||
illegal_client = CollectionClient(self.url, "illegal_api_key")
|
||||
rsp = illegal_client.collection_describe(name)
|
||||
assert rsp['code'] == 1800
|
||||
|
||||
@ -304,7 +317,6 @@ class TestDescribeCollection(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestDropCollection(TestBase):
|
||||
|
||||
def test_drop_collections_default(self):
|
||||
"""
|
||||
Drop a collection with a simple schema
|
||||
@ -339,6 +351,9 @@ class TestDropCollection(TestBase):
|
||||
for name in clo_list:
|
||||
assert name not in all_collections
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestDropCollectionNegative(TestBase):
|
||||
def test_drop_collections_with_invalid_api_key(self):
|
||||
"""
|
||||
target: test drop collection with invalid api key
|
||||
@ -361,7 +376,7 @@ class TestDropCollection(TestBase):
|
||||
payload = {
|
||||
"collectionName": name,
|
||||
}
|
||||
illegal_client = CollectionClient(self.url, "invalid_api_key", self.protocol)
|
||||
illegal_client = CollectionClient(self.url, "invalid_api_key")
|
||||
rsp = illegal_client.collection_drop(payload)
|
||||
assert rsp['code'] == 1800
|
||||
rsp = client.collection_list()
|
||||
|
@ -10,6 +10,7 @@ from pymilvus import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestRestfulSdkCompatibility(TestBase):
|
||||
|
||||
@pytest.mark.parametrize("dim", [128, 256])
|
||||
|
@ -1,6 +1,4 @@
|
||||
import datetime
|
||||
import random
|
||||
import time
|
||||
from sklearn import preprocessing
|
||||
import numpy as np
|
||||
import sys
|
||||
@ -10,14 +8,13 @@ from utils import constant
|
||||
from utils.utils import gen_collection_name
|
||||
from utils.util_log import test_log as logger
|
||||
import pytest
|
||||
from api.milvus import VectorClient
|
||||
from base.testbase import TestBase
|
||||
from utils.utils import (get_data_by_fields, get_data_by_payload, get_common_fields_by_data)
|
||||
from utils.utils import (get_data_by_payload, get_common_fields_by_data)
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestInsertVector(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("insert_round", [2, 1])
|
||||
@pytest.mark.parametrize("nb", [100, 10, 1])
|
||||
@pytest.mark.parametrize("dim", [32, 128])
|
||||
@ -86,8 +83,10 @@ class TestInsertVector(TestBase):
|
||||
rsp = self.vector_client.vector_insert(payload)
|
||||
assert rsp['code'] == 200
|
||||
assert rsp['data']['insertCount'] == nb
|
||||
logger.info("finished")
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestInsertVectorNegative(TestBase):
|
||||
def test_insert_vector_with_invalid_api_key(self):
|
||||
"""
|
||||
Insert a vector with invalid api key
|
||||
@ -210,10 +209,11 @@ class TestInsertVector(TestBase):
|
||||
assert rsp['message'] == "fail to deal the insert data"
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestSearchVector(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("metric_type", ["IP", "L2"])
|
||||
@pytest.mark.xfail(reason="https://github.com/milvus-io/milvus/issues/30102")
|
||||
def test_search_vector_with_simple_payload(self, metric_type):
|
||||
"""
|
||||
Search a vector with a simple payload
|
||||
@ -243,8 +243,8 @@ class TestSearchVector(TestBase):
|
||||
if metric_type == "IP":
|
||||
assert distance == sorted(distance, reverse=True)
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("sum_limit_offset", [16384, 16385])
|
||||
@pytest.mark.xfail(reason="")
|
||||
def test_search_vector_with_exceed_sum_limit_offset(self, sum_limit_offset):
|
||||
"""
|
||||
Search a vector with a simple payload
|
||||
@ -264,11 +264,11 @@ class TestSearchVector(TestBase):
|
||||
"collectionName": name,
|
||||
"vector": vector_to_search,
|
||||
"limit": limit,
|
||||
"offset": sum_limit_offset-limit,
|
||||
"offset": sum_limit_offset - limit,
|
||||
}
|
||||
rsp = self.vector_client.vector_search(payload)
|
||||
if sum_limit_offset > max_search_sum_limit_offset:
|
||||
assert rsp['code'] == 1
|
||||
assert rsp['code'] == 65535
|
||||
return
|
||||
assert rsp['code'] == 200
|
||||
res = rsp['data']
|
||||
@ -283,7 +283,6 @@ class TestSearchVector(TestBase):
|
||||
if metric_type == "IP":
|
||||
assert distance == sorted(distance, reverse=True)
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("level", [0, 1, 2])
|
||||
@pytest.mark.parametrize("offset", [0, 10, 100])
|
||||
@pytest.mark.parametrize("limit", [1, 100])
|
||||
@ -322,7 +321,6 @@ class TestSearchVector(TestBase):
|
||||
for field in output_fields:
|
||||
assert field in item
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("filter_expr", ["uid >= 0", "uid >= 0 and uid < 100", "uid in [1,2,3]"])
|
||||
def test_search_vector_with_complex_int_filter(self, filter_expr):
|
||||
"""
|
||||
@ -355,7 +353,6 @@ class TestSearchVector(TestBase):
|
||||
uid = item.get("uid")
|
||||
eval(filter_expr)
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("filter_expr", ["name > \"placeholder\"", "name like \"placeholder%\""])
|
||||
def test_search_vector_with_complex_varchar_filter(self, filter_expr):
|
||||
"""
|
||||
@ -401,7 +398,6 @@ class TestSearchVector(TestBase):
|
||||
if "like" in filter_expr:
|
||||
assert name.startswith(prefix)
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("filter_expr", ["uid < 100 and name > \"placeholder\"",
|
||||
"uid < 100 and name like \"placeholder%\""
|
||||
])
|
||||
@ -453,6 +449,9 @@ class TestSearchVector(TestBase):
|
||||
if "like" in varchar_expr:
|
||||
assert name.startswith(prefix)
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestSearchVectorNegative(TestBase):
|
||||
@pytest.mark.parametrize("limit", [0, 16385])
|
||||
def test_search_vector_with_invalid_limit(self, limit):
|
||||
"""
|
||||
@ -541,9 +540,9 @@ class TestSearchVector(TestBase):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestQueryVector(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("expr", ["10+20 <= uid < 20+30", "uid in [1,2,3,4]",
|
||||
"uid > 0", "uid >= 0", "uid > 0",
|
||||
"uid > -100 and uid < 100"])
|
||||
@ -587,7 +586,6 @@ class TestQueryVector(TestBase):
|
||||
for field in output_fields:
|
||||
assert field in r
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("filter_expr", ["name > \"placeholder\"", "name like \"placeholder%\""])
|
||||
@pytest.mark.parametrize("include_output_fields", [True, False])
|
||||
def test_query_vector_with_varchar_filter(self, filter_expr, include_output_fields):
|
||||
@ -633,7 +631,7 @@ class TestQueryVector(TestBase):
|
||||
if "like" in filter_expr:
|
||||
assert name.startswith(prefix)
|
||||
|
||||
@pytest.mark.parametrize("sum_of_limit_offset", [16384, 16385])
|
||||
@pytest.mark.parametrize("sum_of_limit_offset", [16384])
|
||||
def test_query_vector_with_large_sum_of_limit_offset(self, sum_of_limit_offset):
|
||||
"""
|
||||
Query a vector with sum of limit and offset larger than max value
|
||||
@ -682,9 +680,9 @@ class TestQueryVector(TestBase):
|
||||
assert name.startswith(prefix)
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestGetVector(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
def test_get_vector_with_simple_payload(self):
|
||||
"""
|
||||
Search a vector with a simple payload
|
||||
@ -787,9 +785,9 @@ class TestGetVector(TestBase):
|
||||
assert field in r
|
||||
|
||||
|
||||
@pytest.mark.L0
|
||||
class TestDeleteVector(TestBase):
|
||||
|
||||
@pytest.mark.L0
|
||||
@pytest.mark.parametrize("include_invalid_id", [True, False])
|
||||
@pytest.mark.parametrize("id_field_type", ["list", "one"])
|
||||
def test_delete_vector_default(self, id_field_type, include_invalid_id):
|
||||
@ -850,6 +848,9 @@ class TestDeleteVector(TestBase):
|
||||
assert rsp['code'] == 200
|
||||
assert len(rsp['data']) == 0
|
||||
|
||||
|
||||
@pytest.mark.L1
|
||||
class TestDeleteVector(TestBase):
|
||||
def test_delete_vector_with_invalid_api_key(self):
|
||||
"""
|
||||
Delete a vector with an invalid api key
|
||||
|
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
from loguru import logger as loguru_logger
|
||||
import sys
|
||||
|
||||
from config.log_config import log_config
|
||||
@ -44,7 +43,6 @@ class TestLog:
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
ch.setLevel(logging.DEBUG)
|
||||
ch.setFormatter(formatter)
|
||||
# self.log.addHandler(ch)
|
||||
|
||||
except Exception as e:
|
||||
print("Can not use %s or %s or %s to log. error : %s" % (log_debug, log_file, log_err, str(e)))
|
||||
@ -55,6 +53,4 @@ log_debug = log_config.log_debug
|
||||
log_info = log_config.log_info
|
||||
log_err = log_config.log_err
|
||||
log_worker = log_config.log_worker
|
||||
self_defined_log = TestLog('ci_test', log_debug, log_info, log_err, log_worker).log
|
||||
loguru_log = loguru_logger
|
||||
test_log = self_defined_log
|
||||
test_log = TestLog('ci_test', log_debug, log_info, log_err, log_worker).log
|
||||
|
@ -39,6 +39,7 @@ MILVUS_HELM_NAMESPACE="${MILVUS_HELM_NAMESPACE:-default}"
|
||||
PARALLEL_NUM="${PARALLEL_NUM:-6}"
|
||||
# Use service name instead of IP to test
|
||||
MILVUS_SERVICE_NAME=$(echo "${MILVUS_HELM_RELEASE_NAME}-milvus.${MILVUS_HELM_NAMESPACE}" | tr -d '\n')
|
||||
# MILVUS_SERVICE_HOST=$(kubectl get svc ${MILVUS_SERVICE_NAME}-milvus -n ${MILVUS_HELM_NAMESPACE} -o jsonpath='{.spec.clusterIP}')
|
||||
MILVUS_SERVICE_PORT="19530"
|
||||
# Minio service name
|
||||
MINIO_SERVICE_NAME=$(echo "${MILVUS_HELM_RELEASE_NAME}-minio.${MILVUS_HELM_NAMESPACE}" | tr -d '\n')
|
||||
@ -65,6 +66,22 @@ echo "prepare e2e test"
|
||||
install_pytest_requirements
|
||||
|
||||
|
||||
# Run restful test
|
||||
|
||||
cd ${ROOT}/tests/restful_client
|
||||
|
||||
if [[ -n "${TEST_TIMEOUT:-}" ]]; then
|
||||
|
||||
timeout "${TEST_TIMEOUT}" pytest testcases --endpoint http://${MILVUS_SERVICE_NAME}:${MILVUS_SERVICE_PORT} -m L0 -n 6 \
|
||||
--html=${CI_LOG_PATH}/report_restful.html --self-contained-html
|
||||
else
|
||||
pytest testcases --endpoint http://${MILVUS_SERVICE_NAME}:${MILVUS_SERVICE_PORT} -m L0 -n 6 \
|
||||
--html=${CI_LOG_PATH}/report_restful.html --self-contained-html
|
||||
fi
|
||||
|
||||
cd ${ROOT}/tests/python_client
|
||||
|
||||
|
||||
# Pytest is not able to have both --timeout & --workers, so do not add --timeout or --workers in the shell script
|
||||
if [[ -n "${TEST_TIMEOUT:-}" ]]; then
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user