Add e2e test for RESTful APIs, fix json encoding for proto_oneof fields (#17362)
Signed-off-by: shaoyue.chen <shaoyue.chen@zilliz.com>
parent ed94ecf847
commit 32a3ed0791
@@ -197,6 +197,13 @@ pipeline {
                } else {
                    error "Error: Unsupported Milvus client: ${MILVUS_CLIENT}"
                }
                sh """
                MILVUS_HELM_RELEASE_NAME="${release_name}" \
                MILVUS_HELM_NAMESPACE="milvus-ci" \
                MILVUS_CLUSTER_ENABLED="${clusterEnabled}" \
                TEST_TIMEOUT="${e2e_timeout_seconds}" \
                ./e2e-restful.sh
                """
            }
        }
    }
@@ -5,9 +5,7 @@ import (

    "github.com/gin-gonic/gin"
    "github.com/golang/protobuf/proto"
    "github.com/milvus-io/milvus/internal/proto/commonpb"
    "github.com/milvus-io/milvus/internal/proto/milvuspb"
    "github.com/milvus-io/milvus/internal/proto/schemapb"
    "github.com/milvus-io/milvus/internal/types"
)
@@ -98,29 +96,7 @@ func (h *Handlers) handleDummy(c *gin.Context) (interface{}, error) {
    return h.proxy.Dummy(c, &req)
}

type WrappedCreateCollectionRequest struct {
    // Not useful for now
    Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
    // Not useful for now
    DbName string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"`
    // The unique collection name in milvus.(Required)
    CollectionName string `protobuf:"bytes,3,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
    // The serialized `schema.CollectionSchema`(Required)
    Schema schemapb.CollectionSchema `protobuf:"bytes,4,opt,name=schema,proto3" json:"schema,omitempty"`
    // Once set, no modification is allowed (Optional)
    // https://github.com/milvus-io/milvus/issues/6690
    ShardsNum int32 `protobuf:"varint,5,opt,name=shards_num,json=shardsNum,proto3" json:"shards_num,omitempty"`
    // The consistency level that the collection used, modification is not supported now.
    ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,6,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"`
}

func (h *Handlers) handleCreateCollection(c *gin.Context) (interface{}, error) {
    // About why we uses WrappedCreateCollectionRequest:
    // Milvus uses `bytes` as the type of `schema` field,
    // while the bytes has to be serialized by proto.Marshal.
    // It's very inconvenient for an HTTP clien to do this,
    // so we change the type to a struct,
    // and does the conversion for user.
    wrappedReq := WrappedCreateCollectionRequest{}
    err := shouldBind(c, &wrappedReq)
    if err != nil {
@@ -340,11 +316,24 @@ func (h *Handlers) handleDropIndex(c *gin.Context) (interface{}, error) {
}

func (h *Handlers) handleInsert(c *gin.Context) (interface{}, error) {
    req := milvuspb.InsertRequest{}
    err := shouldBind(c, &req)
    wrappedReq := WrappedInsertRequest{}
    err := shouldBind(c, &wrappedReq)
    if err != nil {
        return nil, fmt.Errorf("%w: parse body failed: %v", errBadRequest, err)
    }
    fieldData, err := convertFieldDataArray(wrappedReq.FieldsData)
    if err != nil {
        return nil, fmt.Errorf("%w: convert field data failed: %v", errBadRequest, err)
    }
    req := milvuspb.InsertRequest{
        Base:           wrappedReq.Base,
        DbName:         wrappedReq.DbName,
        CollectionName: wrappedReq.CollectionName,
        PartitionName:  wrappedReq.PartitionName,
        FieldsData:     fieldData,
        HashKeys:       wrappedReq.HashKeys,
        NumRows:        wrappedReq.NumRows,
    }
    return h.proxy.Insert(c, &req)
}
@@ -358,11 +347,29 @@ func (h *Handlers) handleDelete(c *gin.Context) (interface{}, error) {
}

func (h *Handlers) handleSearch(c *gin.Context) (interface{}, error) {
    req := milvuspb.SearchRequest{}
    err := shouldBind(c, &req)
    wrappedReq := SearchRequest{}
    err := shouldBind(c, &wrappedReq)
    if err != nil {
        return nil, fmt.Errorf("%w: parse body failed: %v", errBadRequest, err)
    }
    req := milvuspb.SearchRequest{
        Base:               wrappedReq.Base,
        DbName:             wrappedReq.DbName,
        CollectionName:     wrappedReq.CollectionName,
        PartitionNames:     wrappedReq.PartitionNames,
        Dsl:                wrappedReq.Dsl,
        DslType:            wrappedReq.DslType,
        OutputFields:       wrappedReq.OutputFields,
        SearchParams:       wrappedReq.SearchParams,
        TravelTimestamp:    wrappedReq.TravelTimestamp,
        GuaranteeTimestamp: wrappedReq.GuaranteeTimestamp,
        Nq:                 wrappedReq.Nq,
    }
    if len(wrappedReq.BinaryVectors) > 0 {
        req.PlaceholderGroup = binaryVector2Bytes(wrappedReq.BinaryVectors)
    } else {
        req.PlaceholderGroup = vector2Bytes(wrappedReq.Vectors)
    }
    return h.proxy.Search(c, &req)
}
@@ -385,11 +392,18 @@ func (h *Handlers) handleFlush(c *gin.Context) (interface{}, error) {
}

func (h *Handlers) handleCalcDistance(c *gin.Context) (interface{}, error) {
    req := milvuspb.CalcDistanceRequest{}
    err := shouldBind(c, &req)
    wrappedReq := WrappedCalcDistanceRequest{}
    err := shouldBind(c, &wrappedReq)
    if err != nil {
        return nil, fmt.Errorf("%w: parse body failed: %v", errBadRequest, err)
    }

    req := milvuspb.CalcDistanceRequest{
        Base:    wrappedReq.Base,
        Params:  wrappedReq.Params,
        OpLeft:  wrappedReq.OpLeft.AsPbVectorArray(),
        OpRight: wrappedReq.OpRight.AsPbVectorArray(),
    }
    return h.proxy.CalcDistance(c, &req)
}
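The handlers above only change how the request body is decoded; the calls into the proxy are untouched. As a rough sketch of what a client gains from this (not part of the commit: the host 127.0.0.1 and the sample field values are assumptions, while the /api/v1/entities route, port 8080, and the field layout are taken from the e2e script and JSON fixtures added below), an insert can now be issued with plain JSON:

package main

import (
    "bytes"
    "fmt"
    "io"
    "net/http"
)

func main() {
    // Plain-JSON insert body: fields_data needs no proto.Marshal on the client;
    // the server-side WrappedInsertRequest/FieldData types do the conversion.
    body := []byte(`{
        "collection_name": "book",
        "fields_data": [
            {"field_name": "book_id",    "type": 5,   "field": [1, 2]},
            {"field_name": "book_intro", "type": 101, "field": [[1.0, 1.0], [2.0, 1.0]]}
        ],
        "num_rows": 2
    }`)

    resp, err := http.Post("http://127.0.0.1:8080/api/v1/entities",
        "application/json", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    reply, _ := io.ReadAll(resp.Body)
    fmt.Println(resp.Status, string(reply))
}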
internal/distributed/proxy/httpserver/wrap_request.go (new file, 396 lines)
@@ -0,0 +1,396 @@
package httpserver

import (
    "encoding/binary"
    "errors"
    "fmt"
    "math"
    "reflect"

    "github.com/golang/protobuf/proto"
    "github.com/milvus-io/milvus/internal/proto/commonpb"
    "github.com/milvus-io/milvus/internal/proto/milvuspb"
    "github.com/milvus-io/milvus/internal/proto/schemapb"
)

// We wrap the original protobuf structures for 2 reasons:
// 1. Milvus uses `bytes` as the type of the `schema` field,
//    while the bytes have to be serialized by proto.Marshal.
//    It's very inconvenient for an HTTP client to do this,
//    so we change the type to a struct
//    and do the conversion for the user.
// 2. Some fields use proto oneof, which json marshaling does not support directly,
//    so we have to implement the marshaling procedure ourselves. Example: InsertRequest.

// WrappedCreateCollectionRequest wraps CreateCollectionRequest
type WrappedCreateCollectionRequest struct {
    // Not useful for now
    Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
    // Not useful for now
    DbName string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"`
    // The unique collection name in milvus.(Required)
    CollectionName string `protobuf:"bytes,3,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
    // The serialized `schema.CollectionSchema`(Required)
    Schema schemapb.CollectionSchema `protobuf:"bytes,4,opt,name=schema,proto3" json:"schema,omitempty"`
    // Once set, no modification is allowed (Optional)
    // https://github.com/milvus-io/milvus/issues/6690
    ShardsNum int32 `protobuf:"varint,5,opt,name=shards_num,json=shardsNum,proto3" json:"shards_num,omitempty"`
    // The consistency level that the collection used, modification is not supported now.
    ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,6,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"`
}

// WrappedInsertRequest is the InsertRequest wrapped for RESTful requests
type WrappedInsertRequest struct {
    Base           *commonpb.MsgBase `json:"base,omitempty"`
    DbName         string            `json:"db_name,omitempty"`
    CollectionName string            `json:"collection_name,omitempty"`
    PartitionName  string            `json:"partition_name,omitempty"`
    FieldsData     []*FieldData      `json:"fields_data,omitempty"`
    HashKeys       []uint32          `json:"hash_keys,omitempty"`
    NumRows        uint32            `json:"num_rows,omitempty"`
}

// FieldData is the field data in a RESTful request; it can be converted to schemapb.FieldData
type FieldData struct {
    Type      schemapb.DataType `json:"type,omitempty"`
    FieldName string            `json:"field_name,omitempty"`
    Field     []interface{}     `json:"field,omitempty"`
    FieldID   int64             `json:"field_id,omitempty"`
}

// AsSchemapb converts the FieldData to schemapb.FieldData
func (f FieldData) AsSchemapb() (*schemapb.FieldData, error) {
    // is scalar
    ret := schemapb.FieldData{
        Type:      f.Type,
        FieldName: f.FieldName,
        FieldId:   f.FieldID,
    }
    raw := f.Field
    switch f.Type {
    case schemapb.DataType_Bool:
        // it's an array by definition, so we only need to check the type of the first element
        if len(raw) > 0 {
            _, ok := raw[0].(bool)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]bool, len(raw))
        for i, v := range raw {
            data[i] = v.(bool)
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_BoolData{
                    BoolData: &schemapb.BoolArray{
                        Data: data,
                    },
                },
            },
        }
    case schemapb.DataType_String:
        if len(raw) > 0 {
            _, ok := raw[0].(string)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]string, len(raw))
        for i, v := range raw {
            data[i] = v.(string)
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_StringData{
                    StringData: &schemapb.StringArray{
                        Data: data,
                    },
                },
            },
        }
    case schemapb.DataType_Int8, schemapb.DataType_Int16, schemapb.DataType_Int32:
        if len(raw) > 0 {
            _, ok := raw[0].(float64)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]int32, len(raw))
        for i, v := range raw {
            data[i] = int32(v.(float64))
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_IntData{
                    IntData: &schemapb.IntArray{
                        Data: data,
                    },
                },
            },
        }
    case schemapb.DataType_Int64:
        if len(raw) > 0 {
            _, ok := raw[0].(float64)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]int64, len(raw))
        for i, v := range raw {
            data[i] = int64(v.(float64))
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_LongData{
                    LongData: &schemapb.LongArray{
                        Data: data,
                    },
                },
            },
        }
    case schemapb.DataType_Float:
        if len(raw) > 0 {
            _, ok := raw[0].(float64)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]float32, len(raw))
        for i, v := range raw {
            data[i] = float32(v.(float64))
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_FloatData{
                    FloatData: &schemapb.FloatArray{
                        Data: data,
                    },
                },
            },
        }

    case schemapb.DataType_Double:
        if len(raw) > 0 {
            _, ok := raw[0].(float64)
            if !ok {
                return nil, newTypeError(raw[0])
            }
        }
        data := make([]float64, len(raw))
        for i, v := range raw {
            data[i] = v.(float64)
        }
        ret.Field = &schemapb.FieldData_Scalars{
            Scalars: &schemapb.ScalarField{
                Data: &schemapb.ScalarField_DoubleData{
                    DoubleData: &schemapb.DoubleArray{
                        Data: data,
                    },
                },
            },
        }

    case schemapb.DataType_FloatVector:
        if len(raw) < 1 {
            return nil, errors.New("at least one row for insert")
        }
        rawArray0, ok := raw[0].([]interface{})
        if !ok {
            return nil, newTypeError(raw[0])
        }
        dim := len(rawArray0)
        if dim < 1 {
            return nil, errors.New("dim must >= 1")
        }
        _, ok = rawArray0[0].(float64)
        if !ok {
            return nil, newTypeError(rawArray0[0])
        }

        data := make([]float32, len(raw)*dim)

        var i int
        for _, rawArray := range raw {
            for _, v := range rawArray.([]interface{}) {
                data[i] = float32(v.(float64))
                i++
            }
        }
        ret.Field = &schemapb.FieldData_Vectors{
            Vectors: &schemapb.VectorField{
                Dim: int64(dim),
                Data: &schemapb.VectorField_FloatVector{
                    FloatVector: &schemapb.FloatArray{
                        Data: data,
                    },
                },
            },
        }
    default:
        return nil, errors.New("unsupported data type")
    }
    return &ret, nil
}

func newTypeError(t interface{}) error {
    return fmt.Errorf("field type[%s] error", reflect.TypeOf(t).String())
}

func convertFieldDataArray(input []*FieldData) ([]*schemapb.FieldData, error) {
    ret := make([]*schemapb.FieldData, len(input))
    for i, v := range input {
        fieldData, err := v.AsSchemapb()
        if err != nil {
            return nil, err
        }
        ret[i] = fieldData
    }
    return ret, nil
}

// SearchRequest is the RESTful request body for search
type SearchRequest struct {
    Base               *commonpb.MsgBase        `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
    DbName             string                   `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"`
    CollectionName     string                   `protobuf:"bytes,3,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
    PartitionNames     []string                 `protobuf:"bytes,4,rep,name=partition_names,json=partitionNames,proto3" json:"partition_names,omitempty"`
    Dsl                string                   `protobuf:"bytes,5,opt,name=dsl,proto3" json:"dsl,omitempty"`
    DslType            commonpb.DslType         `protobuf:"varint,7,opt,name=dsl_type,json=dslType,proto3,enum=milvus.proto.common.DslType" json:"dsl_type,omitempty"`
    BinaryVectors      [][]byte                 `json:"binary_vectors,omitempty"`
    Vectors            [][]float32              `json:"vectors,omitempty"`
    OutputFields       []string                 `protobuf:"bytes,8,rep,name=output_fields,json=outputFields,proto3" json:"output_fields,omitempty"`
    SearchParams       []*commonpb.KeyValuePair `protobuf:"bytes,9,rep,name=search_params,json=searchParams,proto3" json:"search_params,omitempty"`
    TravelTimestamp    uint64                   `protobuf:"varint,10,opt,name=travel_timestamp,json=travelTimestamp,proto3" json:"travel_timestamp,omitempty"`
    GuaranteeTimestamp uint64                   `protobuf:"varint,11,opt,name=guarantee_timestamp,json=guaranteeTimestamp,proto3" json:"guarantee_timestamp,omitempty"`
    Nq                 int64                    `protobuf:"varint,12,opt,name=nq,proto3" json:"nq,omitempty"`
}

func binaryVector2Bytes(vectors [][]byte) []byte {
    ph := &commonpb.PlaceholderValue{
        Tag:    "$0",
        Type:   commonpb.PlaceholderType_BinaryVector,
        Values: make([][]byte, 0, len(vectors)),
    }
    ph.Values = append(ph.Values, vectors...)
    phg := &commonpb.PlaceholderGroup{
        Placeholders: []*commonpb.PlaceholderValue{
            ph,
        },
    }
    ret, _ := proto.Marshal(phg)
    return ret
}

func vector2Bytes(vectors [][]float32) []byte {
    ph := &commonpb.PlaceholderValue{
        Tag:    "$0",
        Type:   commonpb.PlaceholderType_FloatVector,
        Values: make([][]byte, 0, len(vectors)),
    }
    for _, vector := range vectors {
        ph.Values = append(ph.Values, serializeVectors(vector))
    }
    phg := &commonpb.PlaceholderGroup{
        Placeholders: []*commonpb.PlaceholderValue{
            ph,
        },
    }
    ret, _ := proto.Marshal(phg)
    return ret
}

// serializeVectors serializes a float32 vector into a byte slice, used in the search placeholder group.
// LittleEndian is used by convention.
func serializeVectors(fv []float32) []byte {
    data := make([]byte, 0, 4*len(fv)) // float32 occupies 4 bytes
    buf := make([]byte, 4)
    for _, f := range fv {
        binary.LittleEndian.PutUint32(buf, math.Float32bits(f))
        data = append(data, buf...)
    }
    return data
}

// WrappedCalcDistanceRequest is the RESTful request body for calc distance
type WrappedCalcDistanceRequest struct {
    Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`

    OpLeft  VectorsArray `json:"op_left,omitempty"`
    OpRight VectorsArray `json:"op_right,omitempty"`

    Params []*commonpb.KeyValuePair `json:"params,omitempty"`
}

// VectorsArray is a vector array, assigned by vectors or ids
type VectorsArray struct {
    // Dim of vectors or binary_vectors, not needed when using ids
    Dim int64 `json:"dim,omitempty"`
    // Vectors is an array of vectors divided by the given dim. Disabled when ids or binary_vectors is set
    Vectors []float32 `json:"vectors,omitempty"`
    // BinaryVectors is an array of binary vectors divided by the given dim. Disabled when IDs is set
    BinaryVectors []byte `json:"binary_vectors,omitempty"`
    // IDs of a vector field in milvus; if not nil, vectors will be ignored
    IDs *VectorIDs `json:"ids,omitempty"`
}

func (v VectorsArray) isIDs() bool {
    return v.IDs != nil
}

func (v VectorsArray) isBinaryVector() bool {
    return v.IDs == nil && len(v.BinaryVectors) > 0
}

// AsPbVectorArray converts the VectorsArray to a milvuspb.VectorsArray
func (v VectorsArray) AsPbVectorArray() *milvuspb.VectorsArray {
    ret := &milvuspb.VectorsArray{}
    switch {
    case v.isIDs():
        ids := &milvuspb.VectorsArray_IdArray{}
        ids.IdArray = &milvuspb.VectorIDs{
            CollectionName: v.IDs.CollectionName,
            FieldName:      v.IDs.FieldName,
        }
        ids.IdArray.PartitionNames = v.IDs.PartitionNames
        ids.IdArray.IdArray = &schemapb.IDs{}
        ids.IdArray.IdArray.IdField = &schemapb.IDs_IntId{
            IntId: &schemapb.LongArray{
                Data: v.IDs.IDArray,
            },
        }
        ret.Array = ids
    case v.isBinaryVector():
        vf := &schemapb.VectorField{
            Dim: v.Dim,
        }
        vf.Data = &schemapb.VectorField_BinaryVector{
            BinaryVector: v.BinaryVectors,
        }
        ret.Array = &milvuspb.VectorsArray_DataArray{
            DataArray: vf,
        }
    default:
        // take it as ordinary vectors
        vf := &schemapb.VectorField{
            Dim: v.Dim,
        }
        vf.Data = &schemapb.VectorField_FloatVector{
            FloatVector: &schemapb.FloatArray{
                Data: v.Vectors,
            },
        }
        ret.Array = &milvuspb.VectorsArray_DataArray{
            DataArray: vf,
        }
    }
    return ret
}

// VectorIDs is an array of id references in milvus
type VectorIDs struct {
    CollectionName string   `protobuf:"bytes,1,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
    FieldName      string   `protobuf:"bytes,2,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"`
    PartitionNames []string `json:"partition_names"`
    IDArray        []int64  `json:"id_array,omitempty"`
}
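A minimal usage sketch for the wrapper above (written as if it lived in the same httpserver package, since the package path is internal; the sample values are illustrative, not from the commit): JSON numbers land in Field as float64, and AsSchemapb re-types them into the proto oneof that plain encoding/json cannot populate on schemapb.FieldData directly.

package httpserver

import (
    "encoding/json"
    "fmt"
)

func ExampleFieldData_AsSchemapb() {
    // "type": 5 is schemapb.DataType_Int64; the JSON numbers arrive as float64.
    body := []byte(`{"field_name": "book_id", "type": 5, "field": [1, 2, 3]}`)

    var fd FieldData
    if err := json.Unmarshal(body, &fd); err != nil {
        panic(err)
    }

    // AsSchemapb builds the oneof case (ScalarField_LongData) on the proto message.
    pb, err := fd.AsSchemapb()
    if err != nil {
        panic(err)
    }
    fmt.Println(pb.GetScalars().GetLongData().GetData())
    // Output: [1 2 3]
}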
internal/distributed/proxy/httpserver/wrap_request_test.go (new file, 290 lines)
@@ -0,0 +1,290 @@
package httpserver

import (
    "encoding/json"
    "testing"

    "github.com/milvus-io/milvus/internal/proto/milvuspb"
    "github.com/milvus-io/milvus/internal/proto/schemapb"
    "github.com/stretchr/testify/assert"
)

func TestFieldData_AsSchemapb(t *testing.T) {
    t.Run("string_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_String,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("string_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_String,
            Field: []interface{}{1, 2, 3},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })

    t.Run("bool_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Bool,
            Field: []interface{}{true, true, false},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("bool_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Bool,
            Field: []interface{}{1, 2, 3},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })

    t.Run("int8_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int8,
            Field: []interface{}{1, 2, 3},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("int8_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int8,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("int32_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int32,
            Field: []interface{}{1, 2, 3},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("int32_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int32,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("int64_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int64,
            Field: []interface{}{1, 2, 3},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("int64_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Int64,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("float_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Float,
            Field: []interface{}{1.1, 2.1, 3.1},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("float_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Float,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("double_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Double,
            Field: []interface{}{1.1, 2.1, 3.1},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("double_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_Double,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("varchar_not_support", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_VarChar,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })

    // vectors

    t.Run("floatvector_ok", func(t *testing.T) {
        fieldData := FieldData{
            Type: schemapb.DataType_FloatVector,
            Field: []interface{}{
                []float32{1.1, 2.2, 3.1},
                []float32{1.1, 2.2, 3.1},
                []float32{1.1, 2.2, 3.1},
            },
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.NoError(t, err)
    })
    t.Run("floatvector_empty_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_FloatVector,
            Field: []interface{}{},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("floatvector_dim=0_error", func(t *testing.T) {
        fieldData := FieldData{
            Type: schemapb.DataType_FloatVector,
            Field: []interface{}{
                []float32{},
            },
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("floatvector_vectorTypeError_error", func(t *testing.T) {
        fieldData := FieldData{
            Type: schemapb.DataType_FloatVector,
            Field: []interface{}{
                []string{"1"},
            },
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
    t.Run("floatvector_error", func(t *testing.T) {
        fieldData := FieldData{
            Type:  schemapb.DataType_FloatVector,
            Field: []interface{}{"a", "b", "c"},
        }
        raw, _ := json.Marshal(fieldData)
        json.Unmarshal(raw, &fieldData)
        _, err := fieldData.AsSchemapb()
        assert.Error(t, err)
    })
}

func Test_vector2Bytes(t *testing.T) {
    ret := vector2Bytes([][]float32{{1.1, 1.2}})
    assert.NotEmpty(t, ret)
}

func Test_binaryVector2Bytes(t *testing.T) {
    ret := binaryVector2Bytes([][]byte{
        []byte("somebytes"),
    })
    assert.NotEmpty(t, ret)
}

func TestVectorsArray_AsPbVectorArray(t *testing.T) {
    dim := int64(1)
    t.Run("vector_ok", func(t *testing.T) {
        vector := []float32{1, 2}
        v := VectorsArray{
            Dim:     dim,
            Vectors: vector,
        }
        ret := v.AsPbVectorArray()
        da, ok := ret.Array.(*milvuspb.VectorsArray_DataArray)
        assert.True(t, ok)
        assert.Equal(t, dim, da.DataArray.Dim)
        assert.Equal(t, vector, da.DataArray.GetFloatVector().Data)
    })
    t.Run("binary_vector_ok", func(t *testing.T) {
        bv := []byte("somebytes")
        v := VectorsArray{
            // IDs: ,
            Dim:           dim,
            BinaryVectors: bv,
        }
        ret := v.AsPbVectorArray()
        da, ok := ret.Array.(*milvuspb.VectorsArray_DataArray)
        assert.True(t, ok)
        assert.Equal(t, dim, da.DataArray.Dim)
        assert.Equal(t, bv, da.DataArray.GetBinaryVector())
    })
    t.Run("ids_ok", func(t *testing.T) {
        ids := []int64{1, 2, 3}
        cn := "collection"
        partitions := []string{"p1", "p2"}
        field := "field"
        v := VectorsArray{
            IDs: &VectorIDs{
                CollectionName: cn,
                PartitionNames: partitions,
                FieldName:      field,
                IDArray:        ids,
            },
        }
        ret := v.AsPbVectorArray()
        ia, ok := ret.Array.(*milvuspb.VectorsArray_IdArray)
        assert.True(t, ok)
        assert.Equal(t, cn, ia.IdArray.CollectionName)
        assert.Equal(t, partitions, ia.IdArray.PartitionNames)
        assert.Equal(t, field, ia.IdArray.FieldName)
        ints, ok := ia.IdArray.IdArray.IdField.(*schemapb.IDs_IntId)
        assert.True(t, ok)
        assert.Equal(t, ids, ints.IntId.Data)
    })
}
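The tests above exercise vector2Bytes and binaryVector2Bytes end to end. For reference, the placeholder bytes they produce rely on the little-endian float32 layout sketched below; this snippet re-implements the four-byte conversion for illustration and is not part of the commit.

package main

import (
    "encoding/binary"
    "fmt"
    "math"
)

// littleEndianFloat32 mirrors the per-element conversion done by serializeVectors:
// each float32 is written as 4 little-endian bytes.
func littleEndianFloat32(fv []float32) []byte {
    data := make([]byte, 0, 4*len(fv))
    buf := make([]byte, 4)
    for _, f := range fv {
        binary.LittleEndian.PutUint32(buf, math.Float32bits(f))
        data = append(data, buf...)
    }
    return data
}

func main() {
    b := littleEndianFloat32([]float32{1.5, -2.0})
    fmt.Printf("% x\n", b) // 00 00 c0 3f 00 00 00 c0

    // Round-trip the first element to show the layout is recoverable.
    fmt.Println(math.Float32frombits(binary.LittleEndian.Uint32(b[:4]))) // 1.5
}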
@@ -475,7 +475,7 @@ func parsePrimaryFieldData2IDs(fieldData *schemapb.FieldData) (*schemapb.IDs, er
            return nil, errors.New("currently only support DataType Int64 or VarChar as PrimaryField")
        }
    default:
        return nil, errors.New("currently only support vector field as PrimaryField")
        return nil, errors.New("currently not support vector field as PrimaryField")
    }

    return primaryData, nil
tests/scripts/e2e-restful.sh (new executable file, 118 lines)
@@ -0,0 +1,118 @@
#!/bin/bash

# Exit immediately on non-zero status
set -e

# Print commands
set -x

SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    SOURCE="$(readlink "$SOURCE")"
    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
ROOT="$( cd -P "$( dirname "$SOURCE" )/../.." && pwd )"

DATA_PATH="${ROOT}/tests/scripts/restful-data/"

MILVUS_CLUSTER_ENABLED="${MILVUS_CLUSTER_ENABLED:-false}"

# TODO: use service instead of podIP when milvus-helm supports it
if [[ "${MILVUS_CLUSTER_ENABLED}" == "false" ]]; then
    MILVUS_SERVICE_NAME=$(kubectl -n ${MILVUS_HELM_NAMESPACE} get pods -l app.kubernetes.io/name=milvus -l component=standalone -l app.kubernetes.io/instance=${MILVUS_HELM_RELEASE_NAME} -o=jsonpath='{.items[0].status.podIP}')
else
    MILVUS_SERVICE_NAME=$(kubectl -n ${MILVUS_HELM_NAMESPACE} get pods -l app.kubernetes.io/name=milvus -l component=proxy -l app.kubernetes.io/instance=${MILVUS_HELM_RELEASE_NAME} -o=jsonpath='{.items[0].status.podIP}')
fi

# Create a collection
curl -X 'POST' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d @${DATA_PATH}/create-collection.json

# Has collection
curl -X 'GET' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection/existence" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book"
}'

# Check collection details
curl -X 'GET' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book"
}'

# Load collection
curl -X 'POST' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection/load" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book"
}'

### Data
# Insert Data
curl -X 'POST' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/entities" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d @${DATA_PATH}/insert-data.json

# Build Index
curl -X 'POST' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/index" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book",
    "field_name": "book_intro",
    "extra_params":[
        {"key": "metric_type", "value": "L2"},
        {"key": "index_type", "value": "IVF_FLAT"},
        {"key": "params", "value": "{\"nlist\":1024}"}
    ]
}'

# KNN Search
curl -X 'POST' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/search" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d @${DATA_PATH}/search.json

# Drop Index
curl -X 'DELETE' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/index" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book",
    "field_name": "book_intro"
}'

# Release collection
curl -X 'DELETE' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection/load" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book"
}'

# Drop collection
curl -X 'DELETE' \
    "http://${MILVUS_SERVICE_NAME}:8080/api/v1/collection" \
    -H 'accept: application/json' \
    -H 'Content-Type: application/json' \
    -d '{
    "collection_name": "book"
}'
tests/scripts/restful-data/create-collection.json (new file, 32 lines)
@@ -0,0 +1,32 @@
{
    "collection_name": "book",
    "consistency_level": 1,
    "db_name": "string",
    "schema": {
        "autoID": false,
        "description": "Test book search",
        "fields": [
            {
                "name": "book_id",
                "description": "book id",
                "is_primary_key": true,
                "autoID": false,
                "data_type": 5
            },
            {
                "name": "book_intro",
                "description": "embedded vector of book introduction",
                "autoID": false,
                "data_type": 101,
                "is_primary_key": false,
                "type_params": [
                    {
                        "key": "dim",
                        "value": "2"
                    }
                ]
            }
        ],
        "name": "book"
    }
}
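The numeric data_type values above are the raw schemapb.DataType enum values consumed by the wrapper types. A quick sanity-check sketch follows (assumed to be compiled inside the milvus module, since the generated proto package lives under internal/):

package main

import (
    "fmt"

    "github.com/milvus-io/milvus/internal/proto/schemapb"
)

func main() {
    // 5 is the Int64 primary key field ("book_id"),
    // 101 is the FloatVector embedding field ("book_intro").
    fmt.Println(int32(schemapb.DataType_Int64), schemapb.DataType_Int64)             // 5 Int64
    fmt.Println(int32(schemapb.DataType_FloatVector), schemapb.DataType_FloatVector) // 101 FloatVector
}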
tests/scripts/restful-data/insert-data.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
    "collection_name": "book",
    "fields_data": [
        {
            "field_name": "book_id",
            "type": 5,
            "field": [
1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,
            ]
        },
        {
            "field_name": "book_intro",
            "type": 101,
            "field": [
[1,1],[2,1],[3,1],[4,1],[5,1],[6,1],[7,1],[8,1],[9,1],[10,1],[11,1],[12,1],[13,1],[14,1],[15,1],[16,1],[17,1],[18,1],[19,1],[20,1],[21,1],[22,1],[23,1],[24,1],[25,1],[26,1],[27,1],[28,1],[29,1],[30,1],[31,1],[32,1],[33,1],[34,1],[35,1],[36,1],[37,1],[38,1],[39,1],[40,1],[41,1],[42,1],[43,1],[44,1],[45,1],[46,1],[47,1],[48,1],[49,1],[50,1],[51,1],[52,1],[53,1],[54,1],[55,1],[56,1],[57,1],[58,1],[59,1],[60,1],[61,1],[62,1],[63,1],[64,1],[65,1],[66,1],[67,1],[68,1],[69,1],[70,1],[71,1],[72,1],[73,1],[74,1],[75,1],[76,1],[77,1],[78,1],[79,1],[80,1],[81,1],[82,1],[83,1],[84,1],[85,1],[86,1],[87,1],[88,1],[89,1],[90,1],[91,1],[92,1],[93,1],[94,1],[95,1],[96,1],[97,1],[98,1],[99,1],[100,1],
            ]
        }
    ],
    "num_rows": 1000
}
tests/scripts/restful-data/search.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
    "collection_name": "book",
    "output_fields": ["book_id"],
    "search_params": [
        {"key": "anns_field", "value": "book_intro"},
        {"key": "topk", "value": "2"},
        {"key": "params", "value": "{\"nprobe\": 10}"},
        {"key": "metric_type", "value": "L2"},
        {"key": "round_decimal", "value": "-1"}
    ],
    "vectors": [ [10,5] ],
    "dsl": "",
    "dsl_type": 1
}