fix: some errors in restful (#37487)

#37370

Signed-off-by: lixinguo <xinguo.li@zilliz.com>
Co-authored-by: lixinguo <xinguo.li@zilliz.com>
smellthemoon 2024-11-13 17:12:39 +08:00 committed by GitHub
parent cf883b114e
commit a654487995
9 changed files with 356 additions and 143 deletions

View File

@ -104,6 +104,8 @@ const (
HTTPReturnFieldPrimaryKey = "primaryKey"
HTTPReturnFieldPartitionKey = "partitionKey"
HTTPReturnFieldClusteringKey = "clusteringKey"
HTTPReturnFieldNullable = "nullable"
HTTPReturnFieldDefaultValue = "defaultValue"
HTTPReturnFieldAutoID = "autoId"
HTTPReturnFieldElementType = "elementType"
HTTPReturnDescription = "description"

View File

@ -728,7 +728,7 @@ func (h *HandlersV1) insert(c *gin.Context) {
return nil, RestRequestInterceptorErr
}
insertReq := req.(*milvuspb.InsertRequest)
insertReq.FieldsData, err = anyToColumns(httpReq.Data, nil, collSchema)
insertReq.FieldsData, err = anyToColumns(httpReq.Data, nil, collSchema, true)
if err != nil {
log.Warn("high level restful api, fail to deal with insert data", zap.Any("data", httpReq.Data), zap.Error(err))
HTTPAbortReturn(c, http.StatusOK, gin.H{
@ -827,7 +827,7 @@ func (h *HandlersV1) upsert(c *gin.Context) {
return nil, RestRequestInterceptorErr
}
upsertReq := req.(*milvuspb.UpsertRequest)
upsertReq.FieldsData, err = anyToColumns(httpReq.Data, nil, collSchema)
upsertReq.FieldsData, err = anyToColumns(httpReq.Data, nil, collSchema, false)
if err != nil {
log.Warn("high level restful api, fail to deal with upsert data", zap.Any("data", httpReq.Data), zap.Error(err))
HTTPAbortReturn(c, http.StatusOK, gin.H{

View File

@ -57,7 +57,7 @@ var DefaultShowCollectionsResp = milvuspb.ShowCollectionsResponse{
var DefaultDescCollectionResp = milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, false),
Schema: generateCollectionSchema(schemapb.DataType_Int64, false, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}
@ -277,7 +277,7 @@ func TestVectorCollectionsDescribe(t *testing.T) {
name: "get load status fail",
mp: mp2,
exceptCode: http.StatusOK,
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[{\"fieldName\":\"book_intro\",\"indexName\":\"" + DefaultIndexName + "\",\"metricType\":\"COSINE\"}],\"load\":\"\",\"shardsNum\":1}}",
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[{\"fieldName\":\"book_intro\",\"indexName\":\"" + DefaultIndexName + "\",\"metricType\":\"COSINE\"}],\"load\":\"\",\"shardsNum\":1}}",
})
mp3 := mocks.NewMockProxy(t)
@ -288,7 +288,7 @@ func TestVectorCollectionsDescribe(t *testing.T) {
name: "get indexes fail",
mp: mp3,
exceptCode: http.StatusOK,
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[],\"load\":\"LoadStateLoaded\",\"shardsNum\":1}}",
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[],\"load\":\"LoadStateLoaded\",\"shardsNum\":1}}",
})
mp4 := mocks.NewMockProxy(t)
@ -299,7 +299,7 @@ func TestVectorCollectionsDescribe(t *testing.T) {
name: "show collection details success",
mp: mp4,
exceptCode: http.StatusOK,
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[{\"fieldName\":\"book_intro\",\"indexName\":\"" + DefaultIndexName + "\",\"metricType\":\"COSINE\"}],\"load\":\"LoadStateLoaded\",\"shardsNum\":1}}",
expectedBody: "{\"code\":200,\"data\":{\"collectionName\":\"" + DefaultCollectionName + "\",\"description\":\"\",\"enableDynamicField\":true,\"fields\":[{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_id\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":true,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"word_count\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"Int64\"},{\"autoId\":false,\"clusteringKey\":false,\"description\":\"\",\"name\":\"book_intro\",\"nullable\":false,\"partitionKey\":false,\"primaryKey\":false,\"type\":\"FloatVector(2)\"}],\"indexes\":[{\"fieldName\":\"book_intro\",\"indexName\":\"" + DefaultIndexName + "\",\"metricType\":\"COSINE\"}],\"load\":\"LoadStateLoaded\",\"shardsNum\":1}}",
})
for _, tt := range testCases {
@ -767,9 +767,9 @@ func TestInsertForDataType(t *testing.T) {
paramtable.Init()
paramtable.Get().Save(proxy.Params.HTTPCfg.AcceptTypeAllowInt64.Key, "true")
schemas := map[string]*schemapb.CollectionSchema{
"[success]kinds of data type": newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false)),
"[success]with dynamic field": withDynamicField(newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false))),
"[success]with array fields": withArrayField(newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false))),
"[success]kinds of data type": newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false, true)),
"[success]with dynamic field": withDynamicField(newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false, true))),
"[success]with array fields": withArrayField(newCollectionSchema(generateCollectionSchema(schemapb.DataType_Int64, false, true))),
}
for name, schema := range schemas {
t.Run(name, func(t *testing.T) {
@ -840,7 +840,7 @@ func TestReturnInt64(t *testing.T) {
}
for _, dataType := range schemas {
t.Run("[insert]httpCfg.allow: false", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -871,7 +871,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[upsert]httpCfg.allow: false", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -902,7 +902,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[insert]httpCfg.allow: false, Accept-Type-Allow-Int64: true", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -934,7 +934,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[upsert]httpCfg.allow: false, Accept-Type-Allow-Int64: true", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -967,7 +967,7 @@ func TestReturnInt64(t *testing.T) {
paramtable.Get().Save(proxy.Params.HTTPCfg.AcceptTypeAllowInt64.Key, "true")
for _, dataType := range schemas {
t.Run("[insert]httpCfg.allow: true", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -998,7 +998,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[upsert]httpCfg.allow: true", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -1029,7 +1029,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[insert]httpCfg.allow: true, Accept-Type-Allow-Int64: false", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
@ -1061,7 +1061,7 @@ func TestReturnInt64(t *testing.T) {
for _, dataType := range schemas {
t.Run("[upsert]httpCfg.allow: true, Accept-Type-Allow-Int64: false", func(t *testing.T) {
schema := newCollectionSchema(generateCollectionSchema(dataType, false))
schema := newCollectionSchema(generateCollectionSchema(dataType, false, true))
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,

View File

@ -747,9 +747,11 @@ func (h *HandlersV2) delete(ctx context.Context, c *gin.Context, anyReq any, dbN
return h.proxy.Delete(reqCtx, req.(*milvuspb.DeleteRequest))
})
if err == nil {
HTTPReturn(c, http.StatusOK, wrapperReturnDefaultWithCost(
proxy.GetCostValue(resp.(*milvuspb.MutationResult).GetStatus()),
))
deleteResp := resp.(*milvuspb.MutationResult)
HTTPReturn(c, http.StatusOK, gin.H{
HTTPReturnCode: merr.Code(nil),
HTTPReturnData: gin.H{"deleteCount": deleteResp.DeleteCnt},
})
}
return resp, err
}
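
For illustration only (not part of the commit), a minimal standalone sketch of the response body a REST client now receives from a successful v2 delete; the handler above fills the code from merr.Code(nil) and the count from MutationResult.DeleteCnt, and the literal 0 and count of 2 below are assumptions for the example:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Mirrors the gin.H built in HandlersV2.delete after this change.
	resp := map[string]interface{}{
		"code": 0, // assumed value of merr.Code(nil) for a success status
		"data": map[string]interface{}{"deleteCount": int64(2)}, // DeleteCnt from the MutationResult
	}
	b, _ := json.Marshal(resp)
	fmt.Println(string(b)) // {"code":0,"data":{"deleteCount":2}}
}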
@ -781,7 +783,7 @@ func (h *HandlersV2) insert(ctx context.Context, c *gin.Context, anyReq any, dbN
}
req.NumRows = uint32(len(httpReq.Data))
req.FieldsData, err = anyToColumns(httpReq.Data, validDataMap, collSchema)
req.FieldsData, err = anyToColumns(httpReq.Data, validDataMap, collSchema, true)
if err != nil {
log.Ctx(ctx).Warn("high level restful api, fail to deal with insert data", zap.Any("data", httpReq.Data), zap.Error(err))
HTTPAbortReturn(c, http.StatusOK, gin.H{
@ -855,7 +857,7 @@ func (h *HandlersV2) upsert(ctx context.Context, c *gin.Context, anyReq any, dbN
}
req.NumRows = uint32(len(httpReq.Data))
req.FieldsData, err = anyToColumns(httpReq.Data, validDataMap, collSchema)
req.FieldsData, err = anyToColumns(httpReq.Data, validDataMap, collSchema, false)
if err != nil {
log.Ctx(ctx).Warn("high level restful api, fail to deal with upsert data", zap.Any("data", httpReq.Data), zap.Error(err))
HTTPAbortReturn(c, http.StatusOK, gin.H{

View File

@ -1129,7 +1129,7 @@ func TestMethodGet(t *testing.T) {
mp.EXPECT().HasCollection(mock.Anything, mock.Anything).Return(&milvuspb.BoolResponse{Status: commonErrorStatus}, nil).Once()
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, false),
Schema: generateCollectionSchema(schemapb.DataType_Int64, false, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Twice()
@ -1585,7 +1585,7 @@ func TestDML(t *testing.T) {
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, false),
Schema: generateCollectionSchema(schemapb.DataType_Int64, false, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Times(6)
@ -1607,7 +1607,7 @@ func TestDML(t *testing.T) {
mp.EXPECT().Delete(mock.Anything, mock.Anything).Return(&milvuspb.MutationResult{Status: commonSuccessStatus}, nil).Once()
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, true),
Schema: generateCollectionSchema(schemapb.DataType_Int64, true, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Once()
@ -1729,7 +1729,7 @@ func TestAllowInt64(t *testing.T) {
})
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, false),
Schema: generateCollectionSchema(schemapb.DataType_Int64, false, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Twice()
@ -1765,7 +1765,7 @@ func TestSearchV2(t *testing.T) {
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64, false),
Schema: generateCollectionSchema(schemapb.DataType_Int64, false, true),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Times(11)
@ -1789,7 +1789,7 @@ func TestSearchV2(t *testing.T) {
Scores: DefaultScores,
}}, nil).Once()
mp.EXPECT().HybridSearch(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: commonSuccessStatus, Results: &schemapb.SearchResultData{TopK: int64(0)}}, nil).Times(3)
collSchema := generateCollectionSchema(schemapb.DataType_Int64, false)
collSchema := generateCollectionSchema(schemapb.DataType_Int64, false, true)
binaryVectorField := generateVectorFieldSchema(schemapb.DataType_BinaryVector)
binaryVectorField.Name = "binaryVector"
float16VectorField := generateVectorFieldSchema(schemapb.DataType_Float16Vector)

View File

@ -152,6 +152,9 @@ func checkGetPrimaryKey(coll *schemapb.CollectionSchema, idResult gjson.Result)
func printFields(fields []*schemapb.FieldSchema) []gin.H {
var res []gin.H
for _, field := range fields {
if field.Name == common.MetaFieldName {
continue
}
fieldDetail := printFieldDetail(field, true)
res = append(res, fieldDetail)
}
@ -161,6 +164,9 @@ func printFields(fields []*schemapb.FieldSchema) []gin.H {
func printFieldsV2(fields []*schemapb.FieldSchema) []gin.H {
var res []gin.H
for _, field := range fields {
if field.Name == common.MetaFieldName {
continue
}
fieldDetail := printFieldDetail(field, false)
res = append(res, fieldDetail)
}
@ -175,6 +181,10 @@ func printFieldDetail(field *schemapb.FieldSchema, oldVersion bool) gin.H {
HTTPReturnFieldClusteringKey: field.IsClusteringKey,
HTTPReturnFieldAutoID: field.AutoID,
HTTPReturnDescription: field.Description,
HTTPReturnFieldNullable: field.Nullable,
}
if field.DefaultValue != nil {
fieldDetail[HTTPRequestDefaultValue] = field.DefaultValue
}
if field.GetIsFunctionOutput() {
fieldDetail[HTTPReturnFieldIsFunctionOutput] = true
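
As an illustration of the two new keys (a sketch, not output captured from the server), the per-field object that printFieldDetail now emits for a hypothetical nullable Int64 field looks roughly like this; gin.H is just map[string]interface{}, and the key strings correspond to the HTTPReturnField* constants added earlier in this commit:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Shape of one entry of the "fields" array in a describe-collection response.
	fieldDetail := map[string]interface{}{
		"name":          "word_count", // example field name
		"type":          "Int64",
		"primaryKey":    false,
		"partitionKey":  false,
		"clusteringKey": false,
		"autoId":        false,
		"nullable":      true, // newly exposed via HTTPReturnFieldNullable
		"description":   "",
	}
	// "defaultValue" is only added when field.DefaultValue != nil.
	b, _ := json.Marshal(fieldDetail)
	fmt.Println(string(b))
}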
@ -258,6 +268,9 @@ func checkAndSetData(body string, collSchema *schemapb.CollectionSchema) (error,
var fieldNames []string
for _, field := range collSchema.Fields {
if field.IsDynamic {
continue
}
fieldNames = append(fieldNames, field.Name)
}
@ -265,6 +278,9 @@ func checkAndSetData(body string, collSchema *schemapb.CollectionSchema) (error,
reallyData := map[string]interface{}{}
if data.Type == gjson.JSON {
for _, field := range collSchema.Fields {
if field.IsDynamic {
continue
}
fieldType := field.DataType
fieldName := field.Name
@ -279,8 +295,8 @@ func checkAndSetData(body string, collSchema *schemapb.CollectionSchema) (error,
}
dataString := gjson.Get(data.Raw, fieldName).String()
if field.IsPrimaryKey && field.AutoID {
// if a pk value was passed, just try to set it
if field.IsPrimaryKey && field.AutoID && len(dataString) == 0 {
continue
}
@ -514,9 +530,13 @@ func checkAndSetData(body string, collSchema *schemapb.CollectionSchema) (error,
}
// fill dynamic schema
if collSchema.EnableDynamicField {
for mapKey, mapValue := range data.Map() {
if !containsString(fieldNames, mapKey) {
for mapKey, mapValue := range data.Map() {
if !containsString(fieldNames, mapKey) {
if collSchema.EnableDynamicField {
if mapKey == common.MetaFieldName {
return merr.WrapErrParameterInvalidMsg(fmt.Sprintf("use the invalid field name(%s) when enable dynamicField", mapKey)), nil, nil
}
mapValueStr := mapValue.String()
switch mapValue.Type {
case gjson.True, gjson.False:
@ -536,6 +556,8 @@ func checkAndSetData(body string, collSchema *schemapb.CollectionSchema) (error,
default:
log.Warn("unknown json type found", zap.Int("mapValue.Type", int(mapValue.Type)))
}
} else {
return merr.WrapErrParameterInvalidMsg("has pass more field without dynamic schema, please check it"), nil, nil
}
}
}
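
A short sketch of the two rejection paths added above, written in the style of the new TestCheckAndSetData cases later in this commit and reusing this package's test helpers (the function name is hypothetical): an explicit "$meta" key is refused when the dynamic field is enabled, and an unknown key is refused when it is disabled:

func TestDynamicKeyValidationSketch(t *testing.T) {
	// "$meta" passed explicitly while the dynamic field is enabled
	body := `{"data": {"book_id": 1, "book_intro": [0.1, 0.2], "word_count": 2, "$meta": 1}}`
	coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
	err, _, _ := checkAndSetData(body, coll)
	assert.Error(t, err) // "use the invalid field name($meta) when enable dynamicField"

	// unknown key while the dynamic field is disabled
	body = `{"data": {"book_id": 1, "book_intro": [0.1, 0.2], "word_count": 2, "extra": 1}}`
	coll = generateCollectionSchema(schemapb.DataType_Int64, false, false)
	err, _, _ = checkAndSetData(body, coll)
	assert.Error(t, err) // "has pass more field without dynamic schema, please check it"
}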
@ -649,7 +671,7 @@ func convertToIntArray(dataType schemapb.DataType, arr interface{}) []int32 {
return res
}
func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool, sch *schemapb.CollectionSchema) ([]*schemapb.FieldData, error) {
func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool, sch *schemapb.CollectionSchema, inInsert bool) ([]*schemapb.FieldData, error) {
rowsLen := len(rows)
if rowsLen == 0 {
return []*schemapb.FieldData{}, fmt.Errorf("no row need to be convert to columns")
@ -662,8 +684,7 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
fieldData := make(map[string]*schemapb.FieldData)
for _, field := range sch.Fields {
// skip auto id pk field
if (field.IsPrimaryKey && field.AutoID) || field.IsDynamic {
if (field.IsPrimaryKey && field.AutoID && inInsert) || field.IsDynamic {
continue
}
// skip function output field
@ -738,13 +759,16 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
return nil, err
}
for idx, field := range sch.Fields {
// skip auto id pk field
if (field.IsPrimaryKey && field.AutoID) || field.IsDynamic {
// remove the pk field from the candidate set to avoid adding it to the dynamic column
delete(set, field.Name)
if field.IsDynamic {
continue
}
candi, ok := set[field.Name]
if field.IsPrimaryKey && field.AutoID && inInsert {
if ok {
return nil, merr.WrapErrParameterInvalidMsg(fmt.Sprintf("no need to pass pk field(%s) when autoid==true in insert", field.Name))
}
continue
}
if (field.Nullable || field.DefaultValue != nil) && !ok {
continue
}
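
A sketch of what the new inInsert flag changes, again in the style of the tests added later in this commit and reusing the package's test helpers (the function name is hypothetical): on insert, a user-supplied pk is rejected when autoID is true, while the same rows convert normally on upsert, where the pk must be provided:

func TestInInsertFlagSketch(t *testing.T) {
	body := `{"data": {"book_id": 1, "book_intro": [0.1, 0.2], "word_count": 2}}`
	coll := generateCollectionSchema(schemapb.DataType_Int64, true, false) // autoID pk, no dynamic field
	err, rows, _ := checkAndSetData(body, coll)
	assert.Equal(t, nil, err)

	// insert path: passing the autoID pk is an error
	_, err = anyToColumns(rows, nil, coll, true)
	assert.Error(t, err) // "no need to pass pk field(book_id) when autoid==true in insert"

	// upsert path: the pk is required, so the same rows convert into three columns
	fieldsData, err := anyToColumns(rows, nil, coll, false)
	assert.Equal(t, nil, err)
	assert.Equal(t, 3, len(fieldsData)) // book_id, word_count, book_intro
}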
@ -801,7 +825,7 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
delete(set, field.Name)
}
// if the schema is not dynamic but extra fields are passed, the error is thrown in /internal/distributed/proxy/httpserver/utils.go@checkAndSetData
if isDynamic {
m := make(map[string]interface{})
for name, candi := range set {
@ -1304,7 +1328,7 @@ func buildQueryResp(rowsNum int64, needFields []string, fieldDataList []*schemap
}
data, ok := fieldDataList[j].GetScalars().Data.(*schemapb.ScalarField_JsonData)
if ok && !fieldDataList[j].IsDynamic {
row[fieldDataList[j].FieldName] = data.JsonData.Data[i]
row[fieldDataList[j].FieldName] = string(data.JsonData.Data[i])
} else {
var dataMap map[string]interface{}
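
A minimal standalone sketch (illustration only) of the effect of the string() conversion above when a row is marshalled with encoding/json, used here as a stand-in for the HTTP layer's serializer: a non-dynamic JSON scalar now reaches the client as readable JSON text instead of a base64-encoded byte slice:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"a": 1}`) // one entry of JsonData.Data

	before := map[string]interface{}{"field-json": raw}        // old behaviour: []byte
	after := map[string]interface{}{"field-json": string(raw)} // new behaviour: string

	b1, _ := json.Marshal(before)
	b2, _ := json.Marshal(after)
	fmt.Println(string(b1)) // {"field-json":"eyJhIjogMX0="}  ([]byte is base64-encoded)
	fmt.Println(string(b2)) // {"field-json":"{\"a\": 1}"}
}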

View File

@ -93,25 +93,37 @@ func generateVectorFieldSchema(dataType schemapb.DataType) *schemapb.FieldSchema
}
}
func generateCollectionSchema(primaryDataType schemapb.DataType, autoID bool) *schemapb.CollectionSchema {
func generateCollectionSchema(primaryDataType schemapb.DataType, autoID bool, isDynamic bool) *schemapb.CollectionSchema {
primaryField := generatePrimaryField(primaryDataType, autoID)
vectorField := generateVectorFieldSchema(schemapb.DataType_FloatVector)
vectorField.Name = FieldBookIntro
fields := []*schemapb.FieldSchema{
primaryField, {
FieldID: common.StartOfUserFieldID + 1,
Name: FieldWordCount,
IsPrimaryKey: false,
Description: "",
DataType: 5,
AutoID: false,
}, vectorField,
}
if isDynamic {
fields = append(fields, &schemapb.FieldSchema{
FieldID: common.StartOfUserFieldID + 2,
Name: "$meta",
IsPrimaryKey: false,
Description: "",
DataType: 23,
AutoID: false,
IsDynamic: true,
})
}
return &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Description: "",
AutoID: false,
Fields: []*schemapb.FieldSchema{
primaryField, {
FieldID: common.StartOfUserFieldID + 1,
Name: FieldWordCount,
IsPrimaryKey: false,
Description: "",
DataType: 5,
AutoID: false,
}, vectorField,
},
EnableDynamicField: true,
Name: DefaultCollectionName,
Description: "",
AutoID: autoID,
Fields: fields,
EnableDynamicField: isDynamic,
}
}
@ -382,7 +394,7 @@ func generateQueryResult64(withDistance bool) []map[string]interface{} {
}
func TestPrintCollectionDetails(t *testing.T) {
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
indexes := generateIndexes()
assert.Equal(t, []gin.H{
{
@ -391,6 +403,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldPrimaryKey: true,
HTTPReturnFieldNullable: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
},
@ -399,6 +412,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldType: "Int64",
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
@ -409,6 +423,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
},
@ -420,6 +435,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldPrimaryKey: true,
HTTPReturnFieldNullable: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
HTTPReturnFieldID: int64(100),
@ -430,6 +446,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
HTTPReturnFieldID: int64(101),
@ -439,6 +456,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldType: "FloatVector",
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
@ -472,6 +490,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
},
@ -480,6 +499,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldType: "Array",
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
@ -493,6 +513,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldAutoID: false,
HTTPReturnFieldNullable: false,
HTTPReturnDescription: "",
HTTPReturnFieldID: int64(0),
Params: []*commonpb.KeyValuePair{
@ -504,6 +525,7 @@ func TestPrintCollectionDetails(t *testing.T) {
HTTPReturnFieldType: "Array",
HTTPReturnFieldPartitionKey: false,
HTTPReturnFieldClusteringKey: false,
HTTPReturnFieldNullable: false,
HTTPReturnFieldPrimaryKey: false,
HTTPReturnFieldAutoID: false,
HTTPReturnDescription: "",
@ -514,7 +536,7 @@ func TestPrintCollectionDetails(t *testing.T) {
}
func TestPrimaryField(t *testing.T) {
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
primaryField := generatePrimaryField(schemapb.DataType_Int64, false)
field, ok := getPrimaryField(coll)
assert.Equal(t, true, ok)
@ -538,84 +560,253 @@ func TestPrimaryField(t *testing.T) {
rangeStr, err = convertRange(primaryField, idStr)
assert.Equal(t, nil, err)
assert.Equal(t, `"1","2","3"`, rangeStr)
coll2 := generateCollectionSchema(schemapb.DataType_VarChar, false)
coll2 := generateCollectionSchema(schemapb.DataType_VarChar, false, true)
filter, err = checkGetPrimaryKey(coll2, idStr)
assert.Equal(t, nil, err)
assert.Equal(t, `book_id in ["1","2","3"]`, filter)
}
func TestInsertWithDynamicFields(t *testing.T) {
body := "{\"data\": {\"id\": 0, \"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(0), req.Data[0]["id"])
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll)
assert.Equal(t, nil, err)
assert.Equal(t, true, fieldsData[len(fieldsData)-1].IsDynamic)
assert.Equal(t, schemapb.DataType_JSON, fieldsData[len(fieldsData)-1].Type)
assert.Equal(t, "{\"classified\":false,\"id\":0}", string(fieldsData[len(fieldsData)-1].GetScalars().GetJsonData().GetData()[0]))
func TestAnyToColumns(t *testing.T) {
t.Run("insert with dynamic field", func(t *testing.T) {
body := "{\"data\": {\"id\": 0, \"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(0), req.Data[0]["id"])
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, true)
assert.Equal(t, nil, err)
assert.Equal(t, true, fieldsData[len(fieldsData)-1].IsDynamic)
assert.Equal(t, schemapb.DataType_JSON, fieldsData[len(fieldsData)-1].Type)
assert.Equal(t, "{\"classified\":false,\"id\":0}", string(fieldsData[len(fieldsData)-1].GetScalars().GetJsonData().GetData()[0]))
})
t.Run("upsert with dynamic field", func(t *testing.T) {
body := "{\"data\": {\"id\": 0, \"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(0), req.Data[0]["id"])
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, false)
assert.Equal(t, nil, err)
assert.Equal(t, true, fieldsData[len(fieldsData)-1].IsDynamic)
assert.Equal(t, schemapb.DataType_JSON, fieldsData[len(fieldsData)-1].Type)
assert.Equal(t, "{\"classified\":false,\"id\":0}", string(fieldsData[len(fieldsData)-1].GetScalars().GetJsonData().GetData()[0]))
})
t.Run("insert with dynamic field, but pass pk when autoid==true", func(t *testing.T) {
body := "{\"data\": {\"id\": 0, \"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, true, true)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(0), req.Data[0]["id"])
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
_, err = anyToColumns(req.Data, nil, coll, true)
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "no need to pass pk field"))
})
t.Run("pass more field", func(t *testing.T) {
body := "{\"data\": {\"id\": 0, \"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
var err error
err, _, _ = checkAndSetData(body, coll)
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "has pass more fiel"))
})
t.Run("insert with autoid==false", func(t *testing.T) {
body := "{\"data\": {\"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, false, false)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, []float32{0.1, 0.2}, req.Data[0]["book_intro"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, true)
assert.Equal(t, nil, err)
assert.Equal(t, 3, len(fieldsData))
assert.Equal(t, false, fieldsData[len(fieldsData)-1].IsDynamic)
})
t.Run("insert with autoid==false but has no pk", func(t *testing.T) {
body := "{\"data\": { \"book_intro\": [0.1, 0.2], \"word_count\": 2}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, false, false)
var err error
err, _, _ = checkAndSetData(body, coll)
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "strconv.ParseInt: parsing \"\": invalid syntax"))
})
t.Run("insert with autoid==true", func(t *testing.T) {
body := "{\"data\": { \"book_intro\": [0.1, 0.2], \"word_count\": 2}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, []float32{0.1, 0.2}, req.Data[0]["book_intro"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, true)
assert.Equal(t, nil, err)
assert.Equal(t, 2, len(fieldsData))
assert.Equal(t, false, fieldsData[len(fieldsData)-1].IsDynamic)
})
t.Run("upsert with autoid==true", func(t *testing.T) {
body := "{\"data\": {\"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, []float32{0.1, 0.2}, req.Data[0]["book_intro"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, false)
assert.Equal(t, nil, err)
assert.Equal(t, 3, len(fieldsData))
assert.Equal(t, false, fieldsData[len(fieldsData)-1].IsDynamic)
})
t.Run("upsert with autoid==false", func(t *testing.T) {
body := "{\"data\": {\"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2}}"
req := InsertReq{}
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
var err error
err, req.Data, _ = checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, int64(1), req.Data[0]["book_id"])
assert.Equal(t, []float32{0.1, 0.2}, req.Data[0]["book_intro"])
assert.Equal(t, int64(2), req.Data[0]["word_count"])
fieldsData, err := anyToColumns(req.Data, nil, coll, false)
assert.Equal(t, nil, err)
assert.Equal(t, 3, len(fieldsData))
assert.Equal(t, false, fieldsData[len(fieldsData)-1].IsDynamic)
})
}
func TestInsertWithoutVector(t *testing.T) {
body := "{\"data\": {}}"
var err error
primaryField := generatePrimaryField(schemapb.DataType_Int64, false)
primaryField.AutoID = true
floatVectorField := generateVectorFieldSchema(schemapb.DataType_FloatVector)
floatVectorField.Name = "floatVector"
binaryVectorField := generateVectorFieldSchema(schemapb.DataType_BinaryVector)
binaryVectorField.Name = "binaryVector"
float16VectorField := generateVectorFieldSchema(schemapb.DataType_Float16Vector)
float16VectorField.Name = "float16Vector"
bfloat16VectorField := generateVectorFieldSchema(schemapb.DataType_BFloat16Vector)
bfloat16VectorField.Name = "bfloat16Vector"
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, floatVectorField,
},
EnableDynamicField: true,
func TestCheckAndSetData(t *testing.T) {
t.Run("invalid field name with dynamic field", func(t *testing.T) {
body := "{\"data\": {\"id\": 0,\"$meta\": 2,\"book_id\": 1, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"classified\": false, \"databaseID\": null}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
var err error
err, _, _ = checkAndSetData(body, coll)
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "use the invalid field name"))
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, binaryVectorField,
},
EnableDynamicField: true,
t.Run("without vector", func(t *testing.T) {
body := "{\"data\": {}}"
var err error
primaryField := generatePrimaryField(schemapb.DataType_Int64, true)
floatVectorField := generateVectorFieldSchema(schemapb.DataType_FloatVector)
floatVectorField.Name = "floatVector"
binaryVectorField := generateVectorFieldSchema(schemapb.DataType_BinaryVector)
binaryVectorField.Name = "binaryVector"
float16VectorField := generateVectorFieldSchema(schemapb.DataType_Float16Vector)
float16VectorField.Name = "float16Vector"
bfloat16VectorField := generateVectorFieldSchema(schemapb.DataType_BFloat16Vector)
bfloat16VectorField.Name = "bfloat16Vector"
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, floatVectorField,
},
EnableDynamicField: true,
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, binaryVectorField,
},
EnableDynamicField: true,
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, float16VectorField,
},
EnableDynamicField: true,
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, bfloat16VectorField,
},
EnableDynamicField: true,
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, float16VectorField,
},
EnableDynamicField: true,
t.Run("with pk when autoID == True when upsert", func(t *testing.T) {
arrayFieldName := "array-int64"
body := "{\"data\": {\"book_id\": 9999999999999999, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"" + arrayFieldName + "\": [9999999999999999]}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
ElementType: schemapb.DataType_Int64,
})
err, data, validData := checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, 1, len(data))
assert.Equal(t, 0, len(validData))
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
err, _, _ = checkAndSetData(body, &schemapb.CollectionSchema{
Name: DefaultCollectionName,
Fields: []*schemapb.FieldSchema{
primaryField, bfloat16VectorField,
},
EnableDynamicField: true,
t.Run("without pk when autoID == True when insert", func(t *testing.T) {
arrayFieldName := "array-int64"
body := "{\"data\": {\"book_intro\": [0.1, 0.2], \"word_count\": 2, \"" + arrayFieldName + "\": [9999999999999999]}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, true, false)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
ElementType: schemapb.DataType_Int64,
})
err, data, validData := checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, 1, len(data))
assert.Equal(t, 0, len(validData))
})
t.Run("with pk when autoID == false", func(t *testing.T) {
arrayFieldName := "array-int64"
body := "{\"data\": {\"book_id\": 9999999999999999, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"" + arrayFieldName + "\": [9999999999999999]}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, false, false)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
ElementType: schemapb.DataType_Int64,
})
err, data, validData := checkAndSetData(body, coll)
assert.Equal(t, nil, err)
assert.Equal(t, 1, len(data))
assert.Equal(t, 0, len(validData))
})
assert.Error(t, err)
assert.Equal(t, true, strings.HasPrefix(err.Error(), "missing vector field"))
}
func TestInsertWithInt64(t *testing.T) {
arrayFieldName := "array-int64"
body := "{\"data\": {\"book_id\": 9999999999999999, \"book_intro\": [0.1, 0.2], \"word_count\": 2, \"" + arrayFieldName + "\": [9999999999999999]}}"
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
@ -632,7 +823,7 @@ func TestInsertWithInt64(t *testing.T) {
func TestInsertWithNullableField(t *testing.T) {
arrayFieldName := "array-int64"
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
@ -657,14 +848,14 @@ func TestInsertWithNullableField(t *testing.T) {
assert.Equal(t, 4, len(data[0]))
assert.Equal(t, 5, len(data[1]))
fieldData, err := anyToColumns(data, validData, coll)
fieldData, err := anyToColumns(data, validData, coll, true)
assert.Equal(t, nil, err)
assert.Equal(t, len(coll.Fields)+1, len(fieldData))
assert.Equal(t, len(coll.Fields), len(fieldData))
}
func TestInsertWithDefaultValueField(t *testing.T) {
arrayFieldName := "array-int64"
coll := generateCollectionSchema(schemapb.DataType_Int64, false)
coll := generateCollectionSchema(schemapb.DataType_Int64, false, true)
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: arrayFieldName,
DataType: schemapb.DataType_Array,
@ -693,9 +884,9 @@ func TestInsertWithDefaultValueField(t *testing.T) {
assert.Equal(t, 4, len(data[0]))
assert.Equal(t, 5, len(data[1]))
fieldData, err := anyToColumns(data, validData, coll)
fieldData, err := anyToColumns(data, validData, coll, true)
assert.Equal(t, nil, err)
assert.Equal(t, len(coll.Fields)+1, len(fieldData))
assert.Equal(t, len(coll.Fields), len(fieldData))
}
func TestSerialize(t *testing.T) {
@ -782,16 +973,11 @@ func compareRow(m1 map[string]interface{}, m2 map[string]interface{}) bool {
}
}
} else if key == "field-json" {
arr1 := value.([]byte)
arr1 := value.(string)
arr2 := m2[key].([]byte)
if len(arr1) != len(arr2) {
if arr1 != string(arr2) {
return false
}
for j, element := range arr1 {
if element != arr2[j] {
return false
}
}
} else if strings.HasPrefix(key, "array-") {
continue
} else if value != m2[key] {
@ -1587,24 +1773,24 @@ func newRowsWithArray(results []map[string]interface{}) []map[string]interface{}
func TestArray(t *testing.T) {
body, _ := generateRequestBody(schemapb.DataType_Int64)
collectionSchema := generateCollectionSchema(schemapb.DataType_Int64, false)
collectionSchema := generateCollectionSchema(schemapb.DataType_Int64, false, true)
err, rows, validRows := checkAndSetData(string(body), collectionSchema)
assert.Equal(t, nil, err)
assert.Equal(t, 0, len(validRows))
assert.Equal(t, true, compareRows(rows, generateRawRows(schemapb.DataType_Int64), compareRow))
data, err := anyToColumns(rows, validRows, collectionSchema)
data, err := anyToColumns(rows, validRows, collectionSchema, true)
assert.Equal(t, nil, err)
assert.Equal(t, len(collectionSchema.Fields)+1, len(data))
assert.Equal(t, len(collectionSchema.Fields), len(data))
body, _ = generateRequestBodyWithArray(schemapb.DataType_Int64)
collectionSchema = newCollectionSchemaWithArray(generateCollectionSchema(schemapb.DataType_Int64, false))
collectionSchema = newCollectionSchemaWithArray(generateCollectionSchema(schemapb.DataType_Int64, false, true))
err, rows, validRows = checkAndSetData(string(body), collectionSchema)
assert.Equal(t, nil, err)
assert.Equal(t, 0, len(validRows))
assert.Equal(t, true, compareRows(rows, newRowsWithArray(generateRawRows(schemapb.DataType_Int64)), compareRow))
data, err = anyToColumns(rows, validRows, collectionSchema)
data, err = anyToColumns(rows, validRows, collectionSchema, true)
assert.Equal(t, nil, err)
assert.Equal(t, len(collectionSchema.Fields)+1, len(data))
assert.Equal(t, len(collectionSchema.Fields), len(data))
}
func TestVector(t *testing.T) {
@ -1668,7 +1854,7 @@ func TestVector(t *testing.T) {
assert.Equal(t, 16, len(row[sparseFloatVector].([]byte)))
}
assert.Equal(t, 0, len(validRows))
data, err := anyToColumns(rows, validRows, collectionSchema)
data, err := anyToColumns(rows, validRows, collectionSchema, true)
assert.Equal(t, nil, err)
assert.Equal(t, len(collectionSchema.Fields)+1, len(data))

View File

@ -832,7 +832,6 @@ func TestCreateVarcharArrayInvalidLength(t *testing.T) {
// create collection
err := mc.CreateCollection(ctx, client.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, false, "type param(max_length) should be specified for varChar field")
// invalid Capacity
for _, invalidLength := range []int64{-1, 0, common.MaxLength + 1} {
arrayVarcharField.WithMaxLength(invalidLength)
@ -855,7 +854,6 @@ func TestCreateVarcharInvalidLength(t *testing.T) {
// create collection
err := mc.CreateCollection(ctx, client.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, false, "type param(max_length) should be specified for varChar field")
// invalid Capacity
for _, invalidLength := range []int64{-1, 0, common.MaxLength + 1} {
varcharField.WithMaxLength(invalidLength)

View File

@ -33,6 +33,7 @@ class TestInsertVector(TestBase):
"dimension": dim,
"primaryField": primary_field,
"vectorField": vector_field,
"autoID":True,
}
rsp = self.collection_client.collection_create(collection_payload)
assert rsp['code'] == 200