feat: add test case for go client insert (#33603)

issue: #33419

Signed-off-by: ThreadDao <yufen.zong@zilliz.com>
ThreadDao 2024-06-07 14:47:52 +08:00 committed by GitHub
parent 1629833060
commit 3729d2c8e7
14 changed files with 1140 additions and 60 deletions

View File

@ -82,7 +82,7 @@ func (c *ColumnSparseFloatVector) FieldData() *schemapb.FieldData {
for idx := 0; idx < vector.Len(); idx++ {
pos, value, _ := vector.Get(idx)
binary.LittleEndian.PutUint32(row[idx*8:], pos)
binary.LittleEndian.PutUint32(row[pos*8+4:], math.Float32bits(value))
binary.LittleEndian.PutUint32(row[idx*8+4:], math.Float32bits(value))
}
data = append(data, row)
if vector.Dim() > dim {
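For context on the one-character fix above: each sparse entry occupies 8 bytes in the row buffer, a little-endian uint32 position followed by the float32 bits of its value, so both writes must be offset by the loop index idx rather than the position. A minimal standalone sketch of that assumed layout, using only the standard encoding/binary and math packages (encodeSparseRow is an illustrative helper, not part of the client):

    // encodeSparseRow packs parallel position/value slices into
    // 8 bytes per entry: [4-byte LE position][4-byte LE float32 bits].
    func encodeSparseRow(positions []uint32, values []float32) []byte {
        row := make([]byte, 8*len(positions))
        for idx := range positions {
            binary.LittleEndian.PutUint32(row[idx*8:], positions[idx])
            binary.LittleEndian.PutUint32(row[idx*8+4:], math.Float32bits(values[idx]))
        }
        return row
    }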

View File

@ -87,7 +87,7 @@ func (opt *columnBasedDataOption) processInsertColumns(colSchema *entity.Schema,
mNameColumn[col.Name()] = col
if col.Type() != field.DataType {
return nil, 0, fmt.Errorf("param column %s has type %v but collection field definition is %v", col.Name(), col.FieldData(), field.DataType)
return nil, 0, fmt.Errorf("param column %s has type %v but collection field definition is %v", col.Name(), col.Type(), field.DataType)
}
if field.DataType == entity.FieldTypeFloatVector || field.DataType == entity.FieldTypeBinaryVector {
dim := 0

View File

@ -197,7 +197,9 @@ func (mc *MilvusClient) DropIndex(ctx context.Context, option clientv2.DropIndex
// Insert inserts data
func (mc *MilvusClient) Insert(ctx context.Context, option clientv2.InsertOption, callOptions ...grpc.CallOption) (clientv2.InsertResult, error) {
insertRes, err := mc.mClient.Insert(ctx, option, callOptions...)
log.Info("Insert", zap.Any("result", insertRes))
if err == nil {
log.Info("Insert", zap.Any("result", insertRes))
}
return insertRes, err
}

View File

@ -1,9 +1,14 @@
package common
import (
"fmt"
"strings"
"testing"
"github.com/milvus-io/milvus/client/v2/column"
"github.com/milvus-io/milvus/client/v2/entity"
"go.uber.org/zap"
"github.com/milvus-io/milvus/pkg/log"
"github.com/stretchr/testify/require"
@ -34,6 +39,54 @@ func CheckErr(t *testing.T, actualErr error, expErrNil bool, expErrorMsg ...stri
}
}
// EqualColumn asserts that two columns hold equal field data
func EqualColumn(t *testing.T, columnA column.Column, columnB column.Column) {
require.Equal(t, columnA.Name(), columnB.Name())
require.Equal(t, columnA.Type(), columnB.Type())
switch columnA.Type() {
case entity.FieldTypeBool:
require.ElementsMatch(t, columnA.(*column.ColumnBool).Data(), columnB.(*column.ColumnBool).Data())
case entity.FieldTypeInt8:
require.ElementsMatch(t, columnA.(*column.ColumnInt8).Data(), columnB.(*column.ColumnInt8).Data())
case entity.FieldTypeInt16:
require.ElementsMatch(t, columnA.(*column.ColumnInt16).Data(), columnB.(*column.ColumnInt16).Data())
case entity.FieldTypeInt32:
require.ElementsMatch(t, columnA.(*column.ColumnInt32).Data(), columnB.(*column.ColumnInt32).Data())
case entity.FieldTypeInt64:
require.ElementsMatch(t, columnA.(*column.ColumnInt64).Data(), columnB.(*column.ColumnInt64).Data())
case entity.FieldTypeFloat:
require.ElementsMatch(t, columnA.(*column.ColumnFloat).Data(), columnB.(*column.ColumnFloat).Data())
case entity.FieldTypeDouble:
require.ElementsMatch(t, columnA.(*column.ColumnDouble).Data(), columnB.(*column.ColumnDouble).Data())
case entity.FieldTypeVarChar:
require.ElementsMatch(t, columnA.(*column.ColumnVarChar).Data(), columnB.(*column.ColumnVarChar).Data())
case entity.FieldTypeJSON:
log.Debug("columnA", zap.Any("data", columnA.(*column.ColumnJSONBytes).Data()))
log.Debug("columnB", zap.Any("data", columnB.(*column.ColumnJSONBytes).Data()))
require.ElementsMatch(t, columnA.(*column.ColumnJSONBytes).Data(), columnB.(*column.ColumnJSONBytes).Data())
case entity.FieldTypeFloatVector:
require.ElementsMatch(t, columnA.(*column.ColumnFloatVector).Data(), columnB.(*column.ColumnFloatVector).Data())
case entity.FieldTypeBinaryVector:
require.ElementsMatch(t, columnA.(*column.ColumnBinaryVector).Data(), columnB.(*column.ColumnBinaryVector).Data())
case entity.FieldTypeArray:
log.Info("TODO support column element type")
default:
log.Info("Support column type is:", zap.Any("FieldType", []entity.FieldType{entity.FieldTypeBool,
entity.FieldTypeInt8, entity.FieldTypeInt16, entity.FieldTypeInt32, entity.FieldTypeInt64,
entity.FieldTypeFloat, entity.FieldTypeDouble, entity.FieldTypeString, entity.FieldTypeVarChar,
entity.FieldTypeArray, entity.FieldTypeFloatVector, entity.FieldTypeBinaryVector}))
}
}
// CheckOutputFields checks query output fields
func CheckOutputFields(t *testing.T, expFields []string, actualColumns []column.Column) {
actualFields := make([]string, 0)
for _, actualColumn := range actualColumns {
actualFields = append(actualFields, actualColumn.Name())
}
require.ElementsMatchf(t, expFields, actualFields, fmt.Sprintf("Expected search output fields: %v, actual: %v", expFields, actualFields))
}
// CheckSearchResult checks the search result: nq, topk, ids and scores
func CheckSearchResult(t *testing.T, actualSearchResults []clientv2.ResultSet, expNq int, expTopK int) {
require.Equal(t, len(actualSearchResults), expNq)
@ -42,3 +95,19 @@ func CheckSearchResult(t *testing.T, actualSearchResults []clientv2.ResultSet, e
require.Equal(t, actualSearchResult.ResultCount, expTopK)
}
}
// CheckInsertResult checks the insert result: ids length (insert count) and ids data (pks, unless auto ids are used)
func CheckInsertResult(t *testing.T, expIds column.Column, insertRes clientv2.InsertResult) {
require.Equal(t, expIds.Len(), insertRes.IDs.Len())
require.Equal(t, expIds.Len(), int(insertRes.InsertCount))
actualIds := insertRes.IDs
switch expIds.Type() {
// pk field supports int64 and varchar types
case entity.FieldTypeInt64:
require.ElementsMatch(t, actualIds.(*column.ColumnInt64).Data(), expIds.(*column.ColumnInt64).Data())
case entity.FieldTypeVarChar:
require.ElementsMatch(t, actualIds.(*column.ColumnVarChar).Data(), expIds.(*column.ColumnVarChar).Data())
default:
log.Info("The primary field only support ", zap.Any("type", []entity.FieldType{entity.FieldTypeInt64, entity.FieldTypeVarChar}))
}
}
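A short usage sketch for these new check helpers, mirroring how the insert tests below call them; pkColumn, vecColumn, queryRes, mc, ctx, and schema are assumed to come from the surrounding test setup:

    insertRes, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, vecColumn))
    common.CheckErr(t, err, true)                    // expect the insert to succeed
    common.CheckInsertResult(t, pkColumn, insertRes) // IDs and InsertCount must match the pk column
    // after a query, verify the returned field names
    common.CheckOutputFields(t, []string{common.DefaultInt64FieldName, common.DefaultFloatVecFieldName}, queryRes.Fields)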

View File

@ -3,14 +3,15 @@ package common
import (
"encoding/binary"
"fmt"
"log"
"math"
"math/rand"
"strings"
"time"
"github.com/milvus-io/milvus/client/v2/entity"
"github.com/milvus-io/milvus/pkg/log"
"github.com/x448/float16"
"go.uber.org/zap"
)
var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
@ -116,7 +117,7 @@ func GenSparseVector(maxLen int) entity.SparseEmbedding {
}
vector, err := entity.NewSliceSparseEmbedding(positions, values)
if err != nil {
log.Fatalf("Generate vector failed %s", err)
log.Fatal("Generate vector failed %s", zap.Error(err))
}
return vector
}

View File

@ -13,7 +13,7 @@ require (
google.golang.org/grpc v1.64.0
)
replace github.com/milvus-io/milvus/client/v2 v2.0.0-20240521081339-017fd7bc25de => ../../../milvus/client
//replace github.com/milvus-io/milvus/client/v2 v2.0.0-20240521081339-017fd7bc25de => ../../../milvus/client
require (
github.com/beorn7/perks v1.0.1 // indirect

View File

@ -64,7 +64,7 @@ func TestCreateAutoIdCollectionField(t *testing.T) {
require.True(t, coll.Schema.Fields[0].AutoID)
// insert
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewColumnOption())
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewDataOption())
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, vecColumn))
common.CheckErr(t, err, true)
}
@ -117,7 +117,7 @@ func TestCreateAutoIdCollectionSchema(t *testing.T) {
log.Info("field autoID", zap.Bool("fieldAuto", coll.Schema.Fields[0].AutoID))
// insert
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewColumnOption())
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewDataOption())
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, vecColumn))
common.CheckErr(t, err, false, "field pk not passed")
}
@ -146,7 +146,7 @@ func TestCreateAutoIdCollection(t *testing.T) {
log.Info("field autoID", zap.Bool("fieldAuto", coll.Schema.Fields[0].AutoID))
// insert
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewColumnOption())
vecColumn := hp.GenColumnData(common.DefaultNb, vecField.DataType, *hp.TNewDataOption())
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, vecColumn))
common.CheckErr(t, err, false, "field pk not passed")
}
@ -278,7 +278,7 @@ func TestCreateCollectionDynamicSchema(t *testing.T) {
require.True(t, coll.Schema.EnableDynamicField)
// insert dynamic
columnOption := *hp.TNewColumnOption()
columnOption := *hp.TNewDataOption()
varcharColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeVarChar, columnOption)
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, columnOption)
dynamicData := hp.GenDynamicFieldData(0, common.DefaultNb)
@ -310,7 +310,7 @@ func TestCreateCollectionDynamic(t *testing.T) {
//require.True(t, coll.Schema.Fields[0].IsDynamic)
// insert dynamic
columnOption := *hp.TNewColumnOption()
columnOption := *hp.TNewDataOption()
varcharColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeVarChar, columnOption)
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, columnOption)
dynamicData := hp.GenDynamicFieldData(0, common.DefaultNb)
@ -426,7 +426,8 @@ func TestCreateCollectionWithInvalidFieldName(t *testing.T) {
for _, invalidName := range common.GenInvalidNames() {
log.Debug("TestCreateCollectionWithInvalidFieldName", zap.String("fieldName", invalidName))
pkField := entity.NewField().WithName(invalidName).WithDataType(entity.FieldTypeInt64).WithIsPrimaryKey(true)
schema := entity.NewSchema().WithName("aaa").WithField(pkField)
vecField := entity.NewField().WithName("vec").WithDataType(entity.FieldTypeFloatVector).WithDim(128)
schema := entity.NewSchema().WithName("aaa").WithField(pkField).WithField(vecField)
collOpt := clientv2.NewCreateCollectionOption("aaa", schema)
err := mc.CreateCollection(ctx, collOpt)
@ -490,8 +491,7 @@ func TestCreateCollectionInvalidFields(t *testing.T) {
vecField := entity.NewField().WithName(common.DefaultFloatVecFieldName).WithDataType(entity.FieldTypeFloatVector).WithDim(common.DefaultDim)
noneField := entity.NewField().WithName("none").WithDataType(entity.FieldTypeNone)
invalidFields := []invalidFieldsStruct{
// TODO https://github.com/milvus-io/milvus/issues/33199
//{fields: []*entity.Field{pkField}, errMsg: "vector field not set"},
{fields: []*entity.Field{pkField}, errMsg: "schema does not contain vector field"},
{fields: []*entity.Field{vecField}, errMsg: "primary key is not specified"},
{fields: []*entity.Field{pkField, pkField2, vecField}, errMsg: "there are more than one primary key"},
{fields: []*entity.Field{pkField, vecField, noneField}, errMsg: "data type None is not valid"},
@ -935,13 +935,14 @@ func TestCreateCollectionInvalid(t *testing.T) {
schema *entity.Schema
errMsg string
}
vecField := entity.NewField().WithName("vec").WithDataType(entity.FieldTypeFloatVector).WithDim(8)
mSchemaErrs := []mSchemaErr{
{schema: nil, errMsg: "duplicated field name"},
{schema: entity.NewSchema(), errMsg: "collection name should not be empty"},
{schema: entity.NewSchema().WithName("aaa"), errMsg: "primary key is not specified"},
{schema: entity.NewSchema().WithName("aaa").WithField(entity.NewField()), errMsg: "primary key is not specified"},
{schema: entity.NewSchema().WithName("aaa").WithField(entity.NewField().WithIsPrimaryKey(true)), errMsg: "the data type of primary key should be Int64 or VarChar"},
{schema: entity.NewSchema().WithName("aaa").WithField(entity.NewField().WithIsPrimaryKey(true).WithDataType(entity.FieldTypeVarChar)), errMsg: "field name should not be empty"},
{schema: entity.NewSchema().WithField(vecField), errMsg: "collection name should not be empty"}, // no collection name
{schema: entity.NewSchema().WithName("aaa").WithField(vecField), errMsg: "primary key is not specified"}, // no pk field
{schema: entity.NewSchema().WithName("aaa").WithField(vecField).WithField(entity.NewField()), errMsg: "primary key is not specified"},
{schema: entity.NewSchema().WithName("aaa").WithField(vecField).WithField(entity.NewField().WithIsPrimaryKey(true)), errMsg: "the data type of primary key should be Int64 or VarChar"},
{schema: entity.NewSchema().WithName("aaa").WithField(vecField).WithField(entity.NewField().WithIsPrimaryKey(true).WithDataType(entity.FieldTypeVarChar)), errMsg: "field name should not be empty"},
}
for _, mSchema := range mSchemaErrs {
err := mc.CreateCollection(ctx, clientv2.NewCreateCollectionOption(collName, mSchema.schema))

View File

@ -2,6 +2,7 @@ package helper
import (
"bytes"
"encoding/json"
"strconv"
"github.com/milvus-io/milvus/client/v2/column"
@ -43,54 +44,69 @@ func (opt *InsertParams) TWithIsRows(isRows bool) *InsertParams {
}
// GenDataOption -- options for generating column data --
type GenColumnOption struct {
dim int64
maxLen int64
start int
fieldName string
elementType entity.FieldType
type GenDataOption struct {
dim int
maxLen int
sparseMaxLen int
maxCapacity int
start int
fieldName string
elementType entity.FieldType
}
func (opt *GenColumnOption) TWithDim(dim int64) *GenColumnOption {
func (opt *GenDataOption) TWithDim(dim int) *GenDataOption {
opt.dim = dim
return opt
}
func (opt *GenColumnOption) TWithMaxLen(maxLen int64) *GenColumnOption {
func (opt *GenDataOption) TWithMaxLen(maxLen int) *GenDataOption {
opt.maxLen = maxLen
return opt
}
func (opt *GenColumnOption) TWithStart(start int) *GenColumnOption {
func (opt *GenDataOption) TWithSparseMaxLen(sparseMaxLen int) *GenDataOption {
opt.sparseMaxLen = sparseMaxLen
return opt
}
func (opt *GenDataOption) TWithMaxCapacity(maxCap int) *GenDataOption {
opt.maxCapacity = maxCap
return opt
}
func (opt *GenDataOption) TWithStart(start int) *GenDataOption {
opt.start = start
return opt
}
func (opt *GenColumnOption) TWithFieldName(fieldName string) *GenColumnOption {
func (opt *GenDataOption) TWithFieldName(fieldName string) *GenDataOption {
opt.fieldName = fieldName
return opt
}
func (opt *GenColumnOption) TWithElementType(eleType entity.FieldType) *GenColumnOption {
func (opt *GenDataOption) TWithElementType(eleType entity.FieldType) *GenDataOption {
opt.elementType = eleType
return opt
}
func TNewColumnOption() *GenColumnOption {
return &GenColumnOption{
dim: common.DefaultDim,
maxLen: common.TestMaxLen,
start: 0,
func TNewDataOption() *GenDataOption {
return &GenDataOption{
dim: common.DefaultDim,
maxLen: common.TestMaxLen,
sparseMaxLen: common.TestMaxLen,
maxCapacity: common.TestCapacity,
start: 0,
elementType: entity.FieldTypeNone,
}
}
func GenArrayColumnData(nb int, eleType entity.FieldType, option GenColumnOption) column.Column {
func GenArrayColumnData(nb int, eleType entity.FieldType, option GenDataOption) column.Column {
start := option.start
fieldName := option.fieldName
if option.fieldName == "" {
fieldName = GetFieldNameByElementType(eleType)
}
capacity := int(option.maxLen)
capacity := option.maxCapacity
switch eleType {
case entity.FieldTypeBool:
boolValues := make([][]bool, 0, nb)
@ -180,10 +196,63 @@ func GenArrayColumnData(nb int, eleType entity.FieldType, option GenColumnOption
}
}
type JSONStruct struct {
Number int32 `json:"number,omitempty" milvus:"name:number"`
String string `json:"string,omitempty" milvus:"name:string"`
*BoolStruct
List []int64 `json:"list,omitempty" milvus:"name:list"`
}
// GenDefaultJSONData generates default JSON field data
func GenDefaultJSONData(nb int, option GenDataOption) [][]byte {
jsonValues := make([][]byte, 0, nb)
start := option.start
var m interface{}
for i := start; i < start+nb; i++ {
// kv value
_bool := &BoolStruct{
Bool: i%2 == 0,
}
if i < (start+nb)/2 {
if i%2 == 0 {
m = JSONStruct{
String: strconv.Itoa(i),
BoolStruct: _bool,
}
} else {
m = JSONStruct{
Number: int32(i),
String: strconv.Itoa(i),
BoolStruct: _bool,
List: []int64{int64(i), int64(i + 1)},
}
}
} else {
// int, float, string, list
switch i % 4 {
case 0:
m = i
case 1:
m = float32(i)
case 2:
m = strconv.Itoa(i)
case 3:
m = []int64{int64(i), int64(i + 1)}
}
}
bs, err := json.Marshal(&m)
if err != nil {
log.Fatal("Marshal json field failed", zap.Error(err))
}
jsonValues = append(jsonValues, bs)
}
return jsonValues
}
// GenColumnData generates column data according to the given GenDataOption
func GenColumnData(nb int, fieldType entity.FieldType, option GenColumnOption) column.Column {
dim := int(option.dim)
maxLen := int(option.maxLen)
func GenColumnData(nb int, fieldType entity.FieldType, option GenDataOption) column.Column {
dim := option.dim
sparseMaxLen := option.sparseMaxLen
start := option.start
fieldName := option.fieldName
if option.fieldName == "" {
@ -248,14 +317,16 @@ func GenColumnData(nb int, fieldType entity.FieldType, option GenColumnOption) c
case entity.FieldTypeArray:
return GenArrayColumnData(nb, option.elementType, option)
case entity.FieldTypeJSON:
jsonValues := GenDefaultJSONData(nb, option)
return column.NewColumnJSONBytes(fieldName, jsonValues)
case entity.FieldTypeFloatVector:
vecFloatValues := make([][]float32, 0, nb)
for i := start; i < start+nb; i++ {
vec := common.GenFloatVector(dim)
vecFloatValues = append(vecFloatValues, vec)
}
return column.NewColumnFloatVector(fieldName, int(option.dim), vecFloatValues)
return column.NewColumnFloatVector(fieldName, option.dim, vecFloatValues)
case entity.FieldTypeBinaryVector:
binaryVectors := make([][]byte, 0, nb)
for i := 0; i < nb; i++ {
@ -280,7 +351,7 @@ func GenColumnData(nb int, fieldType entity.FieldType, option GenColumnOption) c
case entity.FieldTypeSparseVector:
vectors := make([]entity.SparseEmbedding, 0, nb)
for i := start; i < start+nb; i++ {
vec := common.GenSparseVector(maxLen)
vec := common.GenSparseVector(sparseMaxLen)
vectors = append(vectors, vec)
}
return column.NewColumnSparseVectors(fieldName, vectors)
@ -299,26 +370,26 @@ func GenDynamicFieldData(start int, nb int) []column.Column {
numberValues := make([]int32, 0, nb)
stringValues := make([]string, 0, nb)
boolValues := make([]bool, 0, nb)
//listValues := make([][]byte, 0, Nb)
//m := make(map[string]interface{})
listValues := make([][]byte, 0, nb)
m := make(map[string]interface{})
for i := start; i < start+nb; i++ {
numberValues = append(numberValues, int32(i))
stringValues = append(stringValues, strconv.Itoa(i))
boolValues = append(boolValues, i%3 == 0)
//m["list"] = ListStruct{
// List: []int64{int64(i), int64(i + 1)},
//}
//bs, err := json.Marshal(m)
//if err != nil {
// log.Fatalf("Marshal json field failed: %s", err)
//}
//listValues = append(listValues, bs)
m["list"] = ListStruct{
List: []int64{int64(i), int64(i + 1)},
}
bs, err := json.Marshal(m)
if err != nil {
log.Fatal("Marshal json field failed:", zap.Error(err))
}
listValues = append(listValues, bs)
}
data := []column.Column{
column.NewColumnInt32(common.DefaultDynamicNumberField, numberValues),
column.NewColumnString(common.DefaultDynamicStringField, stringValues),
column.NewColumnBool(common.DefaultDynamicBoolField, boolValues),
//entity.NewColumnJSONBytes(DefaultDynamicListField, listValues),
column.NewColumnJSONBytes(common.DefaultDynamicListField, listValues),
}
return data
}
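A minimal sketch of the renamed option builder in use, in the same style as the tests below (the hp and common aliases and the concrete values are assumptions taken from those tests):

    columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim).TWithSparseMaxLen(100)
    pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
    vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt)
    // array columns additionally need an element type and a max capacity
    arrOpt := hp.TNewDataOption().TWithElementType(entity.FieldTypeInt64).TWithMaxCapacity(common.TestCapacity)
    arrColumn := hp.GenArrayColumnData(common.DefaultNb, entity.FieldTypeInt64, *arrOpt)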

View File

@ -242,7 +242,7 @@ func (cf FieldsAllFields) GenFields(option GenFieldsOption) []*entity.Field {
pkField,
}
// scalar fields and array fields
for _, fieldType := range GetAllScaleFieldType() {
for _, fieldType := range GetAllScalarFieldType() {
if fieldType == entity.FieldTypeInt64 {
continue
} else if fieldType == entity.FieldTypeArray {

View File

@ -47,7 +47,7 @@ func GetAllVectorFieldType() []entity.FieldType {
}
}
func GetAllScaleFieldType() []entity.FieldType {
func GetAllScalarFieldType() []entity.FieldType {
return []entity.FieldType{
entity.FieldTypeBool,
entity.FieldTypeInt8,
@ -63,7 +63,7 @@ func GetAllScaleFieldType() []entity.FieldType {
}
func GetAllFieldsType() []entity.FieldType {
allFieldType := GetAllScaleFieldType()
allFieldType := GetAllScalarFieldType()
allFieldType = append(allFieldType, entity.FieldTypeBinaryVector,
entity.FieldTypeFloatVector,
entity.FieldTypeFloat16Vector,
@ -128,7 +128,7 @@ func (chainTask *CollectionPrepare) CreateCollection(ctx context.Context, t *tes
}
func (chainTask *CollectionPrepare) InsertData(ctx context.Context, t *testing.T, mc *base.MilvusClient,
ip *InsertParams, option *GenColumnOption) (*CollectionPrepare, clientv2.InsertResult) {
ip *InsertParams, option *GenDataOption) (*CollectionPrepare, clientv2.InsertResult) {
if nil == ip.Schema || ip.Schema.CollectionName == "" {
log.Fatal("[InsertData] Nil Schema is not expected")
}

View File

@ -0,0 +1,217 @@
package helper
import (
"bytes"
"strconv"
"github.com/milvus-io/milvus/client/v2/entity"
"github.com/milvus-io/milvus/tests/go_client/common"
)
type Dynamic struct {
Number int32 `json:"dynamicNumber,omitempty" milvus:"name:dynamicNumber"`
String string `json:"dynamicString,omitempty" milvus:"name:dynamicString"`
*BoolDynamic
List []int64 `json:"dynamicList,omitempty" milvus:"name:dynamicList"`
}
type BaseRow struct {
*BoolStruct
Int8 int8 `json:"int8,omitempty" milvus:"name:int8"`
Int16 int16 `json:"int16,omitempty" milvus:"name:int16"`
Int32 int32 `json:"int32,omitempty" milvus:"name:int32"`
Int64 int64 `json:"int64,omitempty" milvus:"name:int64"`
Float float32 `json:"float,omitempty" milvus:"name:float"`
Double float64 `json:"double,omitempty" milvus:"name:double"`
Varchar string `json:"varchar,omitempty" milvus:"name:varchar"`
JSON *JSONStruct `json:"json,omitempty" milvus:"name:json"`
FloatVec []float32 `json:"floatVec,omitempty" milvus:"name:floatVec"`
Fp16Vec []byte `json:"fp16Vec,omitempty" milvus:"name:fp16Vec"`
Bf16Vec []byte `json:"bf16Vec,omitempty" milvus:"name:bf16Vec"`
BinaryVec []byte `json:"binaryVec,omitempty" milvus:"name:binaryVec"`
SparseVec entity.SparseEmbedding `json:"sparseVec,omitempty" milvus:"name:sparseVec"`
Array
Dynamic
}
type BoolStruct struct {
Bool bool `json:"bool" milvus:"name:bool"`
}
type BoolDynamic struct {
Bool bool `json:"dynamicBool" milvus:"name:dynamicBool"`
}
type Array struct {
BoolArray []bool `json:"boolArray,omitempty" milvus:"name:boolArray"`
Int8Array []int8 `json:"int8Array,omitempty" milvus:"name:int8Array"`
Int16Array []int16 `json:"int16Array,omitempty" milvus:"name:int16Array"`
Int32Array []int32 `json:"int32Array,omitempty" milvus:"name:int32Array"`
Int64Array []int64 `json:"int64Array,omitempty" milvus:"name:int64Array"`
FloatArray []float32 `json:"floatArray,omitempty" milvus:"name:floatArray"`
DoubleArray []float64 `json:"doubleArray,omitempty" milvus:"name:doubleArray"`
VarcharArray [][]byte `json:"varcharArray,omitempty" milvus:"name:varcharArray"`
}
func getBool(b bool) *bool {
return &b
}
func GenDynamicRow(index int) Dynamic {
var dynamic Dynamic
_bool := &BoolDynamic{
Bool: index%2 == 0,
}
if index%2 == 0 {
dynamic = Dynamic{
Number: int32(index),
String: strconv.Itoa(index),
BoolDynamic: _bool,
}
} else {
dynamic = Dynamic{
Number: int32(index),
String: strconv.Itoa(index),
BoolDynamic: _bool,
List: []int64{int64(index), int64(index + 1)},
}
}
return dynamic
}
func GenJsonRow(index int) *JSONStruct {
var jsonStruct JSONStruct
_bool := &BoolStruct{
Bool: index%2 == 0,
}
if index%2 == 0 {
jsonStruct = JSONStruct{
String: strconv.Itoa(index),
BoolStruct: _bool,
}
} else {
jsonStruct = JSONStruct{
Number: int32(index),
String: strconv.Itoa(index),
BoolStruct: _bool,
List: []int64{int64(index), int64(index + 1)},
}
}
return &jsonStruct
}
func GenInt64VecRows(nb int, enableDynamicField bool, autoID bool, option GenDataOption) []interface{} {
dim := option.dim
start := option.start
rows := make([]interface{}, 0, nb)
// generate insert rows from BaseRow
for i := start; i < start+nb; i++ {
baseRow := BaseRow{
FloatVec: common.GenFloatVector(dim),
}
if !autoID {
baseRow.Int64 = int64(i + 1)
}
if enableDynamicField {
baseRow.Dynamic = GenDynamicRow(i + 1)
}
rows = append(rows, &baseRow)
}
return rows
}
func GenInt64VarcharSparseRows(nb int, enableDynamicField bool, autoID bool, option GenDataOption) []interface{} {
start := option.start
rows := make([]interface{}, 0, nb)
// generate insert rows from BaseRow
for i := start; i < start+nb; i++ {
vec := common.GenSparseVector(2)
//log.Info("", zap.Any("SparseVec", vec))
baseRow := BaseRow{
Varchar: strconv.Itoa(i + 1),
SparseVec: vec,
}
if !autoID {
baseRow.Int64 = int64(i + 1)
}
if enableDynamicField {
baseRow.Dynamic = GenDynamicRow(i + 1)
}
rows = append(rows, &baseRow)
}
return rows
}
func GenAllFieldsRows(nb int, enableDynamicField bool, option GenDataOption) []interface{} {
rows := make([]interface{}, 0, nb)
// generate insert rows from BaseRow
dim := option.dim
start := option.start
for i := start; i < start+nb; i++ {
_bool := &BoolStruct{
Bool: i%2 == 0,
}
baseRow := BaseRow{
Int64: int64(i + 1),
BoolStruct: _bool,
Int8: int8(i + 1),
Int16: int16(i + 1),
Int32: int32(i + 1),
Float: float32(i + 1),
Double: float64(i + 1),
Varchar: strconv.Itoa(i + 1),
JSON: GenJsonRow(i + 1),
FloatVec: common.GenFloatVector(dim),
Fp16Vec: common.GenFloat16Vector(dim),
Bf16Vec: common.GenBFloat16Vector(dim),
BinaryVec: common.GenBinaryVector(dim),
}
baseRow.Array = GenAllArrayRow(i, option)
if enableDynamicField {
baseRow.Dynamic = GenDynamicRow(i + 1)
}
rows = append(rows, &baseRow)
}
return rows
}
func GenAllArrayRow(index int, option GenDataOption) Array {
capacity := option.maxCapacity
boolRow := make([]bool, 0, capacity)
int8Row := make([]int8, 0, capacity)
int16Row := make([]int16, 0, capacity)
int32Row := make([]int32, 0, capacity)
int64Row := make([]int64, 0, capacity)
floatRow := make([]float32, 0, capacity)
doubleRow := make([]float64, 0, capacity)
varcharRow := make([][]byte, 0, capacity)
for j := 0; j < capacity; j++ {
boolRow = append(boolRow, index%2 == 0)
int8Row = append(int8Row, int8(index+j))
int16Row = append(int16Row, int16(index+j))
int32Row = append(int32Row, int32(index+j))
int64Row = append(int64Row, int64(index+j))
floatRow = append(floatRow, float32(index+j))
doubleRow = append(doubleRow, float64(index+j))
var buf bytes.Buffer
buf.WriteString(strconv.Itoa(index + j))
varcharRow = append(varcharRow, buf.Bytes())
}
arrayRow := Array{
BoolArray: boolRow,
Int8Array: int8Row,
Int16Array: int16Row,
Int32Array: int32Row,
Int64Array: int64Row,
FloatArray: floatRow,
DoubleArray: doubleRow,
VarcharArray: varcharRow,
}
return arrayRow
}
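A brief sketch of feeding these row structs into a row-based insert, in the same style as the tests that follow (mc, ctx, and collName are assumed from the test setup):

    rows := make([]interface{}, 0, 10)
    for i := 0; i < 10; i++ {
        rows = append(rows, &hp.BaseRow{
            Int64:    int64(i + 1),
            FloatVec: common.GenFloatVector(common.DefaultDim),
        })
    }
    insertRes, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(collName, rows...))
    common.CheckErr(t, err, true)
    require.Equal(t, int64(10), insertRes.InsertCount)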

View File

@ -0,0 +1,718 @@
package testcases
import (
"math"
"testing"
"time"
"github.com/milvus-io/milvus/client/v2/index"
"github.com/milvus-io/milvus/client/v2/column"
"github.com/milvus-io/milvus/pkg/log"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
clientv2 "github.com/milvus-io/milvus/client/v2"
"github.com/milvus-io/milvus/client/v2/entity"
"github.com/milvus-io/milvus/tests/go_client/common"
hp "github.com/milvus-io/milvus/tests/go_client/testcases/helper"
)
func TestInsertDefault(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoID := range [2]bool{false, true} {
// create collection
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoID), hp.TNewSchemaOption())
// insert
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt)
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName).WithColumns(vecColumn)
if !autoID {
insertOpt.WithColumns(pkColumn)
}
insertRes, err := mc.Insert(ctx, insertOpt)
common.CheckErr(t, err, true)
if !autoID {
common.CheckInsertResult(t, pkColumn, insertRes)
}
}
}
func TestInsertDefaultPartition(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoID := range [2]bool{false, true} {
// create collection
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoID), hp.TNewSchemaOption())
// create partition
parName := common.GenRandomString("par", 4)
err := mc.CreatePartition(ctx, clientv2.NewCreatePartitionOption(schema.CollectionName, parName))
common.CheckErr(t, err, true)
// insert
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt)
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName).WithColumns(vecColumn)
if !autoID {
insertOpt.WithColumns(pkColumn)
}
insertRes, err := mc.Insert(ctx, insertOpt.WithPartition(parName))
common.CheckErr(t, err, true)
if !autoID {
common.CheckInsertResult(t, pkColumn, insertRes)
}
}
}
func TestInsertVarcharPkDefault(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoID := range [2]bool{false, true} {
// create collection
cp := hp.NewCreateCollectionParams(hp.VarcharBinary)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoID).TWithMaxLen(20), hp.TNewSchemaOption())
// insert
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeVarChar, *columnOpt)
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeBinaryVector, *columnOpt)
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName).WithColumns(vecColumn)
if !autoID {
insertOpt.WithColumns(pkColumn)
}
insertRes, err := mc.Insert(ctx, insertOpt)
common.CheckErr(t, err, true)
if !autoID {
common.CheckInsertResult(t, pkColumn, insertRes)
}
}
}
// test insert data into a collection that has all scalar fields
func TestInsertAllFieldsData(t *testing.T) {
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, dynamic := range [2]bool{false, true} {
// create collection
cp := hp.NewCreateCollectionParams(hp.AllFields)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption().TWithEnableDynamicField(dynamic))
// insert
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName)
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
for _, field := range schema.Fields {
if field.DataType == entity.FieldTypeArray {
columnOpt.TWithElementType(field.ElementType)
}
_column := hp.GenColumnData(common.DefaultNb, field.DataType, *columnOpt)
insertOpt.WithColumns(_column)
}
if dynamic {
insertOpt.WithColumns(hp.GenDynamicFieldData(0, common.DefaultNb)...)
}
insertRes, errInsert := mc.Insert(ctx, insertOpt)
common.CheckErr(t, errInsert, true)
pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
common.CheckInsertResult(t, pkColumn, insertRes)
// flush and check row count
flushTask, _ := mc.Flush(ctx, clientv2.NewFlushOption(schema.CollectionName))
err := flushTask.Await(ctx)
common.CheckErr(t, err, true)
}
}
// test insert dynamic data with column
func TestInsertDynamicExtraColumn(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// create collection
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption().TWithEnableDynamicField(true))
// insert without dynamic field
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName)
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
for _, field := range schema.Fields {
_column := hp.GenColumnData(common.DefaultNb, field.DataType, *columnOpt)
insertOpt.WithColumns(_column)
}
insertRes, errInsert := mc.Insert(ctx, insertOpt)
common.CheckErr(t, errInsert, true)
require.Equal(t, common.DefaultNb, int(insertRes.InsertCount))
// insert with dynamic field
insertOptDynamic := clientv2.NewColumnBasedInsertOption(schema.CollectionName)
columnOpt.TWithStart(common.DefaultNb)
for _, fieldType := range hp.GetAllScalarFieldType() {
if fieldType == entity.FieldTypeArray {
columnOpt.TWithElementType(entity.FieldTypeInt64).TWithMaxCapacity(2)
}
_column := hp.GenColumnData(common.DefaultNb, fieldType, *columnOpt)
insertOptDynamic.WithColumns(_column)
}
insertOptDynamic.WithColumns(hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt))
insertRes2, errInsert2 := mc.Insert(ctx, insertOptDynamic)
common.CheckErr(t, errInsert2, true)
require.Equal(t, common.DefaultNb, int(insertRes2.InsertCount))
// index
it, _ := mc.CreateIndex(ctx, clientv2.NewCreateIndexOption(schema.CollectionName, common.DefaultFloatVecFieldName, index.NewSCANNIndex(entity.COSINE, 32)))
err := it.Await(ctx)
common.CheckErr(t, err, true)
// load
lt, _ := mc.LoadCollection(ctx, clientv2.NewLoadCollectionOption(schema.CollectionName))
err = lt.Await(ctx)
common.CheckErr(t, err, true)
// query
res, _ := mc.Query(ctx, clientv2.NewQueryOption(schema.CollectionName).WithFilter("int64 == 3000").WithOutputFields([]string{"*"}))
common.CheckOutputFields(t, []string{common.DefaultFloatVecFieldName, common.DefaultInt64FieldName, common.DefaultDynamicFieldName}, res.Fields)
for _, c := range res.Fields {
log.Debug("data", zap.Any("data", c.FieldData()))
}
}
// test insert array column with empty data
func TestInsertEmptyArray(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64VecArray)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim).TWithMaxCapacity(0)
insertOpt := clientv2.NewColumnBasedInsertOption(schema.CollectionName)
for _, field := range schema.Fields {
if field.DataType == entity.FieldTypeArray {
columnOpt.TWithElementType(field.ElementType)
}
_column := hp.GenColumnData(common.DefaultNb, field.DataType, *columnOpt)
insertOpt.WithColumns(_column)
}
_, err := mc.Insert(ctx, insertOpt)
common.CheckErr(t, err, true)
}
func TestInsertArrayDataTypeNotMatch(t *testing.T) {
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// share field and data
int64Field := entity.NewField().WithName(common.DefaultInt64FieldName).WithDataType(entity.FieldTypeInt64).WithIsPrimaryKey(true)
vecField := entity.NewField().WithName(common.DefaultFloatVecFieldName).WithDataType(entity.FieldTypeFloatVector).WithDim(common.DefaultDim)
int64Column := hp.GenColumnData(100, entity.FieldTypeInt64, *hp.TNewDataOption())
vecColumn := hp.GenColumnData(100, entity.FieldTypeFloatVector, *hp.TNewDataOption().TWithDim(128))
for _, eleType := range hp.GetAllArrayElementType() {
collName := common.GenRandomString(prefix, 6)
arrayField := entity.NewField().WithName("array").WithDataType(entity.FieldTypeArray).WithElementType(eleType).WithMaxCapacity(100).WithMaxLength(100)
// create collection
schema := entity.NewSchema().WithName(collName).WithField(int64Field).WithField(vecField).WithField(arrayField)
err := mc.CreateCollection(ctx, clientv2.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, true)
// prepare data
columnType := entity.FieldTypeInt64
if eleType == entity.FieldTypeInt64 {
columnType = entity.FieldTypeBool
}
arrayColumn := hp.GenColumnData(100, entity.FieldTypeArray, *hp.TNewDataOption().TWithElementType(columnType).TWithFieldName("array"))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, int64Column, vecColumn, arrayColumn))
common.CheckErr(t, err, false, "insert data does not match")
}
}
func TestInsertArrayDataCapacityExceed(t *testing.T) {
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// share field and data
int64Field := entity.NewField().WithName(common.DefaultInt64FieldName).WithDataType(entity.FieldTypeInt64).WithIsPrimaryKey(true)
vecField := entity.NewField().WithName(common.DefaultFloatVecFieldName).WithDataType(entity.FieldTypeFloatVector).WithDim(common.DefaultDim)
int64Column := hp.GenColumnData(100, entity.FieldTypeInt64, *hp.TNewDataOption())
vecColumn := hp.GenColumnData(100, entity.FieldTypeFloatVector, *hp.TNewDataOption().TWithDim(128))
for _, eleType := range hp.GetAllArrayElementType() {
collName := common.GenRandomString(prefix, 6)
arrayField := entity.NewField().WithName("array").WithDataType(entity.FieldTypeArray).WithElementType(eleType).WithMaxCapacity(common.TestCapacity).WithMaxLength(100)
// create collection
schema := entity.NewSchema().WithName(collName).WithField(int64Field).WithField(vecField).WithField(arrayField)
err := mc.CreateCollection(ctx, clientv2.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, true)
// insert array data capacity > field.MaxCapacity
arrayColumn := hp.GenColumnData(100, entity.FieldTypeArray, *hp.TNewDataOption().TWithElementType(eleType).TWithFieldName("array").TWithMaxCapacity(common.TestCapacity * 2))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, int64Column, vecColumn, arrayColumn))
common.CheckErr(t, err, false, "array length exceeds max capacity")
}
}
// test insert into a non-existent collection or partition
func TestInsertNotExist(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// insert data into not exist collection
intColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *hp.TNewDataOption())
_, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption("notExist", intColumn))
common.CheckErr(t, err, false, "can't find collection")
// insert data into not exist partition
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *hp.TNewDataOption().TWithDim(common.DefaultDim))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, intColumn, vecColumn).WithPartition("aaa"))
common.CheckErr(t, err, false, "partition not found")
}
// test insert where the data columns' length or order mismatches the schema fields
func TestInsertColumnsMismatchFields(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// column data
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
intColumn := hp.GenColumnData(100, entity.FieldTypeInt64, *columnOpt)
floatColumn := hp.GenColumnData(100, entity.FieldTypeFloat, *columnOpt)
vecColumn := hp.GenColumnData(100, entity.FieldTypeFloatVector, *columnOpt)
// insert
collName := schema.CollectionName
// len(column) < len(fields)
_, errInsert := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, intColumn))
common.CheckErr(t, errInsert, false, "not passed")
// len(column) > len(fields)
_, errInsert2 := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, intColumn, vecColumn, vecColumn))
common.CheckErr(t, errInsert2, false, "duplicated column")
// column not defined in the schema
_, errInsert3 := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, intColumn, floatColumn, vecColumn))
common.CheckErr(t, errInsert3, false, "does not exist in collection")
// order(column) != order(fields)
_, errInsert4 := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(collName, vecColumn, intColumn))
common.CheckErr(t, errInsert4, true)
}
// test insert with columns that have different lengths
func TestInsertColumnsDifferentLen(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// column data
columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
intColumn := hp.GenColumnData(100, entity.FieldTypeInt64, *columnOpt)
vecColumn := hp.GenColumnData(200, entity.FieldTypeFloatVector, *columnOpt)
// len(column) < len(fields)
_, errInsert := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, intColumn, vecColumn))
common.CheckErr(t, errInsert, false, "column size not match")
}
// test insert invalid column: empty column or mismatched dim
func TestInsertInvalidColumn(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// create collection
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// insert with empty column data
pkColumn := column.NewColumnInt64(common.DefaultInt64FieldName, []int64{})
vecColumn := hp.GenColumnData(100, entity.FieldTypeFloatVector, *hp.TNewDataOption())
_, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, vecColumn))
common.CheckErr(t, err, false, "need long int array][actual=got nil]")
// insert with empty vector data
vecColumn2 := column.NewColumnFloatVector(common.DefaultFloatVecFieldName, common.DefaultDim, [][]float32{})
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, vecColumn2))
common.CheckErr(t, err, false, "num_rows should be greater than 0")
// insert with vector data dim not match
vecColumnDim := column.NewColumnFloatVector(common.DefaultFloatVecFieldName, common.DefaultDim-8, [][]float32{})
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, vecColumnDim))
common.CheckErr(t, err, false, "vector dim 120 not match collection definition")
}
// test insert varchar column values that exceed the max length
func TestInsertColumnVarcharExceedLen(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// create collection
varcharMaxLen := 10
cp := hp.NewCreateCollectionParams(hp.VarcharBinary)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithMaxLen(int64(varcharMaxLen)), hp.TNewSchemaOption())
// insert with empty column data
varcharValues := make([]string, 0, 100)
for i := 0; i < 100; i++ {
_value := common.GenRandomString("", varcharMaxLen+1)
varcharValues = append(varcharValues, _value)
}
pkColumn := column.NewColumnVarChar(common.DefaultVarcharFieldName, varcharValues)
vecColumn := hp.GenColumnData(100, entity.FieldTypeBinaryVector, *hp.TNewDataOption())
_, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, vecColumn))
common.CheckErr(t, err, false, "the length (12) of 0th VarChar varchar exceeds max length (0)%!(EXTRA int64=10)")
}
// test insert sparse vector
func TestInsertSparseData(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64VarcharSparseVec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// insert sparse data
columnOpt := hp.TNewDataOption()
pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
columns := []column.Column{
pkColumn,
hp.GenColumnData(common.DefaultNb, entity.FieldTypeVarChar, *columnOpt),
hp.GenColumnData(common.DefaultNb, entity.FieldTypeSparseVector, *columnOpt.TWithSparseMaxLen(common.DefaultDim)),
}
inRes, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, columns...))
common.CheckErr(t, err, true)
common.CheckInsertResult(t, pkColumn, inRes)
}
func TestInsertSparseDataMaxDim(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64VarcharSparseVec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// insert sparse data
columnOpt := hp.TNewDataOption()
pkColumn := hp.GenColumnData(1, entity.FieldTypeInt64, *columnOpt)
varcharColumn := hp.GenColumnData(1, entity.FieldTypeVarChar, *columnOpt)
// sparse vector with max dim
positions := []uint32{0, math.MaxUint32 - 10, math.MaxUint32 - 1}
values := []float32{0.453, 5.0776, 100.098}
sparseVec, err := entity.NewSliceSparseEmbedding(positions, values)
common.CheckErr(t, err, true)
sparseColumn := column.NewColumnSparseVectors(common.DefaultSparseVecFieldName, []entity.SparseEmbedding{sparseVec})
inRes, err := mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, pkColumn, varcharColumn, sparseColumn))
common.CheckErr(t, err, true)
common.CheckInsertResult(t, pkColumn, inRes)
}
func TestInsertSparseInvalidVector(t *testing.T) {
// invalid sparse vector: len(positions) != len(values)
positions := []uint32{1, 10}
values := []float32{0.4, 5.0, 0.34}
_, err := entity.NewSliceSparseEmbedding(positions, values)
common.CheckErr(t, err, false, "invalid sparse embedding input, positions shall have same number of values")
// invalid sparse vector: position out of the valid uint32 index range
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64VarcharSparseVec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// insert data column
columnOpt := hp.TNewDataOption()
data := []column.Column{
hp.GenColumnData(1, entity.FieldTypeInt64, *columnOpt),
hp.GenColumnData(1, entity.FieldTypeVarChar, *columnOpt),
}
// invalid sparse vector: position > (maximum of uint32 - 1)
positions = []uint32{math.MaxUint32}
values = []float32{0.4}
sparseVec, err := entity.NewSliceSparseEmbedding(positions, values)
common.CheckErr(t, err, true)
data1 := append(data, column.NewColumnSparseVectors(common.DefaultSparseVecFieldName, []entity.SparseEmbedding{sparseVec}))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, data1...))
common.CheckErr(t, err, false, "invalid index in sparse float vector: must be less than 2^32-1")
// invalid sparse vector: empty position and values
positions = []uint32{}
values = []float32{}
sparseVec, err = entity.NewSliceSparseEmbedding(positions, values)
common.CheckErr(t, err, true)
data2 := append(data, column.NewColumnSparseVectors(common.DefaultSparseVecFieldName, []entity.SparseEmbedding{sparseVec}))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, data2...))
common.CheckErr(t, err, false, "empty sparse float vector row")
}
func TestInsertSparseVectorSamePosition(t *testing.T) {
// invalid sparse vector: duplicate positions
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64VarcharSparseVec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption())
// insert data columns
columnOpt := hp.TNewDataOption()
data := []column.Column{
hp.GenColumnData(1, entity.FieldTypeInt64, *columnOpt),
hp.GenColumnData(1, entity.FieldTypeVarChar, *columnOpt),
}
// invalid sparse vector: the same position appears twice
sparseVec, err := entity.NewSliceSparseEmbedding([]uint32{2, 10, 2}, []float32{0.4, 0.5, 0.6})
common.CheckErr(t, err, true)
data = append(data, column.NewColumnSparseVectors(common.DefaultSparseVecFieldName, []entity.SparseEmbedding{sparseVec}))
_, err = mc.Insert(ctx, clientv2.NewColumnBasedInsertOption(schema.CollectionName, data...))
common.CheckErr(t, err, false, "unsorted or same indices in sparse float vector")
}
/******************
Test insert rows
******************/
// test insert rows with autoID enabled or disabled
func TestInsertDefaultRows(t *testing.T) {
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoId := range []bool{false, true} {
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoId), hp.TNewSchemaOption())
log.Info("fields", zap.Any("FieldNames", schema.Fields))
// insert rows
rows := hp.GenInt64VecRows(common.DefaultNb, false, autoId, *hp.TNewDataOption())
log.Info("rows data", zap.Any("rows[8]", rows[8]))
ids, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, err, true)
if !autoId {
int64Values := make([]int64, 0, common.DefaultNb)
for i := 0; i < common.DefaultNb; i++ {
int64Values = append(int64Values, int64(i+1))
}
common.CheckInsertResult(t, column.NewColumnInt64(common.DefaultInt64FieldName, int64Values), ids)
}
require.Equal(t, ids.InsertCount, int64(common.DefaultNb))
// flush and check row count
flushTask, errFlush := mc.Flush(ctx, clientv2.NewFlushOption(schema.CollectionName))
common.CheckErr(t, errFlush, true)
errFlush = flushTask.Await(ctx)
common.CheckErr(t, errFlush, true)
}
}
// test insert rows with dynamic field enabled or disabled
func TestInsertAllFieldsRows(t *testing.T) {
t.Skip("https://github.com/milvus-io/milvus/issues/33459")
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, enableDynamicField := range [2]bool{true, false} {
cp := hp.NewCreateCollectionParams(hp.AllFields)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption().TWithEnableDynamicField(enableDynamicField))
log.Info("fields", zap.Any("FieldNames", schema.Fields))
// insert rows
rows := hp.GenAllFieldsRows(common.DefaultNb, false, *hp.TNewDataOption())
log.Debug("", zap.Any("row[0]", rows[0]))
log.Debug("", zap.Any("row", rows[1]))
ids, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, err, true)
int64Values := make([]int64, 0, common.DefaultNb)
for i := 0; i < common.DefaultNb; i++ {
int64Values = append(int64Values, int64(i))
}
common.CheckInsertResult(t, column.NewColumnInt64(common.DefaultInt64FieldName, int64Values), ids)
// flush and check row count
flushTask, errFlush := mc.Flush(ctx, clientv2.NewFlushOption(schema.CollectionName))
common.CheckErr(t, errFlush, true)
errFlush = flushTask.Await(ctx)
common.CheckErr(t, errFlush, true)
}
}
// test insert rows into a collection with varchar pk and sparse vector
func TestInsertVarcharRows(t *testing.T) {
t.Skip("https://github.com/milvus-io/milvus/issues/33457")
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoId := range []bool{true} {
cp := hp.NewCreateCollectionParams(hp.Int64VarcharSparseVec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption(), hp.TNewSchemaOption().TWithAutoID(autoId))
log.Info("fields", zap.Any("FieldNames", schema.Fields))
// insert rows
rows := hp.GenInt64VarcharSparseRows(common.DefaultNb, false, autoId, *hp.TNewDataOption().TWithSparseMaxLen(1000))
ids, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, err, true)
int64Values := make([]int64, 0, common.DefaultNb)
for i := 0; i < common.DefaultNb; i++ {
int64Values = append(int64Values, int64(i))
}
common.CheckInsertResult(t, column.NewColumnInt64(common.DefaultInt64FieldName, int64Values), ids)
// flush and check row count
flushTask, errFlush := mc.Flush(ctx, clientv2.NewFlushOption(schema.CollectionName))
common.CheckErr(t, errFlush, true)
errFlush = flushTask.Await(ctx)
common.CheckErr(t, errFlush, true)
}
}
func TestInsertSparseRows(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
int64Field := entity.NewField().WithName(common.DefaultInt64FieldName).WithDataType(entity.FieldTypeInt64).WithIsPrimaryKey(true)
sparseField := entity.NewField().WithName(common.DefaultSparseVecFieldName).WithDataType(entity.FieldTypeSparseVector)
collName := common.GenRandomString("insert", 6)
schema := entity.NewSchema().WithName(collName).WithField(int64Field).WithField(sparseField)
err := mc.CreateCollection(ctx, clientv2.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, true)
// prepare rows
rows := make([]interface{}, 0, common.DefaultNb)
// generate insert rows from BaseRow
for i := 0; i < common.DefaultNb; i++ {
vec := common.GenSparseVector(500)
//log.Info("", zap.Any("SparseVec", vec))
baseRow := hp.BaseRow{
Int64: int64(i + 1),
SparseVec: vec,
}
rows = append(rows, &baseRow)
}
ids, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, err, true)
int64Values := make([]int64, 0, common.DefaultNb)
for i := 0; i < common.DefaultNb; i++ {
int64Values = append(int64Values, int64(i+1))
}
common.CheckInsertResult(t, column.NewColumnInt64(common.DefaultInt64FieldName, int64Values), ids)
// flush and check row count
flushTask, errFlush := mc.Flush(ctx, clientv2.NewFlushOption(schema.CollectionName))
common.CheckErr(t, errFlush, true)
errFlush = flushTask.Await(ctx)
common.CheckErr(t, errFlush, true)
}
// test field name: pk, row json name: int64
func TestInsertRowFieldNameNotMatch(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
// create collection with pk name: pk
vecField := entity.NewField().WithName(common.DefaultFloatVecFieldName).WithDataType(entity.FieldTypeFloatVector).WithDim(common.DefaultDim)
int64Field := entity.NewField().WithName("pk").WithDataType(entity.FieldTypeInt64).WithIsPrimaryKey(true)
collName := common.GenRandomString(prefix, 6)
schema := entity.NewSchema().WithName(collName).WithField(int64Field).WithField(vecField)
err := mc.CreateCollection(ctx, clientv2.NewCreateCollectionOption(collName, schema))
common.CheckErr(t, err, true)
// insert rows, with json key name: int64
rows := hp.GenInt64VecRows(10, false, false, *hp.TNewDataOption())
_, errInsert := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, errInsert, false, "row 0 does not has field pk")
}
// test insert rows whose fields mismatch the schema fields
func TestInsertRowMismatchFields(t *testing.T) {
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithDim(8), hp.TNewSchemaOption())
// rows fields < schema fields
rowsLess := make([]interface{}, 0, 10)
for i := 1; i < 11; i++ {
row := hp.BaseRow{
Int64: int64(i),
}
rowsLess = append(rowsLess, row)
}
_, errInsert := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rowsLess...))
common.CheckErr(t, errInsert, false, "[expected=need float vector][actual=got nil]")
/*
// extra fields
t.Log("https://github.com/milvus-io/milvus/issues/33487")
rowsMore := make([]interface{}, 0, 10)
for i := 1; i< 11; i++ {
row := hp.BaseRow{
Int64: int64(i),
Int32: int32(i),
FloatVec: common.GenFloatVector(8),
}
rowsMore = append(rowsMore, row)
}
log.Debug("Row data", zap.Any("row[0]", rowsMore[0]))
_, errInsert = mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rowsMore...))
common.CheckErr(t, errInsert, false, "")
*/
// rows order != schema order
rowsOrder := make([]interface{}, 0, 10)
for i := 1; i < 11; i++ {
row := hp.BaseRow{
FloatVec: common.GenFloatVector(8),
Int64: int64(i),
}
rowsOrder = append(rowsOrder, row)
}
log.Debug("Row data", zap.Any("row[0]", rowsOrder[0]))
_, errInsert = mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rowsOrder...))
common.CheckErr(t, errInsert, true)
}
func TestInsertAutoIDInvalidRow(t *testing.T) {
t.Skip("https://github.com/milvus-io/milvus/issues/33460")
t.Parallel()
ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
mc := createDefaultMilvusClient(ctx, t)
for _, autoId := range []bool{false, true} {
cp := hp.NewCreateCollectionParams(hp.Int64Vec)
_, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoId), hp.TNewSchemaOption())
// insert rows: autoID true -> no pk data; autoID false -> has pk data
rows := hp.GenInt64VecRows(10, false, !autoId, *hp.TNewDataOption())
log.Info("rows data", zap.Any("rows[8]", rows[0]))
_, err := mc.Insert(ctx, clientv2.NewRowBasedInsertOption(schema.CollectionName, rows...))
common.CheckErr(t, err, false, "missing pk data")
}
}

View File

@ -17,7 +17,7 @@ import (
)
var addr = flag.String("addr", "localhost:19530", "server host and port")
var defaultCfg = clientv2.ClientConfig{Address: *addr}
var defaultCfg clientv2.ClientConfig
// teardown
func teardown() {
@ -65,6 +65,7 @@ func createDefaultMilvusClient(ctx context.Context, t *testing.T) *base.MilvusCl
func TestMain(m *testing.M) {
flag.Parse()
log.Info("Parser Milvus address", zap.String("address", *addr))
defaultCfg = clientv2.ClientConfig{Address: *addr}
code := m.Run()
if code != 0 {
log.Error("Tests failed and exited", zap.Int("code", code))

View File

@ -21,7 +21,7 @@ func TestSearch(t *testing.T) {
log.Info("schema", zap.Any("schema", schema))
insertParams := hp.NewInsertParams(schema, common.DefaultNb)
hp.CollPrepare.InsertData(ctx, t, mc, insertParams, hp.TNewColumnOption())
hp.CollPrepare.InsertData(ctx, t, mc, insertParams, hp.TNewDataOption())
// flush -> index -> load
hp.CollPrepare.FlushData(ctx, t, mc, schema.CollectionName)