Mirror of https://gitee.com/milvus-io/milvus.git, synced 2024-11-30 10:59:32 +08:00
1. Fix dim mismatch with multi-vector fields and JSON import.
2. Enhance: do not display file ID in GetImportResponse.

issue: https://github.com/milvus-io/milvus/issues/33681, https://github.com/milvus-io/milvus/issues/33682
pr: https://github.com/milvus-io/milvus/pull/33723

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>
parent b71a404776
commit 396f8608dd
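The heart of the dim-mismatch fix: the JSON row parser used to resolve a single dim from the schema's one vector field and validate every vector field against it, which breaks as soon as a collection has two dense vector fields with different dims. A minimal standalone sketch of the before/after check (field IDs and dims are illustrative, not taken from the patch):

package main

import "fmt"

func main() {
	// Per-field dims, as the new rowParser.id2Dim map records them.
	id2Dim := map[int64]int{101: 64, 102: 32}

	// The old parser cached a single dim for the whole schema.
	oldDim := 64

	row := map[int64][]float32{
		101: make([]float32, 64), // correct 64-dim vector
		102: make([]float32, 32), // correct 32-dim vector
	}
	for fieldID, vec := range row {
		fmt.Printf("field %d: old check ok=%t, new check ok=%t\n",
			fieldID, len(vec) == oldDim, len(vec) == id2Dim[fieldID])
	}
	// Field 102 fails the old check despite being valid; the new
	// per-field lookup accepts both.
}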
@@ -360,9 +360,9 @@ func (c *importChecker) checkGC(job ImportJob) {
 		job.GetState() != internalpb.ImportJobState_Failed {
 		return
 	}
-	GCRetention := Params.DataCoordCfg.ImportTaskRetention.GetAsDuration(time.Second)
 	cleanupTime := tsoutil.PhysicalTime(job.GetCleanupTs())
-	if time.Since(cleanupTime) >= GCRetention {
+	if time.Now().After(cleanupTime) {
+		GCRetention := Params.DataCoordCfg.ImportTaskRetention.GetAsDuration(time.Second)
 		log.Info("job has reached the GC retention", zap.Int64("jobID", job.GetJobID()),
 			zap.Time("cleanupTime", cleanupTime), zap.Duration("GCRetention", GCRetention))
 		tasks := c.imeta.GetTaskBy(WithJob(job.GetJobID()))
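The check above compares wall-clock time against the physical part of the stored cleanup timestamp. For readers unfamiliar with tsoutil, a minimal sketch of the hybrid-timestamp layout it assumes (TiKV-style TSO with 18 logical bits, as used by Milvus; these helpers are illustrative re-implementations, not the real API):

package main

import (
	"fmt"
	"time"
)

// Physical milliseconds in the high bits, 18 logical bits in the low bits.
const logicalBits = 18

func composeTSByTime(t time.Time, logical int64) uint64 {
	return uint64(t.UnixMilli())<<logicalBits | uint64(logical)
}

func physicalTime(ts uint64) time.Time {
	return time.UnixMilli(int64(ts >> logicalBits))
}

func main() {
	cleanupTime := time.Now().Add(30 * time.Minute)
	ts := composeTSByTime(cleanupTime, 0)
	// Round-trips at millisecond precision, so the After() comparison
	// in checkGC is safe.
	fmt.Println(physicalTime(ts), time.Now().After(physicalTime(ts)))
}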
@@ -20,11 +20,13 @@ import (
 	"time"

 	"github.com/golang/protobuf/proto"
+	"go.uber.org/zap"

 	"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
 	"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
 	"github.com/milvus-io/milvus/internal/proto/datapb"
 	"github.com/milvus-io/milvus/internal/proto/internalpb"
+	"github.com/milvus-io/milvus/pkg/log"
 	"github.com/milvus-io/milvus/pkg/util/tsoutil"
 )

@@ -46,8 +48,11 @@ func UpdateJobState(state internalpb.ImportJobState) UpdateJobAction {
 			job.(*importJob).ImportJob.RequestedDiskSize = 0
 			// set cleanup ts
 			dur := Params.DataCoordCfg.ImportTaskRetention.GetAsDuration(time.Second)
-			cleanupTs := tsoutil.ComposeTSByTime(time.Now().Add(dur), 0)
+			cleanupTime := time.Now().Add(dur)
+			cleanupTs := tsoutil.ComposeTSByTime(cleanupTime, 0)
 			job.(*importJob).ImportJob.CleanupTs = cleanupTs
+			log.Info("set import job cleanup ts", zap.Int64("jobID", job.GetJobID()),
+				zap.Time("cleanupTime", cleanupTime), zap.Uint64("cleanupTs", cleanupTs))
 		}
 	}
 }
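Read together with the checkGC hunk above, this is a behavioral fix as well as added logging: CleanupTs is already set to now + retention when the job reaches a terminal state, yet the old checkGC additionally waited time.Since(cleanupTime) >= GCRetention on top of it, so jobs were garbage-collected only after roughly twice the configured retention. With time.Now().After(cleanupTime), the stored timestamp alone decides.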
@@ -428,7 +428,7 @@ func GetTaskProgresses(jobID int64, imeta ImportMeta, meta *meta) []*internalpb.ImportTaskProgress {
 		}
 		for _, fileStat := range task.GetFileStats() {
 			progresses = append(progresses, &internalpb.ImportTaskProgress{
-				FileName: fileStat.GetImportFile().String(),
+				FileName: fmt.Sprintf("%v", fileStat.GetImportFile().GetPaths()),
 				FileSize: fileStat.GetFileSize(),
 				Reason:   task.GetReason(),
 				Progress: progress,
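Why the FileName change: internalpb.ImportFile is a protobuf message, and its generated String() prints every populated field. Assuming the message carries an internal file ID alongside the user-supplied paths (which issue #33682 suggests), the old code leaked that ID into the response, roughly:

	fileStat.GetImportFile().String()        // e.g. `id:<internal id> paths:"a.json"` (illustrative)
	fmt.Sprintf("%v", importFile.GetPaths()) // e.g. `[a.json]`

so GetImportResponse now reports only what the user submitted.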
@@ -34,7 +34,7 @@ type RowParser interface {
 }

 type rowParser struct {
-	dim          int
+	id2Dim       map[int64]int
 	id2Field     map[int64]*schemapb.FieldSchema
 	name2FieldID map[string]int64
 	pkField      *schemapb.FieldSchema
@@ -45,17 +45,18 @@ func NewRowParser(schema *schemapb.CollectionSchema) (RowParser, error) {
 	id2Field := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) int64 {
 		return field.GetFieldID()
 	})
-	vecField, err := typeutil.GetVectorFieldSchema(schema)
-	if err != nil {
-		return nil, err
-	}
-	dim := int64(0)
-	if typeutil.IsVectorType(vecField.DataType) && !typeutil.IsSparseFloatVectorType(vecField.DataType) {
-		dim, err = typeutil.GetDim(vecField)
-		if err != nil {
-			return nil, err
+	id2Dim := make(map[int64]int)
+	for id, field := range id2Field {
+		if typeutil.IsVectorType(field.GetDataType()) && !typeutil.IsSparseFloatVectorType(field.GetDataType()) {
+			dim, err := typeutil.GetDim(field)
+			if err != nil {
+				return nil, err
+			}
+			id2Dim[id] = int(dim)
 		}
 	}

 	pkField, err := typeutil.GetPrimaryFieldSchema(schema)
 	if err != nil {
 		return nil, err
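Note that both the old and the new code skip sparse float vector fields: a sparse vector has no fixed dim in the schema, so there is nothing to record in id2Dim, and its entries are presumably validated per row elsewhere in the parser.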
@@ -75,7 +76,7 @@ func NewRowParser(schema *schemapb.CollectionSchema) (RowParser, error) {
 		delete(name2FieldID, dynamicField.GetName())
 	}
 	return &rowParser{
-		dim:          int(dim),
+		id2Dim:       id2Dim,
 		id2Field:     id2Field,
 		name2FieldID: name2FieldID,
 		pkField:      pkField,
@@ -92,7 +93,7 @@ func (r *rowParser) wrapTypeError(v any, fieldID int64) error {
 func (r *rowParser) wrapDimError(actualDim int, fieldID int64) error {
 	field := r.id2Field[fieldID]
 	return merr.WrapErrImportFailed(fmt.Sprintf("expected dim '%d' for field '%s' with type '%s', got dim '%d'",
-		r.dim, field.GetName(), field.GetDataType().String(), actualDim))
+		r.id2Dim[fieldID], field.GetName(), field.GetDataType().String(), actualDim))
 }

 func (r *rowParser) wrapArrayValueTypeError(v any, eleType schemapb.DataType) error {
@@ -268,7 +269,7 @@ func (r *rowParser) parseEntity(fieldID int64, obj any) (any, error) {
 		if !ok {
 			return nil, r.wrapTypeError(obj, fieldID)
 		}
-		if len(arr) != r.dim/8 {
+		if len(arr) != r.id2Dim[fieldID]/8 {
 			return nil, r.wrapDimError(len(arr)*8, fieldID)
 		}
 		vec := make([]byte, len(arr))
@@ -289,7 +290,7 @@ func (r *rowParser) parseEntity(fieldID int64, obj any) (any, error) {
 		if !ok {
 			return nil, r.wrapTypeError(obj, fieldID)
 		}
-		if len(arr) != r.dim {
+		if len(arr) != r.id2Dim[fieldID] {
 			return nil, r.wrapDimError(len(arr), fieldID)
 		}
 		vec := make([]float32, len(arr))
@@ -311,7 +312,7 @@ func (r *rowParser) parseEntity(fieldID int64, obj any) (any, error) {
 		if !ok {
 			return nil, r.wrapTypeError(obj, fieldID)
 		}
-		if len(arr) != r.dim {
+		if len(arr) != r.id2Dim[fieldID] {
 			return nil, r.wrapDimError(len(arr), fieldID)
 		}
 		vec := make([]byte, len(arr)*2)
@@ -333,7 +334,7 @@ func (r *rowParser) parseEntity(fieldID int64, obj any) (any, error) {
 		if !ok {
 			return nil, r.wrapTypeError(obj, fieldID)
 		}
-		if len(arr) != r.dim {
+		if len(arr) != r.id2Dim[fieldID] {
 			return nil, r.wrapDimError(len(arr), fieldID)
 		}
 		vec := make([]byte, len(arr)*2)
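Taken together, the four parseEntity hunks encode one expected JSON array length per dense vector type. A small sketch summarizing the rule they enforce (type names are stand-ins for the schemapb.DataType enum values):

package main

import "fmt"

// expectedJSONArrayLen mirrors the length checks in parseEntity above.
func expectedJSONArrayLen(dataType string, dim int) int {
	switch dataType {
	case "BinaryVector":
		return dim / 8 // 8 bits packed per byte
	case "FloatVector", "Float16Vector", "BFloat16Vector":
		return dim // one JSON number per element (fp16/bf16 are stored as 2 bytes each)
	default:
		return -1 // sparse and non-vector types are handled elsewhere
	}
}

func main() {
	fmt.Println(expectedJSONArrayLen("BinaryVector", 64))   // 8
	fmt.Println(expectedJSONArrayLen("BFloat16Vector", 32)) // 32
}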
tests/integration/import/multi_vector_test.go (new file, 228 lines)
@@ -0,0 +1,228 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package importv2

import (
	"context"
	"fmt"
	"math/rand"
	"os"
	"strings"
	"time"

	"github.com/golang/protobuf/proto"
	"github.com/samber/lo"
	"go.uber.org/zap"

	"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
	"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"
	"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
	"github.com/milvus-io/milvus/internal/proto/internalpb"
	"github.com/milvus-io/milvus/internal/util/importutilv2"
	"github.com/milvus-io/milvus/pkg/common"
	"github.com/milvus-io/milvus/pkg/log"
	"github.com/milvus-io/milvus/pkg/util/funcutil"
	"github.com/milvus-io/milvus/pkg/util/merr"
	"github.com/milvus-io/milvus/pkg/util/metric"
	"github.com/milvus-io/milvus/tests/integration"
)

func (s *BulkInsertSuite) testMultipleVectorFields() {
	const (
		rowCount = 10000
		dim1     = 64
		dim2     = 32
	)

	c := s.Cluster
	ctx, cancel := context.WithTimeout(c.GetContext(), 600*time.Second)
	defer cancel()

	collectionName := "TestBulkInsert_MultipleVectorFields_" + funcutil.GenRandomStr()

	schema := integration.ConstructSchema(collectionName, 0, true, &schemapb.FieldSchema{
		FieldID:      100,
		Name:         integration.Int64Field,
		IsPrimaryKey: true,
		DataType:     schemapb.DataType_Int64,
		AutoID:       true,
	}, &schemapb.FieldSchema{
		FieldID:  101,
		Name:     integration.FloatVecField,
		DataType: schemapb.DataType_FloatVector,
		TypeParams: []*commonpb.KeyValuePair{
			{
				Key:   common.DimKey,
				Value: fmt.Sprintf("%d", dim1),
			},
		},
	}, &schemapb.FieldSchema{
		FieldID:  102,
		Name:     integration.BFloat16VecField,
		DataType: schemapb.DataType_BFloat16Vector,
		TypeParams: []*commonpb.KeyValuePair{
			{
				Key:   common.DimKey,
				Value: fmt.Sprintf("%d", dim2),
			},
		},
	})
	schema.EnableDynamicField = true
	marshaledSchema, err := proto.Marshal(schema)
	s.NoError(err)

	createCollectionStatus, err := c.Proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{
		DbName:         "",
		CollectionName: collectionName,
		Schema:         marshaledSchema,
		ShardsNum:      common.DefaultShardsNum,
	})
	s.NoError(err)
	s.Equal(int32(0), createCollectionStatus.GetCode())

	// create index 1
	createIndexStatus, err := c.Proxy.CreateIndex(ctx, &milvuspb.CreateIndexRequest{
		CollectionName: collectionName,
		FieldName:      integration.FloatVecField,
		IndexName:      "_default_1",
		ExtraParams:    integration.ConstructIndexParam(dim1, integration.IndexFaissIvfFlat, metric.L2),
	})
	s.NoError(err)
	s.Equal(int32(0), createIndexStatus.GetCode())

	s.WaitForIndexBuilt(ctx, collectionName, integration.FloatVecField)

	// create index 2
	createIndexStatus, err = c.Proxy.CreateIndex(ctx, &milvuspb.CreateIndexRequest{
		CollectionName: collectionName,
		FieldName:      integration.BFloat16VecField,
		IndexName:      "_default_2",
		ExtraParams:    integration.ConstructIndexParam(dim2, integration.IndexFaissIvfFlat, metric.L2),
	})
	s.NoError(err)
	s.Equal(int32(0), createIndexStatus.GetCode())

	s.WaitForIndexBuilt(ctx, collectionName, integration.BFloat16VecField)

	// import
	var files []*internalpb.ImportFile
	err = os.MkdirAll(c.ChunkManager.RootPath(), os.ModePerm)
	s.NoError(err)

	switch s.fileType {
	case importutilv2.Numpy:
		importFile, err := GenerateNumpyFiles(c.ChunkManager, schema, rowCount)
		s.NoError(err)
		importFile.Paths = lo.Filter(importFile.Paths, func(path string, _ int) bool {
			return !strings.Contains(path, "$meta")
		})
		files = []*internalpb.ImportFile{importFile}
	case importutilv2.JSON:
		rowBasedFile := c.ChunkManager.RootPath() + "/" + "test.json"
		GenerateJSONFile(s.T(), rowBasedFile, schema, rowCount)
		defer os.Remove(rowBasedFile)
		files = []*internalpb.ImportFile{
			{
				Paths: []string{
					rowBasedFile,
				},
			},
		}
	case importutilv2.Parquet:
		filePath := fmt.Sprintf("/tmp/test_%d.parquet", rand.Int())
		err = GenerateParquetFile(filePath, schema, rowCount)
		s.NoError(err)
		defer os.Remove(filePath)
		files = []*internalpb.ImportFile{
			{
				Paths: []string{
					filePath,
				},
			},
		}
	}

	importResp, err := c.Proxy.ImportV2(ctx, &internalpb.ImportRequest{
		CollectionName: collectionName,
		Files:          files,
	})
	s.NoError(err)
	s.Equal(int32(0), importResp.GetStatus().GetCode())
	log.Info("Import result", zap.Any("importResp", importResp))

	jobID := importResp.GetJobID()
	err = WaitForImportDone(ctx, c, jobID)
	s.NoError(err)

	// load
	loadStatus, err := c.Proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{
		CollectionName: collectionName,
	})
	s.NoError(err)
	s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode())
	s.WaitForLoad(ctx, collectionName)

	segments, err := c.MetaWatcher.ShowSegments()
	s.NoError(err)
	s.NotEmpty(segments)
	log.Info("Show segments", zap.Any("segments", segments))

	// load refresh
	loadStatus, err = c.Proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{
		CollectionName: collectionName,
		Refresh:        true,
	})
	s.NoError(err)
	s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode())
	s.WaitForLoadRefresh(ctx, "", collectionName)

	// search vec 1
	expr := fmt.Sprintf("%s > 0", integration.Int64Field)
	nq := 10
	topk := 10
	roundDecimal := -1

	params := integration.GetSearchParams(integration.IndexFaissIvfFlat, metric.L2)
	searchReq := integration.ConstructSearchRequest("", collectionName, expr,
		integration.FloatVecField, schemapb.DataType_FloatVector, nil, metric.L2, params, nq, dim1, topk, roundDecimal)

	searchResult, err := c.Proxy.Search(ctx, searchReq)

	err = merr.CheckRPCCall(searchResult, err)
	s.NoError(err)
	s.Equal(nq*topk, len(searchResult.GetResults().GetScores()))

	// search vec 2
	searchReq = integration.ConstructSearchRequest("", collectionName, expr,
		integration.BFloat16VecField, schemapb.DataType_BFloat16Vector, nil, metric.L2, params, nq, dim2, topk, roundDecimal)

	searchResult, err = c.Proxy.Search(ctx, searchReq)

	err = merr.CheckRPCCall(searchResult, err)
	s.NoError(err)
	// s.Equal(nq*topk, len(searchResult.GetResults().GetScores())) // TODO: fix bf16vector search
}

func (s *BulkInsertSuite) TestMultipleVectorFields_JSON() {
	s.fileType = importutilv2.JSON
	s.testMultipleVectorFields()
}

func (s *BulkInsertSuite) TestMultipleVectorFields_Parquet() {
	s.fileType = importutilv2.Parquet
	s.testMultipleVectorFields()
}
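The switch in testMultipleVectorFields already handles importutilv2.Numpy, but the commit only adds JSON and Parquet wrappers. If the Numpy path were exercised as well, the wrapper would presumably mirror the two above (hypothetical, not part of this commit):

func (s *BulkInsertSuite) TestMultipleVectorFields_Numpy() {
	s.fileType = importutilv2.Numpy
	s.testMultipleVectorFields()
}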