2021-10-15 18:09:00 +08:00
|
|
|
// Licensed to the LF AI & Data foundation under one
|
|
|
|
// or more contributor license agreements. See the NOTICE file
|
|
|
|
// distributed with this work for additional information
|
|
|
|
// regarding copyright ownership. The ASF licenses this file
|
|
|
|
// to you under the Apache License, Version 2.0 (the
|
|
|
|
// "License"); you may not use this file except in compliance
|
2021-04-21 18:41:37 +08:00
|
|
|
// with the License. You may obtain a copy of the License at
|
|
|
|
//
|
2021-10-15 18:09:00 +08:00
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
2021-04-21 18:41:37 +08:00
|
|
|
//
|
2021-10-15 18:09:00 +08:00
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
2021-04-21 18:41:37 +08:00
|
|
|
|
|
|
|
package datanode
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
|
|
|
"encoding/binary"
|
2021-06-15 19:11:55 +08:00
|
|
|
"errors"
|
2022-04-20 14:03:40 +08:00
|
|
|
"fmt"
|
2021-04-21 18:41:37 +08:00
|
|
|
"math"
|
|
|
|
"math/rand"
|
|
|
|
"sync"
|
|
|
|
"time"
|
|
|
|
|
2022-09-25 15:56:51 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/util/metautil"
|
|
|
|
|
2021-04-21 18:41:37 +08:00
|
|
|
"go.uber.org/zap"
|
|
|
|
|
2021-04-22 14:45:57 +08:00
|
|
|
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
|
|
|
|
"github.com/milvus-io/milvus/internal/log"
|
2022-03-03 21:57:56 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/mq/msgstream"
|
2021-10-25 20:13:51 +08:00
|
|
|
s "github.com/milvus-io/milvus/internal/storage"
|
2021-04-22 14:45:57 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/types"
|
2022-04-07 22:05:32 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/util/dependency"
|
2022-02-07 22:45:46 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/util/tsoutil"
|
|
|
|
"github.com/milvus-io/milvus/internal/util/typeutil"
|
2021-04-21 18:41:37 +08:00
|
|
|
|
2022-10-16 20:49:27 +08:00
|
|
|
"github.com/milvus-io/milvus-proto/go-api/commonpb"
|
|
|
|
"github.com/milvus-io/milvus-proto/go-api/milvuspb"
|
|
|
|
"github.com/milvus-io/milvus-proto/go-api/schemapb"
|
2021-11-02 18:16:32 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/common"
|
2021-05-25 15:35:37 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/proto/datapb"
|
2021-04-22 14:45:57 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/proto/etcdpb"
|
|
|
|
"github.com/milvus-io/milvus/internal/proto/internalpb"
|
2021-06-22 16:14:09 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
|
2021-12-29 14:35:21 +08:00
|
|
|
"github.com/milvus-io/milvus/internal/util/etcd"
|
2021-04-21 18:41:37 +08:00
|
|
|
)
|
|
|
|
|
|
|
|
// ctxTimeInMillisecond bounds the lifetime of test contexts created by the
// mock constructors below (see newHEALTHDataNodeMock).
const ctxTimeInMillisecond = 5000

// debug, when true, replaces the deadline context with context.Background()
// so a test can be stepped through interactively without timing out.
const debug = false
|
|
|
|
|
2022-09-26 18:06:54 +08:00
|
|
|
// As used in data_sync_service_test.go
// segID2SegInfo maps segment IDs to the SegmentInfo the mocked DataCoord
// serves for them (see DataCoordFactory.GetSegmentInfo). IDs absent from
// this map get a stub SegmentInfo containing only the ID.
var segID2SegInfo = map[int64]*datapb.SegmentInfo{
	1: {
		ID:            1,
		CollectionID:  1,
		PartitionID:   1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
	2: {
		ID:            2,
		CollectionID:  1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
	3: {
		ID:            3,
		CollectionID:  1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
}
|
|
|
|
|
2021-12-01 10:11:39 +08:00
|
|
|
// emptyFlushAndDropFunc is a no-op flushAndDropFunc for tests that do not
// inspect flush results.
var emptyFlushAndDropFunc flushAndDropFunc = func(_ []*segmentFlushPack) {}
|
|
|
|
|
2022-03-25 14:27:25 +08:00
|
|
|
func newIDLEDataNodeMock(ctx context.Context, pkType schemapb.DataType) *DataNode {
|
2022-04-07 22:05:32 +08:00
|
|
|
factory := dependency.NewDefaultFactory(true)
|
|
|
|
node := NewDataNode(ctx, factory)
|
2021-04-21 18:41:37 +08:00
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
rc := &RootCoordFactory{
|
2021-04-21 18:41:37 +08:00
|
|
|
ID: 0,
|
|
|
|
collectionID: 1,
|
|
|
|
collectionName: "collection-1",
|
2022-03-25 14:27:25 +08:00
|
|
|
pkType: pkType,
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
2021-10-04 17:34:16 +08:00
|
|
|
node.rootCoord = rc
|
2021-05-25 15:35:37 +08:00
|
|
|
|
2021-06-21 18:22:13 +08:00
|
|
|
ds := &DataCoordFactory{}
|
2021-10-04 17:34:16 +08:00
|
|
|
node.dataCoord = ds
|
2021-05-25 15:35:37 +08:00
|
|
|
|
|
|
|
return node
|
|
|
|
}
|
|
|
|
|
2021-06-07 11:25:37 +08:00
|
|
|
func newHEALTHDataNodeMock(dmChannelName string) *DataNode {
|
2021-05-25 15:35:37 +08:00
|
|
|
var ctx context.Context
|
|
|
|
|
|
|
|
if debug {
|
|
|
|
ctx = context.Background()
|
|
|
|
} else {
|
|
|
|
var cancel context.CancelFunc
|
|
|
|
d := time.Now().Add(ctxTimeInMillisecond * time.Millisecond)
|
|
|
|
ctx, cancel = context.WithDeadline(context.Background(), d)
|
|
|
|
go func() {
|
|
|
|
<-ctx.Done()
|
|
|
|
cancel()
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2022-04-07 22:05:32 +08:00
|
|
|
factory := dependency.NewDefaultFactory(true)
|
|
|
|
node := NewDataNode(ctx, factory)
|
2021-05-25 15:35:37 +08:00
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
ms := &RootCoordFactory{
|
2021-05-25 15:35:37 +08:00
|
|
|
ID: 0,
|
|
|
|
collectionID: 1,
|
|
|
|
collectionName: "collection-1",
|
|
|
|
}
|
2021-10-04 17:34:16 +08:00
|
|
|
node.rootCoord = ms
|
2021-04-21 18:41:37 +08:00
|
|
|
|
2021-06-21 18:22:13 +08:00
|
|
|
ds := &DataCoordFactory{}
|
2021-10-04 17:34:16 +08:00
|
|
|
node.dataCoord = ds
|
2021-04-21 18:41:37 +08:00
|
|
|
|
|
|
|
return node
|
|
|
|
}
|
|
|
|
|
|
|
|
// makeNewChannelNames returns a new slice in which each entry of names has
// suffix appended. A nil or empty input yields a nil result.
func makeNewChannelNames(names []string, suffix string) []string {
	var renamed []string
	for i := range names {
		renamed = append(renamed, names[i]+suffix)
	}
	return renamed
}
|
|
|
|
|
|
|
|
// clearEtcd removes test residue stored in etcd under rootPath: everything
// below the "writer/segment" and "writer/ddl" prefixes. After each removal it
// re-loads the prefix to surface any KV-layer error, and returns the first
// error encountered.
func clearEtcd(rootPath string) error {
	client, err := etcd.GetEtcdClient(&Params.EtcdCfg)
	if err != nil {
		return err
	}
	etcdKV := etcdkv.NewEtcdKV(client, rootPath)

	// Drop segment metadata written during tests.
	err = etcdKV.RemoveWithPrefix("writer/segment")
	if err != nil {
		return err
	}
	// Read-back check: verifies the KV store is reachable after the removal.
	_, _, err = etcdKV.LoadWithPrefix("writer/segment")
	if err != nil {
		return err
	}
	log.Debug("Clear ETCD with prefix writer/segment ")

	// Drop DDL records written during tests.
	err = etcdKV.RemoveWithPrefix("writer/ddl")
	if err != nil {
		return err
	}
	_, _, err = etcdKV.LoadWithPrefix("writer/ddl")
	if err != nil {
		return err
	}
	log.Debug("Clear ETCD with prefix writer/ddl")
	return nil
}
|
|
|
|
|
|
|
|
// MetaFactory builds etcdpb.CollectionMeta and schema fixtures for tests.
type MetaFactory struct {
}
|
|
|
|
|
2021-11-08 19:49:07 +08:00
|
|
|
func NewMetaFactory() *MetaFactory {
|
|
|
|
return &MetaFactory{}
|
|
|
|
}
|
|
|
|
|
2021-04-21 18:41:37 +08:00
|
|
|
// DataFactory holds one pre-generated test row in both supported layouts.
type DataFactory struct {
	rawData    []byte                // the row in row-based encoding (see GenRowData)
	columnData []*schemapb.FieldData // the same row in column-based form (see GenColumnData)
}
|
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
// RootCoordFactory is a mock RootCoord serving one fixed collection.
type RootCoordFactory struct {
	types.RootCoord
	ID             UniqueID
	collectionName string
	collectionID   UniqueID
	pkType         schemapb.DataType // data type of the collection's primary-key field

	// Failure injection for the import-report RPC: the first forces a
	// returned error, the second an unsuccessful status.
	ReportImportErr        bool
	ReportImportNotSuccess bool
}
|
|
|
|
|
2021-06-21 18:22:13 +08:00
|
|
|
// DataCoordFactory is a mock DataCoord whose RPC outcomes are driven by the
// flags below: a *Error flag forces the corresponding method to return an
// error, a *NotSuccess flag (or a *Status value) forces an unsuccessful
// response status.
type DataCoordFactory struct {
	types.DataCoord

	SaveBinlogPathError  bool
	SaveBinlogPathStatus commonpb.ErrorCode

	CompleteCompactionError      bool
	CompleteCompactionNotSuccess bool

	DropVirtualChannelError  bool
	DropVirtualChannelStatus commonpb.ErrorCode

	GetSegmentInfosError      bool
	GetSegmentInfosNotSuccess bool

	AddSegmentError      bool
	AddSegmentNotSuccess bool
}
|
|
|
|
|
2022-04-12 22:19:34 +08:00
|
|
|
func (ds *DataCoordFactory) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) {
|
|
|
|
return &datapb.AssignSegmentIDResponse{
|
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
},
|
|
|
|
SegIDAssignments: []*datapb.SegmentIDAssignment{
|
|
|
|
{
|
|
|
|
SegID: 666,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2021-11-08 19:49:07 +08:00
|
|
|
func (ds *DataCoordFactory) CompleteCompaction(ctx context.Context, req *datapb.CompactionResult) (*commonpb.Status, error) {
|
|
|
|
if ds.CompleteCompactionError {
|
|
|
|
return nil, errors.New("Error")
|
|
|
|
}
|
|
|
|
if ds.CompleteCompactionNotSuccess {
|
|
|
|
return &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, nil
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
|
|
|
|
2021-06-21 18:22:13 +08:00
|
|
|
func (ds *DataCoordFactory) SaveBinlogPaths(ctx context.Context, req *datapb.SaveBinlogPathsRequest) (*commonpb.Status, error) {
|
2021-09-09 15:36:01 +08:00
|
|
|
if ds.SaveBinlogPathError {
|
|
|
|
return nil, errors.New("Error")
|
|
|
|
}
|
2022-05-27 16:20:00 +08:00
|
|
|
return &commonpb.Status{ErrorCode: ds.SaveBinlogPathStatus}, nil
|
2021-05-27 18:45:24 +08:00
|
|
|
}
|
|
|
|
|
2021-12-01 10:11:39 +08:00
|
|
|
func (ds *DataCoordFactory) DropVirtualChannel(ctx context.Context, req *datapb.DropVirtualChannelRequest) (*datapb.DropVirtualChannelResponse, error) {
|
|
|
|
if ds.DropVirtualChannelError {
|
|
|
|
return nil, errors.New("error")
|
|
|
|
}
|
|
|
|
return &datapb.DropVirtualChannelResponse{
|
|
|
|
Status: &commonpb.Status{
|
2022-05-27 16:20:00 +08:00
|
|
|
ErrorCode: ds.DropVirtualChannelStatus,
|
2021-12-01 10:11:39 +08:00
|
|
|
},
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-04-20 14:03:40 +08:00
|
|
|
func (ds *DataCoordFactory) UpdateSegmentStatistics(ctx context.Context, req *datapb.UpdateSegmentStatisticsRequest) (*commonpb.Status, error) {
|
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-09-26 18:06:54 +08:00
|
|
|
func (ds *DataCoordFactory) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
|
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (ds *DataCoordFactory) UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
|
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (ds *DataCoordFactory) MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
|
2022-06-02 18:54:04 +08:00
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-10-11 21:07:23 +08:00
|
|
|
func (ds *DataCoordFactory) BroadcastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
|
2022-10-10 20:31:22 +08:00
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-10-18 13:39:26 +08:00
|
|
|
func (ds *DataCoordFactory) CheckHealth(ctx context.Context, req *milvuspb.CheckHealthRequest) (*milvuspb.CheckHealthResponse, error) {
|
|
|
|
return &milvuspb.CheckHealthResponse{
|
|
|
|
IsHealthy: true,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-06-16 12:00:10 +08:00
|
|
|
func (ds *DataCoordFactory) GetSegmentInfo(ctx context.Context, req *datapb.GetSegmentInfoRequest) (*datapb.GetSegmentInfoResponse, error) {
|
|
|
|
if ds.GetSegmentInfosError {
|
2022-10-14 15:15:24 +08:00
|
|
|
return nil, errors.New("mock get segment info error")
|
2022-06-16 12:00:10 +08:00
|
|
|
}
|
|
|
|
if ds.GetSegmentInfosNotSuccess {
|
|
|
|
return &datapb.GetSegmentInfoResponse{
|
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_UnexpectedError,
|
|
|
|
Reason: "mock GetSegmentInfo failed",
|
|
|
|
},
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
var segmentInfos []*datapb.SegmentInfo
|
|
|
|
for _, segmentID := range req.SegmentIDs {
|
2022-09-26 18:06:54 +08:00
|
|
|
if segInfo, ok := segID2SegInfo[segmentID]; ok {
|
|
|
|
segmentInfos = append(segmentInfos, segInfo)
|
|
|
|
} else {
|
|
|
|
segmentInfos = append(segmentInfos, &datapb.SegmentInfo{
|
|
|
|
ID: segmentID,
|
|
|
|
})
|
|
|
|
}
|
2022-06-16 12:00:10 +08:00
|
|
|
}
|
|
|
|
return &datapb.GetSegmentInfoResponse{
|
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
},
|
|
|
|
Infos: segmentInfos,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2022-03-25 14:27:25 +08:00
|
|
|
// GetCollectionMeta builds a CollectionMeta fixture for tests. Every user
// field (FieldID >= 100) whose data type equals pkDataType is marked as a
// primary key; the system fields RowID (0) and Timestamp (1) never are.
func (mf *MetaFactory) GetCollectionMeta(collectionID UniqueID, collectionName string, pkDataType schemapb.DataType) *etcdpb.CollectionMeta {
	sch := schemapb.CollectionSchema{
		Name:        collectionName,
		Description: "test collection by meta factory",
		AutoID:      true,
	}
	sch.Fields = mf.GetFieldSchema()
	for _, field := range sch.Fields {
		// FieldID >= 100 excludes the system fields from primary-key selection.
		if field.GetDataType() == pkDataType && field.FieldID >= 100 {
			field.IsPrimaryKey = true
		}
	}

	return &etcdpb.CollectionMeta{
		ID:           collectionID,
		Schema:       &sch,
		CreateTime:   Timestamp(1),
		SegmentIDs:   make([]UniqueID, 0),
		PartitionIDs: []UniqueID{0},
	}
}
|
|
|
|
|
|
|
|
// GetFieldSchema returns the fixed test schema used throughout these tests:
// the two system fields (RowID, Timestamp), a dim-2 float vector (100), a
// dim-32 binary vector (101), one scalar field per supported scalar type
// (102-108), and a VarChar field with max_length 100 (109).
func (mf *MetaFactory) GetFieldSchema() []*schemapb.FieldSchema {
	fields := []*schemapb.FieldSchema{
		{
			FieldID:     0,
			Name:        "RowID",
			Description: "RowID field",
			DataType:    schemapb.DataType_Int64,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "f0_tk1",
					Value: "f0_tv1",
				},
			},
		},
		{
			FieldID:     1,
			Name:        "Timestamp",
			Description: "Timestamp field",
			DataType:    schemapb.DataType_Int64,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "f1_tk1",
					Value: "f1_tv1",
				},
			},
		},
		{
			FieldID:     100,
			Name:        "float_vector_field",
			Description: "field 100",
			DataType:    schemapb.DataType_FloatVector,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "dim",
					Value: "2",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{
				{
					Key:   "indexkey",
					Value: "indexvalue",
				},
			},
		},
		{
			FieldID:     101,
			Name:        "binary_vector_field",
			Description: "field 101",
			DataType:    schemapb.DataType_BinaryVector,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "dim",
					Value: "32",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{
				{
					Key:   "indexkey",
					Value: "indexvalue",
				},
			},
		},
		{
			FieldID:     102,
			Name:        "bool_field",
			Description: "field 102",
			DataType:    schemapb.DataType_Bool,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     103,
			Name:        "int8_field",
			Description: "field 103",
			DataType:    schemapb.DataType_Int8,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     104,
			Name:        "int16_field",
			Description: "field 104",
			DataType:    schemapb.DataType_Int16,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     105,
			Name:        "int32_field",
			Description: "field 105",
			DataType:    schemapb.DataType_Int32,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     106,
			Name:        "int64_field",
			Description: "field 106",
			DataType:    schemapb.DataType_Int64,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     107,
			Name:        "float32_field",
			Description: "field 107",
			DataType:    schemapb.DataType_Float,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     108,
			Name:        "float64_field",
			Description: "field 108",
			DataType:    schemapb.DataType_Double,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     109,
			Name:        "varChar_field",
			Description: "field 109",
			DataType:    schemapb.DataType_VarChar,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "max_length",
					Value: "100",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{},
		},
	}

	return fields
}
|
|
|
|
|
|
|
|
func NewDataFactory() *DataFactory {
|
2022-03-25 14:27:25 +08:00
|
|
|
return &DataFactory{rawData: GenRowData(), columnData: GenColumnData()}
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
func GenRowData() (rawData []byte) {
|
|
|
|
const DIM = 2
|
|
|
|
const N = 1
|
|
|
|
|
|
|
|
// Float vector
|
|
|
|
var fvector = [DIM]float32{1, 2}
|
|
|
|
for _, ele := range fvector {
|
|
|
|
buf := make([]byte, 4)
|
2021-11-02 18:16:32 +08:00
|
|
|
common.Endian.PutUint32(buf, math.Float32bits(ele))
|
2021-04-21 18:41:37 +08:00
|
|
|
rawData = append(rawData, buf...)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Binary vector
|
|
|
|
// Dimension of binary vector is 32
|
|
|
|
// size := 4, = 32 / 8
|
|
|
|
var bvector = []byte{255, 255, 255, 0}
|
|
|
|
rawData = append(rawData, bvector...)
|
|
|
|
|
|
|
|
// Bool
|
|
|
|
var fieldBool = true
|
|
|
|
buf := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(buf, common.Endian, fieldBool); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
rawData = append(rawData, buf.Bytes()...)
|
|
|
|
|
|
|
|
// int8
|
|
|
|
var dataInt8 int8 = 100
|
|
|
|
bint8 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bint8, common.Endian, dataInt8); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bint8.Bytes()...)
|
|
|
|
|
|
|
|
// int16
|
|
|
|
var dataInt16 int16 = 200
|
|
|
|
bint16 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bint16, common.Endian, dataInt16); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bint16.Bytes()...)
|
|
|
|
|
|
|
|
// int32
|
|
|
|
var dataInt32 int32 = 300
|
|
|
|
bint32 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bint32, common.Endian, dataInt32); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bint32.Bytes()...)
|
|
|
|
|
|
|
|
// int64
|
|
|
|
var dataInt64 int64 = 400
|
|
|
|
bint64 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bint64, common.Endian, dataInt64); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bint64.Bytes()...)
|
|
|
|
|
|
|
|
// float32
|
|
|
|
var datafloat float32 = 1.1
|
|
|
|
bfloat32 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bfloat32, common.Endian, datafloat); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bfloat32.Bytes()...)
|
|
|
|
|
|
|
|
// float64
|
|
|
|
var datafloat64 = 2.2
|
|
|
|
bfloat64 := new(bytes.Buffer)
|
2021-11-02 18:16:32 +08:00
|
|
|
if err := binary.Write(bfloat64, common.Endian, datafloat64); err != nil {
|
2021-04-21 18:41:37 +08:00
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
rawData = append(rawData, bfloat64.Bytes()...)
|
|
|
|
log.Debug("Rawdata length:", zap.Int("Length of rawData", len(rawData)))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-03-25 14:27:25 +08:00
|
|
|
// GenColumnData generates the same single test row as GenRowData, but in the
// column-based schemapb.FieldData layout: one FieldData per schema field
// (IDs 100-109), each holding exactly one value.
func GenColumnData() (fieldsData []*schemapb.FieldData) {
	// Float vector (field 100, dim = 2).
	var fVector = []float32{1, 2}
	floatVectorData := &schemapb.FieldData{
		Type:      schemapb.DataType_FloatVector,
		FieldName: "float_vector_field",
		FieldId:   100,
		Field: &schemapb.FieldData_Vectors{
			Vectors: &schemapb.VectorField{
				Dim: 2,
				Data: &schemapb.VectorField_FloatVector{
					FloatVector: &schemapb.FloatArray{
						Data: fVector,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, floatVectorData)

	// Binary vector (field 101): dimension 32 -> 32/8 = 4 bytes.
	binaryVector := []byte{255, 255, 255, 0}
	binaryVectorData := &schemapb.FieldData{
		Type:      schemapb.DataType_BinaryVector,
		FieldName: "binary_vector_field",
		FieldId:   101,
		Field: &schemapb.FieldData_Vectors{
			Vectors: &schemapb.VectorField{
				Dim: 32,
				Data: &schemapb.VectorField_BinaryVector{
					BinaryVector: binaryVector,
				},
			},
		},
	}
	fieldsData = append(fieldsData, binaryVectorData)

	// bool (field 102).
	boolData := []bool{true}
	boolFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Bool,
		FieldName: "bool_field",
		FieldId:   102,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_BoolData{
					BoolData: &schemapb.BoolArray{
						Data: boolData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, boolFieldData)

	// int8 (field 103) — carried as int32 in IntData, per the proto layout.
	int8Data := []int32{100}
	int8FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int8,
		FieldName: "int8_field",
		FieldId:   103,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int8Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int8FieldData)

	// int16 (field 104) — also carried as int32 in IntData.
	int16Data := []int32{200}
	int16FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int16,
		FieldName: "int16_field",
		FieldId:   104,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int16Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int16FieldData)

	// int32 (field 105).
	int32Data := []int32{300}
	int32FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int32,
		FieldName: "int32_field",
		FieldId:   105,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int32Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int32FieldData)

	// int64 (field 106).
	int64Data := []int64{400}
	int64FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int64,
		FieldName: "int64_field",
		FieldId:   106,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_LongData{
					LongData: &schemapb.LongArray{
						Data: int64Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int64FieldData)

	// float (field 107).
	floatData := []float32{1.1}
	floatFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Float,
		FieldName: "float32_field",
		FieldId:   107,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_FloatData{
					FloatData: &schemapb.FloatArray{
						Data: floatData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, floatFieldData)

	// double (field 108).
	doubleData := []float64{2.2}
	doubleFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Double,
		FieldName: "float64_field",
		FieldId:   108,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_DoubleData{
					DoubleData: &schemapb.DoubleArray{
						Data: doubleData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, doubleFieldData)

	// varChar (field 109).
	varCharData := []string{"test"}
	varCharFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_VarChar,
		FieldName: "varChar_field",
		FieldId:   109,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_StringData{
					StringData: &schemapb.StringArray{
						Data: varCharData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, varCharFieldData)

	return
}
|
|
|
|
|
2021-05-25 15:35:37 +08:00
|
|
|
// GenMsgStreamInsertMsg builds a single-row, column-based InsertMsg destined
// for chanName. idx drives the hash value, the row ID, and the message/row
// timestamp (idx + 1000).
func (df *DataFactory) GenMsgStreamInsertMsg(idx int, chanName string) *msgstream.InsertMsg {
	var msg = &msgstream.InsertMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues: []uint32{uint32(idx)},
		},
		InsertRequest: internalpb.InsertRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Insert,
				MsgID:     0,
				Timestamp: Timestamp(idx + 1000),
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			SegmentID:      1,
			CollectionID:   UniqueID(0),
			ShardName:      chanName,
			Timestamps:     []Timestamp{Timestamp(idx + 1000)},
			RowIDs:         []UniqueID{UniqueID(idx)},
			// RowData: []*commonpb.Blob{{Value: df.rawData}},
			FieldsData: df.columnData,
			Version:    internalpb.InsertDataVersion_ColumnBased,
			NumRows:    1,
		},
	}
	return msg
}
|
|
|
|
|
2021-05-25 15:35:37 +08:00
|
|
|
func (df *DataFactory) GetMsgStreamTsInsertMsgs(n int, chanName string) (inMsgs []msgstream.TsMsg) {
|
2021-04-21 18:41:37 +08:00
|
|
|
for i := 0; i < n; i++ {
|
2021-05-25 15:35:37 +08:00
|
|
|
var msg = df.GenMsgStreamInsertMsg(i, chanName)
|
2021-04-21 18:41:37 +08:00
|
|
|
var tsMsg msgstream.TsMsg = msg
|
|
|
|
inMsgs = append(inMsgs, tsMsg)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-10-11 16:31:44 +08:00
|
|
|
func (df *DataFactory) GetMsgStreamInsertMsgs(n int) (msgs []*msgstream.InsertMsg) {
|
2021-04-21 18:41:37 +08:00
|
|
|
for i := 0; i < n; i++ {
|
2021-05-25 15:35:37 +08:00
|
|
|
var msg = df.GenMsgStreamInsertMsg(i, "")
|
2021-10-11 16:31:44 +08:00
|
|
|
msgs = append(msgs, msg)
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-04-02 17:43:29 +08:00
|
|
|
// GenMsgStreamDeleteMsg builds a DeleteMsg for chanName deleting the given
// primary keys. Per-row timestamps run 1000, 1001, ...; the message-level
// hash value and base timestamp are derived from the fixed idx 100.
func (df *DataFactory) GenMsgStreamDeleteMsg(pks []primaryKey, chanName string) *msgstream.DeleteMsg {
	idx := 100
	timestamps := make([]Timestamp, len(pks))
	for i := 0; i < len(pks); i++ {
		timestamps[i] = Timestamp(i) + 1000
	}
	var msg = &msgstream.DeleteMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues: []uint32{uint32(idx)},
		},
		DeleteRequest: internalpb.DeleteRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Delete,
				MsgID:     0,
				Timestamp: Timestamp(idx + 1000),
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			ShardName:      chanName,
			PrimaryKeys:    s.ParsePrimaryKeys2IDs(pks),
			Timestamps:     timestamps,
			NumRows:        int64(len(pks)),
		},
	}
	return msg
}
|
|
|
|
|
2021-11-04 15:36:19 +08:00
|
|
|
func genFlowGraphInsertMsg(chanName string) flowGraphMsg {
|
2021-10-11 16:31:44 +08:00
|
|
|
timeRange := TimeRange{
|
|
|
|
timestampMin: 0,
|
|
|
|
timestampMax: math.MaxUint64,
|
|
|
|
}
|
|
|
|
|
|
|
|
startPos := []*internalpb.MsgPosition{
|
|
|
|
{
|
|
|
|
ChannelName: chanName,
|
|
|
|
MsgID: make([]byte, 0),
|
|
|
|
Timestamp: 0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
var fgMsg = &flowGraphMsg{
|
|
|
|
insertMessages: make([]*msgstream.InsertMsg, 0),
|
|
|
|
timeRange: TimeRange{
|
|
|
|
timestampMin: timeRange.timestampMin,
|
|
|
|
timestampMax: timeRange.timestampMax,
|
|
|
|
},
|
|
|
|
startPositions: startPos,
|
|
|
|
endPositions: startPos,
|
|
|
|
}
|
|
|
|
|
|
|
|
dataFactory := NewDataFactory()
|
|
|
|
fgMsg.insertMessages = append(fgMsg.insertMessages, dataFactory.GetMsgStreamInsertMsgs(2)...)
|
|
|
|
|
|
|
|
return *fgMsg
|
|
|
|
}
|
|
|
|
|
2022-04-02 17:43:29 +08:00
|
|
|
func genFlowGraphDeleteMsg(pks []primaryKey, chanName string) flowGraphMsg {
|
2021-10-11 16:31:44 +08:00
|
|
|
timeRange := TimeRange{
|
|
|
|
timestampMin: 0,
|
|
|
|
timestampMax: math.MaxUint64,
|
|
|
|
}
|
|
|
|
|
|
|
|
startPos := []*internalpb.MsgPosition{
|
|
|
|
{
|
|
|
|
ChannelName: chanName,
|
|
|
|
MsgID: make([]byte, 0),
|
|
|
|
Timestamp: 0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
var fgMsg = &flowGraphMsg{
|
|
|
|
insertMessages: make([]*msgstream.InsertMsg, 0),
|
|
|
|
timeRange: TimeRange{
|
|
|
|
timestampMin: timeRange.timestampMin,
|
|
|
|
timestampMax: timeRange.timestampMax,
|
|
|
|
},
|
|
|
|
startPositions: startPos,
|
|
|
|
endPositions: startPos,
|
|
|
|
}
|
|
|
|
|
|
|
|
dataFactory := NewDataFactory()
|
|
|
|
fgMsg.deleteMessages = append(fgMsg.deleteMessages, dataFactory.GenMsgStreamDeleteMsg(pks, chanName))
|
|
|
|
|
|
|
|
return *fgMsg
|
|
|
|
}
|
|
|
|
|
2021-04-21 18:41:37 +08:00
|
|
|
// AllocatorFactory is a mock ID allocator for tests. The embedded mutex
// guards concurrent allocations; the flags below control its behavior.
type AllocatorFactory struct {
	sync.Mutex
	r             *rand.Rand // source for IDs when random mode is on
	isvalid       bool       // when false, every allocation fails
	random        bool       // return random IDs instead of the fixed 19530
	errAllocBatch bool       // force allocIDBatch to fail
}
|
|
|
|
|
2021-05-18 19:45:00 +08:00
|
|
|
// Compile-time assertion that AllocatorFactory implements allocatorInterface.
var _ allocatorInterface = &AllocatorFactory{}
|
|
|
|
|
2021-04-21 18:41:37 +08:00
|
|
|
func NewAllocatorFactory(id ...UniqueID) *AllocatorFactory {
|
|
|
|
f := &AllocatorFactory{
|
2021-11-08 19:49:07 +08:00
|
|
|
r: rand.New(rand.NewSource(time.Now().UnixNano())),
|
|
|
|
isvalid: len(id) == 0 || (len(id) > 0 && id[0] > 0),
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
|
|
|
return f
|
|
|
|
}
|
|
|
|
|
|
|
|
func (alloc *AllocatorFactory) allocID() (UniqueID, error) {
|
|
|
|
alloc.Lock()
|
|
|
|
defer alloc.Unlock()
|
2021-11-08 19:49:07 +08:00
|
|
|
|
|
|
|
if !alloc.isvalid {
|
|
|
|
return -1, errors.New("allocID error")
|
|
|
|
}
|
|
|
|
|
|
|
|
if alloc.random {
|
|
|
|
return alloc.r.Int63n(10000), nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return 19530, nil
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
|
|
|
|
2021-10-13 22:12:32 +08:00
|
|
|
func (alloc *AllocatorFactory) allocIDBatch(count uint32) (UniqueID, uint32, error) {
|
2021-11-26 17:43:17 +08:00
|
|
|
if count == 0 || alloc.errAllocBatch {
|
2021-10-25 20:13:51 +08:00
|
|
|
return 0, 0, errors.New("count should be greater than zero")
|
|
|
|
}
|
|
|
|
|
2021-10-13 22:12:32 +08:00
|
|
|
start, err := alloc.allocID()
|
|
|
|
return start, count, err
|
|
|
|
}
|
|
|
|
|
2021-12-09 11:09:06 +08:00
|
|
|
func (alloc *AllocatorFactory) genKey(ids ...UniqueID) (string, error) {
|
|
|
|
idx, err := alloc.allocID()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
2021-05-18 19:45:00 +08:00
|
|
|
}
|
2021-12-09 11:09:06 +08:00
|
|
|
ids = append(ids, idx)
|
2022-09-25 15:56:51 +08:00
|
|
|
return metautil.JoinIDPath(ids...), nil
|
2021-05-18 19:45:00 +08:00
|
|
|
}
|
|
|
|
|
2021-06-15 19:11:55 +08:00
|
|
|
// setID fixes the ID handed back by the mock's AllocID.
// If id == 0, AllocID will return not successful status
// If id == -1, AllocID will return err
func (m *RootCoordFactory) setID(id UniqueID) {
	m.ID = id // GOOSE TODO: random ID generator
}
|
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
// setCollectionID fixes the collection ID reported by DescribeCollection.
// Sentinel values -1 and -2 trigger the error branches there.
func (m *RootCoordFactory) setCollectionID(id UniqueID) {
	m.collectionID = id
}
|
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
// setCollectionName fixes the collection name reported by ShowCollections
// and DescribeCollection.
func (m *RootCoordFactory) setCollectionName(name string) {
	m.collectionName = name
}
|
|
|
|
|
2021-06-22 16:14:09 +08:00
|
|
|
func (m *RootCoordFactory) AllocID(ctx context.Context, in *rootcoordpb.AllocIDRequest) (*rootcoordpb.AllocIDResponse, error) {
|
|
|
|
resp := &rootcoordpb.AllocIDResponse{
|
2021-06-15 19:11:55 +08:00
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_UnexpectedError,
|
|
|
|
}}
|
|
|
|
|
2022-04-12 22:19:34 +08:00
|
|
|
if in.Count == 12 {
|
|
|
|
resp.Status.ErrorCode = commonpb.ErrorCode_Success
|
|
|
|
resp.ID = 1
|
|
|
|
resp.Count = 12
|
|
|
|
}
|
|
|
|
|
2021-06-15 19:11:55 +08:00
|
|
|
if m.ID == 0 {
|
|
|
|
resp.Status.Reason = "Zero ID"
|
|
|
|
return resp, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.ID == -1 {
|
2022-05-31 18:02:03 +08:00
|
|
|
return nil, errors.New(resp.Status.GetReason())
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
2021-06-15 19:11:55 +08:00
|
|
|
|
|
|
|
resp.ID = m.ID
|
2021-10-13 22:12:32 +08:00
|
|
|
resp.Count = in.GetCount()
|
2021-06-15 19:11:55 +08:00
|
|
|
resp.Status.ErrorCode = commonpb.ErrorCode_Success
|
2021-04-21 18:41:37 +08:00
|
|
|
return resp, nil
|
|
|
|
}
|
|
|
|
|
2021-06-22 16:14:09 +08:00
|
|
|
func (m *RootCoordFactory) AllocTimestamp(ctx context.Context, in *rootcoordpb.AllocTimestampRequest) (*rootcoordpb.AllocTimestampResponse, error) {
|
|
|
|
resp := &rootcoordpb.AllocTimestampResponse{
|
2021-06-07 13:58:37 +08:00
|
|
|
Status: &commonpb.Status{},
|
|
|
|
Timestamp: 1000,
|
|
|
|
}
|
2022-04-21 21:37:42 +08:00
|
|
|
|
|
|
|
v := ctx.Value(ctxKey{})
|
|
|
|
if v != nil && v.(string) == returnError {
|
|
|
|
resp.Status.ErrorCode = commonpb.ErrorCode_UnexpectedError
|
|
|
|
return resp, fmt.Errorf("injected error")
|
|
|
|
}
|
|
|
|
|
2021-06-07 13:58:37 +08:00
|
|
|
return resp, nil
|
|
|
|
}
|
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
func (m *RootCoordFactory) ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) {
|
2021-04-21 18:41:37 +08:00
|
|
|
resp := &milvuspb.ShowCollectionsResponse{
|
|
|
|
Status: &commonpb.Status{},
|
|
|
|
CollectionNames: []string{m.collectionName},
|
|
|
|
}
|
|
|
|
return resp, nil
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2021-06-21 17:28:03 +08:00
|
|
|
func (m *RootCoordFactory) DescribeCollection(ctx context.Context, in *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) {
|
2021-04-21 18:41:37 +08:00
|
|
|
f := MetaFactory{}
|
2022-03-25 14:27:25 +08:00
|
|
|
meta := f.GetCollectionMeta(m.collectionID, m.collectionName, m.pkType)
|
2021-04-21 18:41:37 +08:00
|
|
|
resp := &milvuspb.DescribeCollectionResponse{
|
2021-09-09 10:14:00 +08:00
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_UnexpectedError,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.collectionID == -2 {
|
|
|
|
resp.Status.Reason = "Status not success"
|
|
|
|
return resp, nil
|
2021-04-21 18:41:37 +08:00
|
|
|
}
|
2021-09-09 10:14:00 +08:00
|
|
|
|
|
|
|
if m.collectionID == -1 {
|
|
|
|
resp.Status.ErrorCode = commonpb.ErrorCode_Success
|
|
|
|
return resp, errors.New(resp.Status.GetReason())
|
|
|
|
}
|
|
|
|
|
|
|
|
resp.CollectionID = m.collectionID
|
|
|
|
resp.Schema = meta.Schema
|
2022-06-02 18:54:04 +08:00
|
|
|
resp.ShardsNum = 2
|
2021-09-09 10:14:00 +08:00
|
|
|
resp.Status.ErrorCode = commonpb.ErrorCode_Success
|
2021-04-21 18:41:37 +08:00
|
|
|
return resp, nil
|
|
|
|
}
|
|
|
|
|
2022-10-10 15:55:22 +08:00
|
|
|
func (m *RootCoordFactory) GetComponentStates(ctx context.Context) (*milvuspb.ComponentStates, error) {
|
|
|
|
return &milvuspb.ComponentStates{
|
|
|
|
State: &milvuspb.ComponentInfo{},
|
|
|
|
SubcomponentStates: make([]*milvuspb.ComponentInfo, 0),
|
2021-04-21 18:41:37 +08:00
|
|
|
Status: &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
},
|
|
|
|
}, nil
|
|
|
|
}
|
2021-09-23 18:31:55 +08:00
|
|
|
|
2022-04-20 14:03:40 +08:00
|
|
|
func (m *RootCoordFactory) ReportImport(ctx context.Context, req *rootcoordpb.ImportResult) (*commonpb.Status, error) {
|
2022-06-15 12:20:10 +08:00
|
|
|
if ctx != nil && ctx.Value(ctxKey{}) != nil {
|
|
|
|
if v := ctx.Value(ctxKey{}).(string); v == returnError {
|
|
|
|
return nil, fmt.Errorf("injected error")
|
|
|
|
}
|
2022-04-20 14:03:40 +08:00
|
|
|
}
|
2022-09-26 18:06:54 +08:00
|
|
|
if m.ReportImportErr {
|
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
2022-10-14 15:15:24 +08:00
|
|
|
}, fmt.Errorf("mock report import error")
|
2022-09-26 18:06:54 +08:00
|
|
|
}
|
|
|
|
if m.ReportImportNotSuccess {
|
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_UnexpectedError,
|
|
|
|
}, nil
|
|
|
|
}
|
2022-04-20 14:03:40 +08:00
|
|
|
return &commonpb.Status{
|
|
|
|
ErrorCode: commonpb.ErrorCode_Success,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2021-09-23 18:31:55 +08:00
|
|
|
// FailMessageStreamFactory mock MessageStreamFactory failure.
// It embeds dependency.Factory and overrides the stream constructors so that
// every NewMsgStream / NewTtMsgStream call fails.
type FailMessageStreamFactory struct {
	dependency.Factory
}
|
|
|
|
|
|
|
|
func (f *FailMessageStreamFactory) NewMsgStream(ctx context.Context) (msgstream.MsgStream, error) {
|
|
|
|
return nil, errors.New("mocked failure")
|
|
|
|
}
|
|
|
|
|
|
|
|
func (f *FailMessageStreamFactory) NewTtMsgStream(ctx context.Context) (msgstream.MsgStream, error) {
|
|
|
|
return nil, errors.New("mocked failure")
|
|
|
|
}
|
2021-10-25 20:13:51 +08:00
|
|
|
|
2022-04-02 17:43:29 +08:00
|
|
|
func genInsertDataWithPKs(PKs [2]primaryKey, dataType schemapb.DataType) *InsertData {
|
2021-11-08 19:49:07 +08:00
|
|
|
iD := genInsertData()
|
2022-04-02 17:43:29 +08:00
|
|
|
switch dataType {
|
|
|
|
case schemapb.DataType_Int64:
|
|
|
|
values := make([]int64, len(PKs))
|
|
|
|
for index, pk := range PKs {
|
|
|
|
values[index] = pk.(*int64PrimaryKey).Value
|
|
|
|
}
|
|
|
|
iD.Data[106].(*s.Int64FieldData).Data = values
|
|
|
|
case schemapb.DataType_VarChar:
|
|
|
|
values := make([]string, len(PKs))
|
|
|
|
for index, pk := range PKs {
|
|
|
|
values[index] = pk.(*varCharPrimaryKey).Value
|
|
|
|
}
|
|
|
|
iD.Data[109].(*s.StringFieldData).Data = values
|
|
|
|
default:
|
|
|
|
//TODO::
|
|
|
|
}
|
2021-11-08 19:49:07 +08:00
|
|
|
return iD
|
|
|
|
}
|
|
|
|
|
2021-10-25 20:13:51 +08:00
|
|
|
// genInsertData returns a fully populated two-row InsertData fixture covering
// every scalar and vector field type. Field 106 holds the int64 primary keys
// and 109 the varchar primary keys overwritten by genInsertDataWithPKs;
// fields 0 and 1 are presumably the RowID/Timestamp system columns — confirm
// against the schema produced by MetaFactory.
func genInsertData() *InsertData {
	return &InsertData{
		Data: map[int64]s.FieldData{
			0: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{11, 22},
			},
			1: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{3, 4},
			},
			// Float vector, dim 2 (two rows of two floats).
			100: &s.FloatVectorFieldData{
				NumRows: []int64{2},
				Data:    []float32{1.0, 6.0, 7.0, 8.0},
				Dim:     2,
			},
			// Binary vector, dim 32 (four bytes per row).
			101: &s.BinaryVectorFieldData{
				NumRows: []int64{2},
				Data:    []byte{0, 255, 255, 255, 128, 128, 128, 0},
				Dim:     32,
			},
			102: &s.BoolFieldData{
				NumRows: []int64{2},
				Data:    []bool{true, false},
			},
			103: &s.Int8FieldData{
				NumRows: []int64{2},
				Data:    []int8{5, 6},
			},
			104: &s.Int16FieldData{
				NumRows: []int64{2},
				Data:    []int16{7, 8},
			},
			105: &s.Int32FieldData{
				NumRows: []int64{2},
				Data:    []int32{9, 10},
			},
			// Int64 primary-key column (see genInsertDataWithPKs).
			106: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{1, 2},
			},
			107: &s.FloatFieldData{
				NumRows: []int64{2},
				Data:    []float32{2.333, 2.334},
			},
			108: &s.DoubleFieldData{
				NumRows: []int64{2},
				Data:    []float64{3.333, 3.334},
			},
			// VarChar primary-key column (see genInsertDataWithPKs).
			109: &s.StringFieldData{
				NumRows: []int64{2},
				Data:    []string{"test1", "test2"},
			},
		}}
}
|
2021-11-26 17:43:17 +08:00
|
|
|
|
|
|
|
// genEmptyInsertData returns an InsertData fixture with the same field layout
// as genInsertData but zero rows in every column — useful for exercising
// empty-input paths.
func genEmptyInsertData() *InsertData {
	return &InsertData{
		Data: map[int64]s.FieldData{
			0: &s.Int64FieldData{
				NumRows: []int64{0},
				Data:    []int64{},
			},
			1: &s.Int64FieldData{
				NumRows: []int64{0},
				Data:    []int64{},
			},
			100: &s.FloatVectorFieldData{
				NumRows: []int64{0},
				Data:    []float32{},
				Dim:     2,
			},
			101: &s.BinaryVectorFieldData{
				NumRows: []int64{0},
				Data:    []byte{},
				Dim:     32,
			},
			102: &s.BoolFieldData{
				NumRows: []int64{0},
				Data:    []bool{},
			},
			103: &s.Int8FieldData{
				NumRows: []int64{0},
				Data:    []int8{},
			},
			104: &s.Int16FieldData{
				NumRows: []int64{0},
				Data:    []int16{},
			},
			105: &s.Int32FieldData{
				NumRows: []int64{0},
				Data:    []int32{},
			},
			106: &s.Int64FieldData{
				NumRows: []int64{0},
				Data:    []int64{},
			},
			107: &s.FloatFieldData{
				NumRows: []int64{0},
				Data:    []float32{},
			},
			108: &s.DoubleFieldData{
				NumRows: []int64{0},
				Data:    []float64{},
			},
			109: &s.StringFieldData{
				NumRows: []int64{0},
				Data:    []string{},
			},
		}}
}
|
2022-02-07 22:45:46 +08:00
|
|
|
|
|
|
|
// genInsertDataWithExpiredTS is genInsertData with field 1 (presumably the
// Timestamp system column — confirm against the schema) replaced by old
// timestamps, so both rows read as expired relative to genTimestamp().
func genInsertDataWithExpiredTS() *InsertData {
	return &InsertData{
		Data: map[int64]s.FieldData{
			0: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{11, 22},
			},
			1: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{329749364736000000, 329500223078400000}, // 2009-11-10 23:00:00 +0000 UTC, 2009-10-31 23:00:00 +0000 UTC
			},
			100: &s.FloatVectorFieldData{
				NumRows: []int64{2},
				Data:    []float32{1.0, 6.0, 7.0, 8.0},
				Dim:     2,
			},
			101: &s.BinaryVectorFieldData{
				NumRows: []int64{2},
				Data:    []byte{0, 255, 255, 255, 128, 128, 128, 0},
				Dim:     32,
			},
			102: &s.BoolFieldData{
				NumRows: []int64{2},
				Data:    []bool{true, false},
			},
			103: &s.Int8FieldData{
				NumRows: []int64{2},
				Data:    []int8{5, 6},
			},
			104: &s.Int16FieldData{
				NumRows: []int64{2},
				Data:    []int16{7, 8},
			},
			105: &s.Int32FieldData{
				NumRows: []int64{2},
				Data:    []int32{9, 10},
			},
			106: &s.Int64FieldData{
				NumRows: []int64{2},
				Data:    []int64{1, 2},
			},
			107: &s.FloatFieldData{
				NumRows: []int64{2},
				Data:    []float32{2.333, 2.334},
			},
			108: &s.DoubleFieldData{
				NumRows: []int64{2},
				Data:    []float64{3.333, 3.334},
			},
			109: &s.StringFieldData{
				NumRows: []int64{2},
				Data:    []string{"test1", "test2"},
			},
		}}
}
|
|
|
|
|
|
|
|
func genTimestamp() typeutil.Timestamp {
|
|
|
|
// Generate birthday of Golang
|
|
|
|
gb := time.Date(2009, time.Month(11), 10, 23, 0, 0, 0, time.UTC)
|
|
|
|
return tsoutil.ComposeTSByTime(gb, 0)
|
|
|
|
}
|