// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package util

import (
	"bytes"
	"context"
	"encoding/binary"
	"fmt"
	"math"

	"github.com/cockroachdb/errors"
	"go.uber.org/zap"
	"google.golang.org/grpc"

	"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
	"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"
	"github.com/milvus-io/milvus-proto/go-api/v2/msgpb"
	"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
	"github.com/milvus-io/milvus/internal/datanode/metacache"
	"github.com/milvus-io/milvus/internal/proto/datapb"
	"github.com/milvus-io/milvus/internal/proto/etcdpb"
	"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
	"github.com/milvus-io/milvus/internal/storage"
	"github.com/milvus-io/milvus/internal/types"
	"github.com/milvus-io/milvus/internal/util/dependency"
	"github.com/milvus-io/milvus/pkg/common"
	"github.com/milvus-io/milvus/pkg/log"
	"github.com/milvus-io/milvus/pkg/mq/msgstream"
	"github.com/milvus-io/milvus/pkg/util/merr"
)

const returnError = "ReturnError"

type ctxKey struct{}
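
// Usage note (illustrative sketch, not exercised by this file directly): tests can ask
// RootCoordFactory.AllocTimestamp below to fail by planting returnError under ctxKey in the
// request context, e.g.
//
//	ctx := context.WithValue(context.Background(), ctxKey{}, returnError)
//	_, err := (&RootCoordFactory{}).AllocTimestamp(ctx, &rootcoordpb.AllocTimestampRequest{})
//	// err != nil: the mock reports an injected error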

// segID2SegInfo holds canned segment infos keyed by segment ID; DataCoordFactory.GetSegmentInfo
// falls back to it when no UserSegmentInfo entry matches. Used in data_sync_service_test.go.
var segID2SegInfo = map[int64]*datapb.SegmentInfo{
	1: {
		ID:            1,
		CollectionID:  1,
		PartitionID:   1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
	2: {
		ID:            2,
		CollectionID:  1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
	3: {
		ID:            3,
		CollectionID:  1,
		InsertChannel: "by-dev-rootcoord-dml-test_v1",
	},
}

// MetaFactory builds collection metadata for tests.
type MetaFactory struct{}

func NewMetaFactory() *MetaFactory {
	return &MetaFactory{}
}

// DataFactory carries pre-generated row-based and column-based test data.
type DataFactory struct {
	rawData    []byte
	columnData []*schemapb.FieldData
}

// RootCoordFactory is a RootCoord client mock whose responses are driven by its fields.
type RootCoordFactory struct {
	types.RootCoordClient
	ID             UniqueID
	collectionName string
	collectionID   UniqueID
	pkType         schemapb.DataType

	ReportImportErr        bool
	ReportImportNotSuccess bool

	ShowPartitionsErr        bool
	ShowPartitionsNotSuccess bool
	ShowPartitionsNames      []string
	ShowPartitionsIDs        []int64
}
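
// Illustrative sketch of how a test might drive the mock (the concrete call is hypothetical,
// not taken from this file): ID acts as a sentinel in AllocID, where 0 yields a "Zero ID"
// failure status, -1 yields an error, and any other value is echoed back.
//
//	rc := &RootCoordFactory{ID: 1, pkType: schemapb.DataType_Int64}
//	rc.setCollectionID(1)
//	rc.setCollectionName("col1")
//	resp, _ := rc.AllocID(context.Background(), &rootcoordpb.AllocIDRequest{Count: 1})
//	// resp.ID == 1, resp.Count == 1, resp.Status is Success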

// DataCoordFactory is a DataCoord client mock; the switches below force the corresponding RPC
// to return an error, a non-success status, or an empty result.
type DataCoordFactory struct {
	types.DataCoordClient

	SaveBinlogPathError  bool
	SaveBinlogPathStatus *commonpb.Status

	CompleteCompactionError      bool
	CompleteCompactionNotSuccess bool
	DropVirtualChannelError      bool

	DropVirtualChannelStatus commonpb.ErrorCode

	GetSegmentInfosError      bool
	GetSegmentInfosNotSuccess bool
	UserSegmentInfo           map[int64]*datapb.SegmentInfo

	AddSegmentError      bool
	AddSegmentNotSuccess bool
	AddSegmentEmpty      bool

	ReportDataNodeTtMsgsError      bool
	ReportDataNodeTtMsgsNotSuccess bool
}
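
// A minimal usage sketch (assumed test wiring, not code from this file): flipping a switch
// makes the matching RPC fail, e.g.
//
//	dc := &DataCoordFactory{SaveBinlogPathError: true}
//	_, err := dc.SaveBinlogPaths(context.Background(), &datapb.SaveBinlogPathsRequest{})
//	// err != nil because SaveBinlogPathError is set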

func (ds *DataCoordFactory) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest, opts ...grpc.CallOption) (*datapb.AssignSegmentIDResponse, error) {
	if ds.AddSegmentError {
		return nil, errors.New("Error")
	}
	res := &datapb.AssignSegmentIDResponse{
		Status: merr.Success(),
		SegIDAssignments: []*datapb.SegmentIDAssignment{
			{
				SegID: 666,
			},
		},
	}
	if ds.AddSegmentNotSuccess {
		res.Status = &commonpb.Status{
			ErrorCode: commonpb.ErrorCode_UnexpectedError,
		}
	} else if ds.AddSegmentEmpty {
		res.SegIDAssignments = []*datapb.SegmentIDAssignment{}
	}
	return res, nil
}

func (ds *DataCoordFactory) CompleteCompaction(ctx context.Context, req *datapb.CompactionPlanResult, opts ...grpc.CallOption) (*commonpb.Status, error) {
	if ds.CompleteCompactionError {
		return nil, errors.New("Error")
	}
	if ds.CompleteCompactionNotSuccess {
		return &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, nil
	}

	return &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, nil
}

func (ds *DataCoordFactory) SaveBinlogPaths(ctx context.Context, req *datapb.SaveBinlogPathsRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	if ds.SaveBinlogPathError {
		return nil, errors.New("Error")
	}
	return ds.SaveBinlogPathStatus, nil
}

func (ds *DataCoordFactory) DropVirtualChannel(ctx context.Context, req *datapb.DropVirtualChannelRequest, opts ...grpc.CallOption) (*datapb.DropVirtualChannelResponse, error) {
	if ds.DropVirtualChannelError {
		return nil, errors.New("error")
	}
	return &datapb.DropVirtualChannelResponse{
		Status: &commonpb.Status{
			ErrorCode: ds.DropVirtualChannelStatus,
		},
	}, nil
}

func (ds *DataCoordFactory) UpdateSegmentStatistics(ctx context.Context, req *datapb.UpdateSegmentStatisticsRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	return merr.Success(), nil
}

func (ds *DataCoordFactory) UpdateChannelCheckpoint(ctx context.Context, req *datapb.UpdateChannelCheckpointRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	return merr.Success(), nil
}

func (ds *DataCoordFactory) ReportDataNodeTtMsgs(ctx context.Context, req *datapb.ReportDataNodeTtMsgsRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	if ds.ReportDataNodeTtMsgsError {
		return nil, errors.New("mock ReportDataNodeTtMsgs error")
	}
	if ds.ReportDataNodeTtMsgsNotSuccess {
		return &commonpb.Status{
			ErrorCode: commonpb.ErrorCode_UnexpectedError,
		}, nil
	}
	return merr.Success(), nil
}

func (ds *DataCoordFactory) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	return merr.Success(), nil
}

func (ds *DataCoordFactory) BroadcastAlteredCollection(ctx context.Context, req *datapb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	return merr.Success(), nil
}

func (ds *DataCoordFactory) CheckHealth(ctx context.Context, req *milvuspb.CheckHealthRequest, opts ...grpc.CallOption) (*milvuspb.CheckHealthResponse, error) {
	return &milvuspb.CheckHealthResponse{
		IsHealthy: true,
	}, nil
}

func (ds *DataCoordFactory) GetSegmentInfo(ctx context.Context, req *datapb.GetSegmentInfoRequest, opts ...grpc.CallOption) (*datapb.GetSegmentInfoResponse, error) {
	if ds.GetSegmentInfosError {
		return nil, errors.New("mock get segment info error")
	}
	if ds.GetSegmentInfosNotSuccess {
		return &datapb.GetSegmentInfoResponse{
			Status: &commonpb.Status{
				ErrorCode: commonpb.ErrorCode_UnexpectedError,
				Reason:    "mock GetSegmentInfo failed",
			},
		}, nil
	}
	var segmentInfos []*datapb.SegmentInfo
	for _, segmentID := range req.SegmentIDs {
		if segInfo, ok := ds.UserSegmentInfo[segmentID]; ok {
			segmentInfos = append(segmentInfos, segInfo)
		} else if segInfo, ok := segID2SegInfo[segmentID]; ok {
			segmentInfos = append(segmentInfos, segInfo)
		} else {
			segmentInfos = append(segmentInfos, &datapb.SegmentInfo{
				ID:           segmentID,
				CollectionID: 1,
			})
		}
	}
	return &datapb.GetSegmentInfoResponse{
		Status: merr.Success(),
		Infos:  segmentInfos,
	}, nil
}

func (mf *MetaFactory) GetCollectionMeta(collectionID UniqueID, collectionName string, pkDataType schemapb.DataType) *etcdpb.CollectionMeta {
	sch := schemapb.CollectionSchema{
		Name:        collectionName,
		Description: "test collection by meta factory",
		AutoID:      true,
	}
	sch.Fields = mf.GetFieldSchema()
	for _, field := range sch.Fields {
		if field.GetDataType() == pkDataType && field.FieldID >= 100 {
			field.IsPrimaryKey = true
		}
	}

	return &etcdpb.CollectionMeta{
		ID:           collectionID,
		Schema:       &sch,
		CreateTime:   Timestamp(1),
		SegmentIDs:   make([]UniqueID, 0),
		PartitionIDs: []UniqueID{0},
	}
}
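
// Worked example (the concrete call is illustrative, not taken from a test in this file):
// GetCollectionMeta marks every field of the requested primary-key type with FieldID >= 100 as
// the primary key, so for an Int64 primary key only "int64_field" (106) gets IsPrimaryKey set.
//
//	meta := NewMetaFactory().GetCollectionMeta(1, "test_coll", schemapb.DataType_Int64)
//	// meta.Schema.Fields contains int64_field with IsPrimaryKey == true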

func (mf *MetaFactory) GetFieldSchema() []*schemapb.FieldSchema {
	fields := []*schemapb.FieldSchema{
		{
			FieldID:     0,
			Name:        "RowID",
			Description: "RowID field",
			DataType:    schemapb.DataType_Int64,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "f0_tk1",
					Value: "f0_tv1",
				},
			},
		},
		{
			FieldID:     1,
			Name:        "Timestamp",
			Description: "Timestamp field",
			DataType:    schemapb.DataType_Int64,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   "f1_tk1",
					Value: "f1_tv1",
				},
			},
		},
		{
			FieldID:     100,
			Name:        "float_vector_field",
			Description: "field 100",
			DataType:    schemapb.DataType_FloatVector,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   common.DimKey,
					Value: "2",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{
				{
					Key:   "indexkey",
					Value: "indexvalue",
				},
			},
		},
		{
			FieldID:     101,
			Name:        "binary_vector_field",
			Description: "field 101",
			DataType:    schemapb.DataType_BinaryVector,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   common.DimKey,
					Value: "32",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{
				{
					Key:   "indexkey",
					Value: "indexvalue",
				},
			},
		},
		{
			FieldID:     102,
			Name:        "bool_field",
			Description: "field 102",
			DataType:    schemapb.DataType_Bool,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     103,
			Name:        "int8_field",
			Description: "field 103",
			DataType:    schemapb.DataType_Int8,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     104,
			Name:        "int16_field",
			Description: "field 104",
			DataType:    schemapb.DataType_Int16,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     105,
			Name:        "int32_field",
			Description: "field 105",
			DataType:    schemapb.DataType_Int32,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     106,
			Name:        "int64_field",
			Description: "field 106",
			DataType:    schemapb.DataType_Int64,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     107,
			Name:        "float32_field",
			Description: "field 107",
			DataType:    schemapb.DataType_Float,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     108,
			Name:        "float64_field",
			Description: "field 108",
			DataType:    schemapb.DataType_Double,
			TypeParams:  []*commonpb.KeyValuePair{},
			IndexParams: []*commonpb.KeyValuePair{},
		},
		{
			FieldID:     109,
			Name:        "varChar_field",
			Description: "field 109",
			DataType:    schemapb.DataType_VarChar,
			TypeParams: []*commonpb.KeyValuePair{
				{
					Key:   common.MaxLengthKey,
					Value: "100",
				},
			},
			IndexParams: []*commonpb.KeyValuePair{},
		},
	}

	return fields
}

func NewDataFactory() *DataFactory {
	return &DataFactory{rawData: GenRowData(), columnData: GenColumnData()}
}

func GenRowData() (rawData []byte) {
	const DIM = 2

	// Float vector
	fvector := [DIM]float32{1, 2}
	for _, ele := range fvector {
		buf := make([]byte, 4)
		common.Endian.PutUint32(buf, math.Float32bits(ele))
		rawData = append(rawData, buf...)
	}

	// Binary vector
	// Dimension of binary vector is 32, so size = 32 / 8 = 4 bytes
	bvector := []byte{255, 255, 255, 0}
	rawData = append(rawData, bvector...)

	// Bool
	fieldBool := true
	buf := new(bytes.Buffer)
	if err := binary.Write(buf, common.Endian, fieldBool); err != nil {
		panic(err)
	}

	rawData = append(rawData, buf.Bytes()...)

	// int8
	var dataInt8 int8 = 100
	bint8 := new(bytes.Buffer)
	if err := binary.Write(bint8, common.Endian, dataInt8); err != nil {
		panic(err)
	}
	rawData = append(rawData, bint8.Bytes()...)

	// int16
	var dataInt16 int16 = 200
	bint16 := new(bytes.Buffer)
	if err := binary.Write(bint16, common.Endian, dataInt16); err != nil {
		panic(err)
	}
	rawData = append(rawData, bint16.Bytes()...)

	// int32
	var dataInt32 int32 = 300
	bint32 := new(bytes.Buffer)
	if err := binary.Write(bint32, common.Endian, dataInt32); err != nil {
		panic(err)
	}
	rawData = append(rawData, bint32.Bytes()...)

	// int64
	var dataInt64 int64 = 400
	bint64 := new(bytes.Buffer)
	if err := binary.Write(bint64, common.Endian, dataInt64); err != nil {
		panic(err)
	}
	rawData = append(rawData, bint64.Bytes()...)

	// float32
	var datafloat float32 = 1.1
	bfloat32 := new(bytes.Buffer)
	if err := binary.Write(bfloat32, common.Endian, datafloat); err != nil {
		panic(err)
	}
	rawData = append(rawData, bfloat32.Bytes()...)

	// float64
	datafloat64 := 2.2
	bfloat64 := new(bytes.Buffer)
	if err := binary.Write(bfloat64, common.Endian, datafloat64); err != nil {
		panic(err)
	}
	rawData = append(rawData, bfloat64.Bytes()...)
	log.Debug("Rawdata length:", zap.Int("Length of rawData", len(rawData)))
	return
}
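
// For reference (derived from the writes above, not an assertion made elsewhere in this file):
// one generated row is 8 (float vector) + 4 (binary vector) + 1 (bool) + 1 + 2 + 4 + 8
// (int8..int64) + 4 + 8 (float32/float64) = 40 bytes. The write order follows
// MetaFactory.GetFieldSchema fields 100-108; the varChar field (109) is not part of the
// row-based encoding.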

func GenColumnData() (fieldsData []*schemapb.FieldData) {
	// Float vector
	fVector := []float32{1, 2}
	floatVectorData := &schemapb.FieldData{
		Type:      schemapb.DataType_FloatVector,
		FieldName: "float_vector_field",
		FieldId:   100,
		Field: &schemapb.FieldData_Vectors{
			Vectors: &schemapb.VectorField{
				Dim: 2,
				Data: &schemapb.VectorField_FloatVector{
					FloatVector: &schemapb.FloatArray{
						Data: fVector,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, floatVectorData)

	// Binary vector
	// Dimension of binary vector is 32, so size = 32 / 8 = 4 bytes
	binaryVector := []byte{255, 255, 255, 0}
	binaryVectorData := &schemapb.FieldData{
		Type:      schemapb.DataType_BinaryVector,
		FieldName: "binary_vector_field",
		FieldId:   101,
		Field: &schemapb.FieldData_Vectors{
			Vectors: &schemapb.VectorField{
				Dim: 32,
				Data: &schemapb.VectorField_BinaryVector{
					BinaryVector: binaryVector,
				},
			},
		},
	}
	fieldsData = append(fieldsData, binaryVectorData)

	// bool
	boolData := []bool{true}
	boolFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Bool,
		FieldName: "bool_field",
		FieldId:   102,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_BoolData{
					BoolData: &schemapb.BoolArray{
						Data: boolData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, boolFieldData)

	// int8
	int8Data := []int32{100}
	int8FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int8,
		FieldName: "int8_field",
		FieldId:   103,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int8Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int8FieldData)

	// int16
	int16Data := []int32{200}
	int16FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int16,
		FieldName: "int16_field",
		FieldId:   104,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int16Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int16FieldData)

	// int32
	int32Data := []int32{300}
	int32FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int32,
		FieldName: "int32_field",
		FieldId:   105,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_IntData{
					IntData: &schemapb.IntArray{
						Data: int32Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int32FieldData)

	// int64
	int64Data := []int64{400}
	int64FieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Int64,
		FieldName: "int64_field",
		FieldId:   106,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_LongData{
					LongData: &schemapb.LongArray{
						Data: int64Data,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, int64FieldData)

	// float
	floatData := []float32{1.1}
	floatFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Float,
		FieldName: "float32_field",
		FieldId:   107,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_FloatData{
					FloatData: &schemapb.FloatArray{
						Data: floatData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, floatFieldData)

	// double
	doubleData := []float64{2.2}
	doubleFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_Double,
		FieldName: "float64_field",
		FieldId:   108,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_DoubleData{
					DoubleData: &schemapb.DoubleArray{
						Data: doubleData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, doubleFieldData)

	// varchar
	varCharData := []string{"test"}
	varCharFieldData := &schemapb.FieldData{
		Type:      schemapb.DataType_VarChar,
		FieldName: "varChar_field",
		FieldId:   109,
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_StringData{
					StringData: &schemapb.StringArray{
						Data: varCharData,
					},
				},
			},
		},
	}
	fieldsData = append(fieldsData, varCharFieldData)

	return
}

func (df *DataFactory) GenMsgStreamInsertMsg(idx int, chanName string) *msgstream.InsertMsg {
	msg := &msgstream.InsertMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues: []uint32{uint32(idx)},
		},
		InsertRequest: msgpb.InsertRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Insert,
				MsgID:     0,
				Timestamp: Timestamp(idx + 1000),
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			SegmentID:      1,
			CollectionID:   UniqueID(0),
			ShardName:      chanName,
			Timestamps:     []Timestamp{Timestamp(idx + 1000)},
			RowIDs:         []UniqueID{UniqueID(idx)},
			// RowData: []*commonpb.Blob{{Value: df.rawData}},
			FieldsData: df.columnData,
			Version:    msgpb.InsertDataVersion_ColumnBased,
			NumRows:    1,
		},
	}
	return msg
}
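
// Illustrative only (this exact snippet is not taken from a test in this file): the data
// factory can feed a flowgraph test with single-row, column-based insert messages, e.g.
//
//	df := NewDataFactory()
//	msg := df.GenMsgStreamInsertMsg(0, "by-dev-rootcoord-dml-test_v1")
//	// msg carries one row (NumRows == 1) with FieldsData from GenColumnData()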

func (df *DataFactory) GenMsgStreamInsertMsgWithTs(idx int, chanName string, ts Timestamp) *msgstream.InsertMsg {
	msg := &msgstream.InsertMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues:     []uint32{uint32(idx)},
			BeginTimestamp: ts,
			EndTimestamp:   ts,
		},
		InsertRequest: msgpb.InsertRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Insert,
				MsgID:     0,
				Timestamp: ts,
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			SegmentID:      1,
			CollectionID:   UniqueID(0),
			ShardName:      chanName,
			Timestamps:     []Timestamp{ts},
			RowIDs:         []UniqueID{UniqueID(idx)},
			// RowData: []*commonpb.Blob{{Value: df.rawData}},
			FieldsData: df.columnData,
			Version:    msgpb.InsertDataVersion_ColumnBased,
			NumRows:    1,
		},
	}
	return msg
}

func (df *DataFactory) GetMsgStreamTsInsertMsgs(n int, chanName string, ts Timestamp) (inMsgs []msgstream.TsMsg) {
	for i := 0; i < n; i++ {
		msg := df.GenMsgStreamInsertMsgWithTs(i, chanName, ts)
		var tsMsg msgstream.TsMsg = msg
		inMsgs = append(inMsgs, tsMsg)
	}
	return
}

func (df *DataFactory) GetMsgStreamInsertMsgs(n int) (msgs []*msgstream.InsertMsg) {
	for i := 0; i < n; i++ {
		msg := df.GenMsgStreamInsertMsg(i, "")
		msgs = append(msgs, msg)
	}
	return
}

func (df *DataFactory) GenMsgStreamDeleteMsg(pks []storage.PrimaryKey, chanName string) *msgstream.DeleteMsg {
	idx := 100
	timestamps := make([]Timestamp, len(pks))
	for i := 0; i < len(pks); i++ {
		timestamps[i] = Timestamp(i) + 1000
	}
	msg := &msgstream.DeleteMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues: []uint32{uint32(idx)},
		},
		DeleteRequest: msgpb.DeleteRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Delete,
				MsgID:     0,
				Timestamp: Timestamp(idx + 1000),
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			PartitionID:    1,
			ShardName:      chanName,
			PrimaryKeys:    storage.ParsePrimaryKeys2IDs(pks),
			Timestamps:     timestamps,
			NumRows:        int64(len(pks)),
		},
	}
	return msg
}
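
// A minimal sketch of building a delete message (assuming the storage package exposes a
// NewInt64PrimaryKey constructor; adjust if the helper differs):
//
//	pks := []storage.PrimaryKey{storage.NewInt64PrimaryKey(1)}
//	delMsg := NewDataFactory().GenMsgStreamDeleteMsg(pks, "by-dev-rootcoord-dml-test_v1")
//	// delMsg.NumRows == 1 and delMsg.PrimaryKeys holds the converted IDs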

func (df *DataFactory) GenMsgStreamDeleteMsgWithTs(idx int, pks []storage.PrimaryKey, chanName string, ts Timestamp) *msgstream.DeleteMsg {
	msg := &msgstream.DeleteMsg{
		BaseMsg: msgstream.BaseMsg{
			HashValues:     []uint32{uint32(idx)},
			BeginTimestamp: ts,
			EndTimestamp:   ts,
		},
		DeleteRequest: msgpb.DeleteRequest{
			Base: &commonpb.MsgBase{
				MsgType:   commonpb.MsgType_Delete,
				MsgID:     1,
				Timestamp: ts,
				SourceID:  0,
			},
			CollectionName: "col1",
			PartitionName:  "default",
			PartitionID:    1,
			CollectionID:   UniqueID(0),
			ShardName:      chanName,
			PrimaryKeys:    storage.ParsePrimaryKeys2IDs(pks),
			Timestamps:     []Timestamp{ts},
			NumRows:        int64(len(pks)),
		},
	}
	return msg
}

func (m *RootCoordFactory) setCollectionID(id UniqueID) {
	m.collectionID = id
}

func (m *RootCoordFactory) setCollectionName(name string) {
	m.collectionName = name
}

func (m *RootCoordFactory) AllocID(ctx context.Context, in *rootcoordpb.AllocIDRequest, opts ...grpc.CallOption) (*rootcoordpb.AllocIDResponse, error) {
	resp := &rootcoordpb.AllocIDResponse{
		Status: &commonpb.Status{
			ErrorCode: commonpb.ErrorCode_UnexpectedError,
		},
	}

	if in.Count == 12 {
		resp.Status.ErrorCode = commonpb.ErrorCode_Success
		resp.ID = 1
		resp.Count = 12
	}

	if m.ID == 0 {
		resp.Status.Reason = "Zero ID"
		return resp, nil
	}

	if m.ID == -1 {
		return nil, merr.Error(resp.Status)
	}

	resp.ID = m.ID
	resp.Count = in.GetCount()
	resp.Status.ErrorCode = commonpb.ErrorCode_Success
	return resp, nil
}

func (m *RootCoordFactory) AllocTimestamp(ctx context.Context, in *rootcoordpb.AllocTimestampRequest, opts ...grpc.CallOption) (*rootcoordpb.AllocTimestampResponse, error) {
	resp := &rootcoordpb.AllocTimestampResponse{
		Status:    &commonpb.Status{},
		Timestamp: 1000,
	}

	v := ctx.Value(ctxKey{})
	if v != nil && v.(string) == returnError {
		resp.Status.ErrorCode = commonpb.ErrorCode_UnexpectedError
		return resp, fmt.Errorf("injected error")
	}

	return resp, nil
}

func (m *RootCoordFactory) ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowCollectionsResponse, error) {
	resp := &milvuspb.ShowCollectionsResponse{
		Status:          &commonpb.Status{},
		CollectionNames: []string{m.collectionName},
	}
	return resp, nil
}

func (m *RootCoordFactory) DescribeCollectionInternal(ctx context.Context, in *milvuspb.DescribeCollectionRequest, opts ...grpc.CallOption) (*milvuspb.DescribeCollectionResponse, error) {
	f := MetaFactory{}
	meta := f.GetCollectionMeta(m.collectionID, m.collectionName, m.pkType)
	resp := &milvuspb.DescribeCollectionResponse{
		Status: &commonpb.Status{
			ErrorCode: commonpb.ErrorCode_UnexpectedError,
		},
	}

	if m.collectionID == -2 {
		resp.Status.Reason = "Status not success"
		return resp, nil
	}

	if m.collectionID == -1 {
		return nil, merr.Error(resp.Status)
	}

	resp.CollectionID = m.collectionID
	resp.Schema = meta.Schema
	resp.ShardsNum = common.DefaultShardsNum
	resp.Status.ErrorCode = commonpb.ErrorCode_Success
	return resp, nil
}

func (m *RootCoordFactory) ShowPartitions(ctx context.Context, req *milvuspb.ShowPartitionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowPartitionsResponse, error) {
	if m.ShowPartitionsErr {
		return &milvuspb.ShowPartitionsResponse{
			Status: merr.Success(),
		}, fmt.Errorf("mock show partitions error")
	}

	if m.ShowPartitionsNotSuccess {
		return &milvuspb.ShowPartitionsResponse{
			Status: &commonpb.Status{
				ErrorCode: commonpb.ErrorCode_UnexpectedError,
				Reason:    "not success",
			},
		}, nil
	}

	return &milvuspb.ShowPartitionsResponse{
		Status:         merr.Success(),
		PartitionNames: m.ShowPartitionsNames,
		PartitionIDs:   m.ShowPartitionsIDs,
	}, nil
}

func (m *RootCoordFactory) GetComponentStates(ctx context.Context, req *milvuspb.GetComponentStatesRequest, opts ...grpc.CallOption) (*milvuspb.ComponentStates, error) {
	return &milvuspb.ComponentStates{
		State:              &milvuspb.ComponentInfo{},
		SubcomponentStates: make([]*milvuspb.ComponentInfo, 0),
		Status:             merr.Success(),
	}, nil
}

// FailMessageStreamFactory mocks a message stream factory whose stream constructors always fail.
type FailMessageStreamFactory struct {
	dependency.Factory
}

func (f *FailMessageStreamFactory) NewMsgStream(ctx context.Context) (msgstream.MsgStream, error) {
	return nil, errors.New("mocked failure")
}

func (f *FailMessageStreamFactory) NewTtMsgStream(ctx context.Context) (msgstream.MsgStream, error) {
	return nil, errors.New("mocked failure")
}

// MockDataSuiteBase composes mock dependencies to generate test datasets.
type MockDataSuiteBase struct {
	schema *schemapb.CollectionSchema
}

// PrepareData initializes the suite schema with RowID, Timestamp, an int64 primary key and a
// 128-dim float vector field.
func (s *MockDataSuiteBase) PrepareData() {
	s.schema = &schemapb.CollectionSchema{
		Name: "test_collection",
		Fields: []*schemapb.FieldSchema{
			{FieldID: common.RowIDField, Name: common.RowIDFieldName, DataType: schemapb.DataType_Int64},
			{FieldID: common.TimeStampField, Name: common.TimeStampFieldName, DataType: schemapb.DataType_Int64},
			{FieldID: common.StartOfUserFieldID, DataType: schemapb.DataType_Int64, IsPrimaryKey: true, Name: "pk"},
			{FieldID: common.StartOfUserFieldID + 1, DataType: schemapb.DataType_FloatVector, TypeParams: []*commonpb.KeyValuePair{
				{Key: common.DimKey, Value: "128"},
			}},
		},
	}
}

// EmptyBfsFactory returns an empty BloomFilterSet regardless of the segment info.
func EmptyBfsFactory(info *datapb.SegmentInfo) *metacache.BloomFilterSet {
	return metacache.NewBloomFilterSet()
}

// GetWatchInfoByOpID builds a ChannelWatchInfo for the given operation ID, channel and state,
// carrying a fixed test schema.
func GetWatchInfoByOpID(opID UniqueID, channel string, state datapb.ChannelWatchState) *datapb.ChannelWatchInfo {
	return &datapb.ChannelWatchInfo{
		OpID:  opID,
		State: state,
		Vchan: &datapb.VchannelInfo{
			CollectionID: 1,
			ChannelName:  channel,
		},
		Schema: &schemapb.CollectionSchema{
			Name: "test_collection",
			Fields: []*schemapb.FieldSchema{
				{
					FieldID: common.RowIDField, Name: common.RowIDFieldName, DataType: schemapb.DataType_Int64,
				},
				{
					FieldID: common.TimeStampField, Name: common.TimeStampFieldName, DataType: schemapb.DataType_Int64,
				},
				{
					FieldID: 100, Name: "pk", DataType: schemapb.DataType_Int64, IsPrimaryKey: true,
				},
				{
					FieldID: 101, Name: "vector", DataType: schemapb.DataType_FloatVector,
					TypeParams: []*commonpb.KeyValuePair{
						{Key: common.DimKey, Value: "128"},
					},
				},
			},
		},
	}
}