// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package balance

import (
	"fmt"
	"testing"

	"github.com/samber/lo"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/suite"

	etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
	"github.com/milvus-io/milvus/internal/metastore/kv/querycoord"
	"github.com/milvus-io/milvus/internal/proto/datapb"
	"github.com/milvus-io/milvus/internal/proto/querypb"
	"github.com/milvus-io/milvus/internal/querycoordv2/meta"
	. "github.com/milvus-io/milvus/internal/querycoordv2/params"
	"github.com/milvus-io/milvus/internal/querycoordv2/session"
	"github.com/milvus-io/milvus/internal/querycoordv2/task"
	"github.com/milvus-io/milvus/internal/querycoordv2/utils"
	"github.com/milvus-io/milvus/pkg/common"
	"github.com/milvus-io/milvus/pkg/kv"
	"github.com/milvus-io/milvus/pkg/util/etcd"
	"github.com/milvus-io/milvus/pkg/util/paramtable"
	"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type RowCountBasedBalancerTestSuite struct {
	suite.Suite
	balancer      *RowCountBasedBalancer
	kv            kv.MetaKv
	broker        *meta.MockBroker
	mockScheduler *task.MockScheduler
}

func (suite *RowCountBasedBalancerTestSuite) SetupSuite() {
	paramtable.Init()
}

func (suite *RowCountBasedBalancerTestSuite) SetupTest() {
	var err error
	config := GenerateEtcdConfig()
	cli, err := etcd.GetEtcdClient(
		config.UseEmbedEtcd.GetAsBool(),
		config.EtcdUseSSL.GetAsBool(),
		config.Endpoints.GetAsStrings(),
		config.EtcdTLSCert.GetValue(),
		config.EtcdTLSKey.GetValue(),
		config.EtcdTLSCACert.GetValue(),
		config.EtcdTLSMinVersion.GetValue())
	suite.Require().NoError(err)
	suite.kv = etcdkv.NewEtcdKV(cli, config.MetaRootPath.GetValue())
	suite.broker = meta.NewMockBroker(suite.T())

	store := querycoord.NewCatalog(suite.kv)
	idAllocator := RandomIncrementIDAllocator()
	nodeManager := session.NewNodeManager()
	testMeta := meta.NewMeta(idAllocator, store, nodeManager)
	testTarget := meta.NewTargetManager(suite.broker, testMeta)

	distManager := meta.NewDistributionManager()
	suite.mockScheduler = task.NewMockScheduler(suite.T())
	suite.balancer = NewRowCountBasedBalancer(suite.mockScheduler, nodeManager, distManager, testMeta, testTarget)

	suite.broker.EXPECT().GetPartitions(mock.Anything, int64(1)).Return([]int64{1}, nil).Maybe()

	suite.mockScheduler.EXPECT().GetSegmentTaskDelta(mock.Anything, mock.Anything).Return(0).Maybe()
	suite.mockScheduler.EXPECT().GetChannelTaskDelta(mock.Anything, mock.Anything).Return(0).Maybe()
}

func (suite *RowCountBasedBalancerTestSuite) TearDownTest() {
	suite.kv.Close()
}

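// TestAssignSegment checks that AssignSegment places new segments on the nodes with the fewest rows and skips nodes that are stopping.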
func (suite *RowCountBasedBalancerTestSuite) TestAssignSegment() {
	cases := []struct {
		name          string
		distributions map[int64][]*meta.Segment
		assignments   []*meta.Segment
		nodes         []int64
		segmentCnts   []int
		states        []session.State
		expectPlans   []SegmentAssignPlan
	}{
		{
			name: "test normal assignment",
			distributions: map[int64][]*meta.Segment{
				2: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, NumOfRows: 20}, Node: 2}},
				3: {{SegmentInfo: &datapb.SegmentInfo{ID: 2, NumOfRows: 30}, Node: 3}},
			},
			assignments: []*meta.Segment{
				{SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 5}},
				{SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 10}},
				{SegmentInfo: &datapb.SegmentInfo{ID: 5, NumOfRows: 15}},
			},
			nodes:       []int64{1, 2, 3, 4},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal, session.NodeStateStopping},
			segmentCnts: []int{0, 1, 1, 0},
			expectPlans: []SegmentAssignPlan{
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 5}}, From: -1, To: 2},
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 10}}, From: -1, To: 1},
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, NumOfRows: 15}}, From: -1, To: 1},
			},
		},
		// TODO: add more cases
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			// We haven't found a better way to do per-subtest setup and teardown yet.
			// If you find one, please replace this.
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "127.0.0.1:0",
					Hostname: "localhost",
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
			}
			plans := balancer.AssignSegment(0, c.assignments, c.nodes, false)
			assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, plans)
		})
	}
}

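// TestSuspendNode checks that AssignSegment produces no plans when every candidate node is suspended.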
func (suite *RowCountBasedBalancerTestSuite) TestSuspendNode() {
	cases := []struct {
		name          string
		distributions map[int64][]*meta.Segment
		assignments   []*meta.Segment
		nodes         []int64
		segmentCnts   []int
		states        []session.State
		expectPlans   []SegmentAssignPlan
	}{
		{
			name: "test suspend node",
			distributions: map[int64][]*meta.Segment{
				2: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, NumOfRows: 20}, Node: 2}},
				3: {{SegmentInfo: &datapb.SegmentInfo{ID: 2, NumOfRows: 30}, Node: 3}},
			},
			assignments: []*meta.Segment{
				{SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 5}},
				{SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 10}},
				{SegmentInfo: &datapb.SegmentInfo{ID: 5, NumOfRows: 15}},
			},
			nodes:       []int64{1, 2, 3, 4},
			states:      []session.State{session.NodeStateSuspend, session.NodeStateSuspend, session.NodeStateSuspend, session.NodeStateSuspend},
			segmentCnts: []int{0, 1, 1, 0},
			expectPlans: []SegmentAssignPlan{},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			// We haven't found a better way to do per-subtest setup and teardown yet.
			// If you find one, please replace this.
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "localhost",
					Hostname: "localhost",
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
			}
			plans := balancer.AssignSegment(0, c.assignments, c.nodes, false)
			// all nodes have been suspended, so there is no node left to assign segments to
			suite.ElementsMatch(c.expectPlans, plans)
		})
	}
}

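// TestBalance covers BalanceReplica on a single replica: normal row-count balance, redundant segments, stopping nodes, channel balance, and already balanced distributions.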
func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
	cases := []struct {
		name                 string
		nodes                []int64
		notExistedNodes      []int64
		segmentCnts          []int
		states               []session.State
		shouldMock           bool
		distributions        map[int64][]*meta.Segment
		distributionChannels map[int64][]*meta.DmChannel
		expectPlans          []SegmentAssignPlan
		expectChannelPlans   []ChannelAssignPlan
		multiple             bool
	}{
		{
			name:        "normal balance",
			nodes:       []int64{1, 2},
			segmentCnts: []int{1, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal},
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
			},
			expectPlans: []SegmentAssignPlan{
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2}, From: 2, To: 1, Replica: newReplicaDefaultRG(1)},
			},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:        "skip balance for redundant segment",
			nodes:       []int64{1, 2},
			segmentCnts: []int{1, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal},
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 20}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 40}, Node: 2},
				},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:        "balance won't trigger",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal},
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 40}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 10}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 40}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 40}, Node: 3},
				},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:        "all stopping balance",
			nodes:       []int64{1, 2},
			segmentCnts: []int{1, 2},
			states:      []session.State{session.NodeStateStopping, session.NodeStateStopping},
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:        "part stopping balance channel",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateStopping},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
				3: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3},
				},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
		},
		{
			name:        "part stopping balance segment",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateStopping},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
				1: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 1},
				},
			},
			expectPlans: []SegmentAssignPlan{
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:          "balance channel",
			nodes:         []int64{2, 3},
			segmentCnts:   []int{2, 2},
			states:        []session.State{session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:    true,
			distributions: map[int64][]*meta.Segment{},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2},
				},
				3: {},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, Replica: newReplicaDefaultRG(1)},
			},
		},
		{
			name:          "unbalance stable view",
			nodes:         []int64{1, 2, 3},
			segmentCnts:   []int{0, 0, 0},
			states:        []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:    true,
			distributions: map[int64][]*meta.Segment{},
			distributionChannels: map[int64][]*meta.DmChannel{
				1: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v1"}, Node: 1},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1},
				},
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2},
				},
				3: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v4"}, Node: 3},
				},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:          "balance unstable view",
			nodes:         []int64{1, 2, 3},
			segmentCnts:   []int{0, 0, 0},
			states:        []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:    true,
			distributions: map[int64][]*meta.Segment{},
			distributionChannels: map[int64][]*meta.DmChannel{
				1: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v1"}, Node: 1},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1},
				},
				2: {},
				3: {},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 2, Replica: newReplicaDefaultRG(1)},
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 3, Replica: newReplicaDefaultRG(1)},
			},
			multiple: true,
		},
		{
			name:            "already balanced",
			nodes:           []int64{11, 22},
			notExistedNodes: []int64{10},
			segmentCnts:     []int{1, 2},
			states:          []session.State{session.NodeStateNormal, session.NodeStateNormal},
			distributions: map[int64][]*meta.Segment{
				11: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 30}, Node: 11}},
				22: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 22},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 22},
				},
				10: {{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 30}, Node: 10}},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			segments := []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			}
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, segments, nil)
			collection := utils.CreateTestCollection(1, 1)
			collection.LoadPercentage = 100
			collection.Status = querypb.LoadStatus_Loaded
			collection.LoadType = querypb.LoadType_LoadCollection
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(1, 1))
			balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(1, 1, c.nodes))
			suite.broker.ExpectedCalls = nil
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, segments, nil)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			balancer.targetMgr.UpdateCollectionCurrentTarget(1)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for node, v := range c.distributionChannels {
				balancer.dist.ChannelDistManager.Update(node, v...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "127.0.0.1:0",
					Hostname: "localhost",
					Version:  common.Version,
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]])))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
				suite.balancer.meta.ResourceManager.HandleNodeUp(c.nodes[i])
			}
			utils.RecoverAllCollection(balancer.meta)

			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
			if !c.multiple {
				assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
				assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
			} else {
				assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans, true)
				assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans, true)
			}

			// clear distribution
			for _, node := range c.nodes {
				balancer.meta.ResourceManager.HandleNodeDown(node)
				balancer.nodeManager.Remove(node)
				balancer.dist.SegmentDistManager.Update(node)
				balancer.dist.ChannelDistManager.Update(node)
			}
		})
	}
}

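// TestBalanceOnPartStopping checks balancing when one node is stopping and the result depends on whether its segments still exist in the next target.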
func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() {
	cases := []struct {
		name                 string
		nodes                []int64
		notExistedNodes      []int64
		segmentCnts          []int
		states               []session.State
		shouldMock           bool
		distributions        map[int64][]*meta.Segment
		distributionChannels map[int64][]*meta.DmChannel
		segmentInCurrent     []*datapb.SegmentInfo
		segmentInNext        []*datapb.SegmentInfo
		expectPlans          []SegmentAssignPlan
		expectChannelPlans   []ChannelAssignPlan
	}{
		{
			name:        "exist in next target",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateStopping},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			segmentInCurrent: []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			},

			segmentInNext: []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
			},
			expectPlans: []SegmentAssignPlan{
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
			expectChannelPlans: []ChannelAssignPlan{},
		},
		{
			name:        "not exist in next target",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateStopping},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			segmentInCurrent: []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			},
			segmentInNext: []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
				3: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3},
				},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			collection := utils.CreateTestCollection(1, 1)

			collection.LoadPercentage = 100
			collection.LoadType = querypb.LoadType_LoadCollection
			collection.Status = querypb.LoadStatus_Loaded
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(1, 1))
			balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(1, 1, append(c.nodes, c.notExistedNodes...)))
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, c.segmentInCurrent, nil)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			balancer.targetMgr.UpdateCollectionCurrentTarget(1)
			suite.broker.ExpectedCalls = nil
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, c.segmentInNext, nil)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for node, v := range c.distributionChannels {
				balancer.dist.ChannelDistManager.Update(node, v...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "127.0.0.1:0",
					Hostname: "localhost",
					Version:  common.Version,
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]])))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
				suite.balancer.meta.ResourceManager.HandleNodeUp(c.nodes[i])
			}
			utils.RecoverAllCollection(balancer.meta)

			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
			assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
			assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
		})
	}
}

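// TestBalanceOutboundNodes checks that segments and channels are moved away from a node that is not registered with the resource manager (an outbound node).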
func (suite *RowCountBasedBalancerTestSuite) TestBalanceOutboundNodes() {
	cases := []struct {
		name                 string
		nodes                []int64
		notExistedNodes      []int64
		segmentCnts          []int
		states               []session.State
		shouldMock           bool
		distributions        map[int64][]*meta.Segment
		distributionChannels map[int64][]*meta.DmChannel
		expectPlans          []SegmentAssignPlan
		expectChannelPlans   []ChannelAssignPlan
	}{
		{
			name:        "balance channel with outbound nodes",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
				3: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3},
				},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
		},
		{
			name:        "balance segment with outbound node",
			nodes:       []int64{1, 2, 3},
			segmentCnts: []int{1, 2, 2},
			states:      []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:  true,
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3},
				},
			},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
				},
				1: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 1},
				},
			},
			expectPlans: []SegmentAssignPlan{
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
				{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
			},
			expectChannelPlans: []ChannelAssignPlan{},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			collection := utils.CreateTestCollection(1, 1)
			segments := []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			}

			collection.LoadPercentage = 100
			collection.Status = querypb.LoadStatus_Loaded
			collection.LoadType = querypb.LoadType_LoadCollection
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(1, 1))
			balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(1, 1, append(c.nodes, c.notExistedNodes...)))
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, segments, nil)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			balancer.targetMgr.UpdateCollectionCurrentTarget(1)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for node, v := range c.distributionChannels {
				balancer.dist.ChannelDistManager.Update(node, v...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "127.0.0.1:0",
					Hostname: "localhost",
					Version:  common.Version,
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]])))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
			}
			// make node 3 outbound by registering only nodes 1 and 2 with the resource manager
			balancer.meta.ResourceManager.HandleNodeUp(1)
			balancer.meta.ResourceManager.HandleNodeUp(2)
			utils.RecoverAllCollection(balancer.meta)
			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
			assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
			assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)

			// clean up distribution for next test
			for node := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node)
				balancer.dist.ChannelDistManager.Update(node)
			}
		})
	}
}

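// TestBalanceOnLoadingCollection checks that collections which are still loading are skipped by balance.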
func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnLoadingCollection() {
	cases := []struct {
		name          string
		nodes         []int64
		distributions map[int64][]*meta.Segment
		expectPlans   []SegmentAssignPlan
	}{
		{
			name:  "normal balance",
			nodes: []int64{1, 2},
			distributions: map[int64][]*meta.Segment{
				1: {{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, Node: 1}},
				2: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2},
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 2},
				},
			},
			expectPlans: []SegmentAssignPlan{},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			collection := utils.CreateTestCollection(1, 1)
			collection.LoadPercentage = 100
			collection.Status = querypb.LoadStatus_Loading
			collection.LoadType = querypb.LoadType_LoadCollection
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(1, 1, c.nodes))
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
			suite.Empty(channelPlans)
			assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
		})
	}
}

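// getCollectionBalancePlans runs BalanceReplica for every replica of the given collection and aggregates the resulting segment and channel plans.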
func (suite *RowCountBasedBalancerTestSuite) getCollectionBalancePlans(balancer *RowCountBasedBalancer,
	collectionID int64,
) ([]SegmentAssignPlan, []ChannelAssignPlan) {
	replicas := balancer.meta.ReplicaManager.GetByCollection(collectionID)
	segmentPlans, channelPlans := make([]SegmentAssignPlan, 0), make([]ChannelAssignPlan, 0)
	for _, replica := range replicas {
		sPlans, cPlans := balancer.BalanceReplica(replica)
		segmentPlans = append(segmentPlans, sPlans...)
		channelPlans = append(channelPlans, cPlans...)
	}
	return segmentPlans, channelPlans
}

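// TestAssignSegmentWithGrowing checks that when the delegator on node 1 reports a large growing-row count, new segments are assigned to node 2 instead.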
func (suite *RowCountBasedBalancerTestSuite) TestAssignSegmentWithGrowing() {
	suite.SetupSuite()
	defer suite.TearDownTest()
	balancer := suite.balancer

	distributions := map[int64][]*meta.Segment{
		1: {
			{SegmentInfo: &datapb.SegmentInfo{ID: 1, NumOfRows: 20, CollectionID: 1}, Node: 1},
		},
		2: {
			{SegmentInfo: &datapb.SegmentInfo{ID: 2, NumOfRows: 20, CollectionID: 1}, Node: 2},
		},
	}
	for node, s := range distributions {
		balancer.dist.SegmentDistManager.Update(node, s...)
	}

	for _, node := range lo.Keys(distributions) {
		nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
			NodeID:   node,
			Address:  "127.0.0.1:0",
			Hostname: "localhost",
		})
		nodeInfo.UpdateStats(session.WithSegmentCnt(20))
		nodeInfo.SetState(session.NodeStateNormal)
		suite.balancer.nodeManager.Add(nodeInfo)
	}

	toAssign := []*meta.Segment{
		{SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 10, CollectionID: 1}, Node: 3},
		{SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 10, CollectionID: 1}, Node: 3},
	}

	// mock 50 growing rows on node 1, which is the delegator; expect all segments to be assigned to node 2
	leaderView := &meta.LeaderView{
		ID:               1,
		CollectionID:     1,
		NumOfGrowingRows: 50,
	}
	suite.balancer.dist.LeaderViewManager.Update(1, leaderView)
	plans := balancer.AssignSegment(1, toAssign, lo.Keys(distributions), false)
	for _, p := range plans {
		suite.Equal(int64(2), p.To)
	}
}

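// TestDisableBalanceChannel checks that channel balance plans are only generated when the AutoBalanceChannel config is enabled.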
func (suite *RowCountBasedBalancerTestSuite) TestDisableBalanceChannel() {
	cases := []struct {
		name                 string
		nodes                []int64
		notExistedNodes      []int64
		segmentCnts          []int
		states               []session.State
		shouldMock           bool
		distributions        map[int64][]*meta.Segment
		distributionChannels map[int64][]*meta.DmChannel
		expectPlans          []SegmentAssignPlan
		expectChannelPlans   []ChannelAssignPlan
		multiple             bool
		enableBalanceChannel bool
	}{
		{
			name:          "balance channel",
			nodes:         []int64{2, 3},
			segmentCnts:   []int{2, 2},
			states:        []session.State{session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:    true,
			distributions: map[int64][]*meta.Segment{},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2},
				},
				3: {},
			},
			expectPlans: []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{
				{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, Replica: newReplicaDefaultRG(1)},
			},
			enableBalanceChannel: true,
		},

		{
			name:          "disable balance channel",
			nodes:         []int64{2, 3},
			segmentCnts:   []int{2, 2},
			states:        []session.State{session.NodeStateNormal, session.NodeStateNormal},
			shouldMock:    true,
			distributions: map[int64][]*meta.Segment{},
			distributionChannels: map[int64][]*meta.DmChannel{
				2: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 2},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2},
				},
				3: {},
			},
			expectPlans:          []SegmentAssignPlan{},
			expectChannelPlans:   []ChannelAssignPlan{},
			enableBalanceChannel: false,
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer
			segments := []*datapb.SegmentInfo{
				{
					ID:          1,
					PartitionID: 1,
				},
				{
					ID:          2,
					PartitionID: 1,
				},
				{
					ID:          3,
					PartitionID: 1,
				},
				{
					ID:          4,
					PartitionID: 1,
				},
				{
					ID:          5,
					PartitionID: 1,
				},
			}
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, segments, nil)
			collection := utils.CreateTestCollection(1, 1)
			collection.LoadPercentage = 100
			collection.Status = querypb.LoadStatus_Loaded
			collection.LoadType = querypb.LoadType_LoadCollection
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(1, 1))
			balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(1, 1, append(c.nodes, c.notExistedNodes...)))
			suite.broker.ExpectedCalls = nil
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, int64(1)).Return(nil, segments, nil)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			balancer.targetMgr.UpdateCollectionCurrentTarget(1)
			balancer.targetMgr.UpdateCollectionNextTarget(int64(1))
			for node, s := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for node, v := range c.distributionChannels {
				balancer.dist.ChannelDistManager.Update(node, v...)
			}
			for i := range c.nodes {
				nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
					NodeID:   c.nodes[i],
					Address:  "127.0.0.1:0",
					Hostname: "localhost",
					Version:  common.Version,
				})
				nodeInfo.UpdateStats(session.WithSegmentCnt(c.segmentCnts[i]))
				nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]])))
				nodeInfo.SetState(c.states[i])
				suite.balancer.nodeManager.Add(nodeInfo)
				suite.balancer.meta.ResourceManager.HandleNodeUp(c.nodes[i])
			}

			Params.Save(Params.QueryCoordCfg.AutoBalanceChannel.Key, fmt.Sprint(c.enableBalanceChannel))
			defer Params.Reset(Params.QueryCoordCfg.AutoBalanceChannel.Key)
			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
			if !c.multiple {
				assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
				assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
			} else {
				assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans, true)
				assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans, true)
			}

			// clear distribution
			for node := range c.distributions {
				balancer.dist.SegmentDistManager.Update(node)
			}
			for node := range c.distributionChannels {
				balancer.dist.ChannelDistManager.Update(node)
			}
		})
	}
}

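// TestMultiReplicaBalance checks balancing for a collection with two replicas and verifies that channels are balanced before segments.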
func (suite *RowCountBasedBalancerTestSuite) TestMultiReplicaBalance() {
	cases := []struct {
		name               string
		collectionID       int64
		replicaWithNodes   map[int64][]int64
		segments           []*datapb.SegmentInfo
		channels           []*datapb.VchannelInfo
		states             []session.State
		shouldMock         bool
		segmentDist        map[int64][]*meta.Segment
		channelDist        map[int64][]*meta.DmChannel
		expectPlans        []SegmentAssignPlan
		expectChannelPlans []ChannelAssignPlan
	}{
		{
			name:             "balance on multi replica",
			collectionID:     1,
			replicaWithNodes: map[int64][]int64{1: {1, 2}, 2: {3, 4}},
			segments: []*datapb.SegmentInfo{
				{ID: 1, CollectionID: 1, PartitionID: 1},
				{ID: 2, CollectionID: 1, PartitionID: 1},
				{ID: 3, CollectionID: 1, PartitionID: 1},
				{ID: 4, CollectionID: 1, PartitionID: 1},
			},
			channels: []*datapb.VchannelInfo{
				{
					CollectionID: 1, ChannelName: "channel1", FlushedSegmentIds: []int64{1},
				},
				{
					CollectionID: 1, ChannelName: "channel2", FlushedSegmentIds: []int64{2},
				},
				{
					CollectionID: 1, ChannelName: "channel3", FlushedSegmentIds: []int64{3},
				},
				{
					CollectionID: 1, ChannelName: "channel4", FlushedSegmentIds: []int64{4},
				},
			},
			states: []session.State{session.NodeStateNormal, session.NodeStateNormal},
			segmentDist: map[int64][]*meta.Segment{
				1: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 30}, Node: 1},
					{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 30}, Node: 1},
				},
				3: {
					{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 30}, Node: 3},
					{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 30}, Node: 3},
				},
			},
			channelDist: map[int64][]*meta.DmChannel{
				1: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel1"}, Node: 1},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel2"}, Node: 1},
				},
				3: {
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel3"}, Node: 3},
					{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel4"}, Node: 3},
				},
			},
			expectPlans:        []SegmentAssignPlan{},
			expectChannelPlans: []ChannelAssignPlan{},
		},
	}

	for _, c := range cases {
		suite.Run(c.name, func() {
			suite.SetupSuite()
			defer suite.TearDownTest()
			balancer := suite.balancer

			// 1. set up target for multi collections
			collection := utils.CreateTestCollection(c.collectionID, int32(len(c.replicaWithNodes)))
			suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, c.collectionID).Return(
				c.channels, c.segments, nil)
			suite.broker.EXPECT().GetPartitions(mock.Anything, c.collectionID).Return([]int64{c.collectionID}, nil).Maybe()
			collection.LoadPercentage = 100
			collection.Status = querypb.LoadStatus_Loaded
			balancer.meta.CollectionManager.PutCollection(collection)
			balancer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(c.collectionID, c.collectionID))
			for replicaID, nodes := range c.replicaWithNodes {
				balancer.meta.ReplicaManager.Put(utils.CreateTestReplica(replicaID, c.collectionID, nodes))
			}
			balancer.targetMgr.UpdateCollectionNextTarget(c.collectionID)
			balancer.targetMgr.UpdateCollectionCurrentTarget(c.collectionID)
			balancer.targetMgr.UpdateCollectionNextTarget(c.collectionID)

			// 2. set up target for distribution for multi collections
			for node, s := range c.segmentDist {
				balancer.dist.SegmentDistManager.Update(node, s...)
			}
			for node, v := range c.channelDist {
				balancer.dist.ChannelDistManager.Update(node, v...)
			}

			// 3. set up nodes info and resourceManager for balancer
			for _, nodes := range c.replicaWithNodes {
				for i := range nodes {
					nodeInfo := session.NewNodeInfo(session.ImmutableNodeInfo{
						NodeID:  nodes[i],
						Address: "127.0.0.1:0",
						Version: common.Version,
					})
					nodeInfo.UpdateStats(session.WithChannelCnt(len(c.channelDist[nodes[i]])))
					nodeInfo.SetState(c.states[i])
					suite.balancer.nodeManager.Add(nodeInfo)
					suite.balancer.meta.ResourceManager.HandleNodeUp(nodes[i])
				}
			}

			// expected to balance channel first
			segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, c.collectionID)
			suite.Len(segmentPlans, 0)
			suite.Len(channelPlans, 2)

			// mock new distribution after channel balance
			balancer.dist.ChannelDistManager.Update(1, &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel1"}, Node: 1})
			balancer.dist.ChannelDistManager.Update(2, &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel2"}, Node: 2})
			balancer.dist.ChannelDistManager.Update(3, &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel3"}, Node: 3})
			balancer.dist.ChannelDistManager.Update(4, &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "channel4"}, Node: 4})

			// expected to balance segment
			segmentPlans, channelPlans = suite.getCollectionBalancePlans(balancer, c.collectionID)
			suite.Len(segmentPlans, 2)
			suite.Len(channelPlans, 0)
		})
	}
}

func TestRowCountBasedBalancerSuite(t *testing.T) {
	suite.Run(t, new(RowCountBasedBalancerTestSuite))
}

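// newReplicaDefaultRG builds a replica in the default resource group, used to express the expected balance plans in this file.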
func newReplicaDefaultRG(replicaID int64) *meta.Replica {
	return meta.NewReplica(
		&querypb.Replica{
			ID:            replicaID,
			ResourceGroup: meta.DefaultResourceGroupName,
		},
		typeutil.NewUniqueSet(),
	)
}

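// assertSegmentAssignPlanElementMatch compares two slices of segment assign plans on segment, replica ID, From, and To only;
// when subset is true, the right-hand plans only need to be a subset of the left-hand ones.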
// TODO: remove this helper after the resource group enhancement.
func assertSegmentAssignPlanElementMatch(suite *suite.Suite, left []SegmentAssignPlan, right []SegmentAssignPlan, subset ...bool) {
	suite.Equal(len(left), len(right))

	type comparablePlan struct {
		Segment   *meta.Segment
		ReplicaID int64
		From      int64
		To        int64
	}

	leftPlan := make([]comparablePlan, 0)
	for _, p := range left {
		replicaID := int64(-1)
		if p.Replica != nil {
			replicaID = p.Replica.GetID()
		}
		leftPlan = append(leftPlan, comparablePlan{
			Segment:   p.Segment,
			ReplicaID: replicaID,
			From:      p.From,
			To:        p.To,
		})
	}

	rightPlan := make([]comparablePlan, 0)
	for _, p := range right {
		replicaID := int64(-1)
		if p.Replica != nil {
			replicaID = p.Replica.GetID()
		}
		rightPlan = append(rightPlan, comparablePlan{
			Segment:   p.Segment,
			ReplicaID: replicaID,
			From:      p.From,
			To:        p.To,
		})
	}
	if len(subset) > 0 && subset[0] {
		suite.Subset(leftPlan, rightPlan)
	} else {
		suite.ElementsMatch(leftPlan, rightPlan)
	}
}

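// assertChannelAssignPlanElementMatch compares two slices of channel assign plans on channel, replica ID, From, and To only;
// when subset is true, the right-hand plans only need to be a subset of the left-hand ones.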
// TODO: remove this helper after the resource group enhancement.
func assertChannelAssignPlanElementMatch(suite *suite.Suite, left []ChannelAssignPlan, right []ChannelAssignPlan, subset ...bool) {
	type comparablePlan struct {
		Channel   *meta.DmChannel
		ReplicaID int64
		From      int64
		To        int64
	}

	leftPlan := make([]comparablePlan, 0)
	for _, p := range left {
		replicaID := int64(-1)
		if p.Replica != nil {
			replicaID = p.Replica.GetID()
		}
		leftPlan = append(leftPlan, comparablePlan{
			Channel:   p.Channel,
			ReplicaID: replicaID,
			From:      p.From,
			To:        p.To,
		})
	}

	rightPlan := make([]comparablePlan, 0)
	for _, p := range right {
		replicaID := int64(-1)
		if p.Replica != nil {
			replicaID = p.Replica.GetID()
		}
		rightPlan = append(rightPlan, comparablePlan{
			Channel:   p.Channel,
			ReplicaID: replicaID,
			From:      p.From,
			To:        p.To,
		})
	}
	if len(subset) > 0 && subset[0] {
		suite.Subset(leftPlan, rightPlan)
	} else {
		suite.ElementsMatch(leftPlan, rightPlan)
	}
}