// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package datacoord

import (
	"context"
	"fmt"

	"github.com/samber/lo"
	"go.uber.org/zap"

	"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
	"github.com/milvus-io/milvus/internal/proto/datapb"
	"github.com/milvus-io/milvus/pkg/log"
	"github.com/milvus-io/milvus/pkg/util/commonpbutil"
	"github.com/milvus-io/milvus/pkg/util/merr"
	"github.com/milvus-io/milvus/pkg/util/paramtable"
)

// Cluster provides interfaces to interact with the DataNode cluster.
type Cluster interface {
	Startup(ctx context.Context, nodes []*NodeInfo) error
	Register(node *NodeInfo) error
	UnRegister(node *NodeInfo) error
	Watch(ctx context.Context, ch string, collectionID UniqueID) error
	Flush(ctx context.Context, nodeID int64, channel string, segments []*datapb.SegmentInfo) error
	FlushChannels(ctx context.Context, nodeID int64, flushTs Timestamp, channels []string) error
	Import(ctx context.Context, nodeID int64, it *datapb.ImportTaskRequest)
	AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error)
	GetSessions() []*Session
	Close()
}

var _ Cluster = (*ClusterImpl)(nil)
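
// The sketch below is illustrative only and is not part of the production
// code path; it shows the lifecycle a caller (such as the datacoord server)
// typically drives. ctx, sessionManager, channelManager, nodes, ch, and
// collID are hypothetical placeholders.
//
//	cluster := NewClusterImpl(sessionManager, channelManager)
//	if err := cluster.Startup(ctx, nodes); err != nil {
//		return err
//	}
//	defer cluster.Close()
//	if err := cluster.Watch(ctx, ch, collID); err != nil {
//		return err
//	}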

type ClusterImpl struct {
	sessionManager SessionManager
	channelManager *ChannelManager
}

// NewClusterImpl creates a new ClusterImpl from the given session and channel managers.
func NewClusterImpl(sessionManager SessionManager, channelManager *ChannelManager) *ClusterImpl {
	c := &ClusterImpl{
		sessionManager: sessionManager,
		channelManager: channelManager,
	}

	return c
}

// Startup initializes the cluster with the given DataNodes.
func (c *ClusterImpl) Startup(ctx context.Context, nodes []*NodeInfo) error {
	for _, node := range nodes {
		c.sessionManager.AddSession(node)
	}
	currs := make([]int64, 0, len(nodes))
	for _, node := range nodes {
		currs = append(currs, node.NodeID)
	}
	return c.channelManager.Startup(ctx, currs)
}

// Register registers a new node into the cluster.
func (c *ClusterImpl) Register(node *NodeInfo) error {
	c.sessionManager.AddSession(node)
	return c.channelManager.AddNode(node.NodeID)
}

// UnRegister removes a node from the cluster.
func (c *ClusterImpl) UnRegister(node *NodeInfo) error {
	c.sessionManager.DeleteSession(node)
	return c.channelManager.DeleteNode(node.NodeID)
}

// Watch tries to add the given channel to the DataNode cluster so that a node starts watching it.
func (c *ClusterImpl) Watch(ctx context.Context, ch string, collectionID UniqueID) error {
	return c.channelManager.Watch(ctx, &channelMeta{Name: ch, CollectionID: collectionID})
}
|
2021-05-26 19:06:56 +08:00
|
|
|
|
2022-10-22 12:09:28 +08:00
|
|
|
// Flush sends flush requests to dataNodes specified
|
|
|
|
// which also according to channels where segments are assigned to.
|
2023-12-11 17:52:37 +08:00
|
|
|
func (c *ClusterImpl) Flush(ctx context.Context, nodeID int64, channel string, segments []*datapb.SegmentInfo) error {
|
2022-10-22 12:09:28 +08:00
|
|
|
if !c.channelManager.Match(nodeID, channel) {
|
|
|
|
log.Warn("node is not matched with channel",
|
|
|
|
zap.String("channel", channel),
|
|
|
|
zap.Int64("nodeID", nodeID),
|
|
|
|
)
|
|
|
|
return fmt.Errorf("channel %s is not watched on node %d", channel, nodeID)
|
2021-01-22 19:43:27 +08:00
|
|
|
}
|
2022-10-22 12:09:28 +08:00
|
|
|
|
2023-11-13 11:16:18 +08:00
|
|
|
_, collID := c.channelManager.getCollectionIDByChannel(channel)
|
2022-10-22 12:09:28 +08:00
|
|
|
|
|
|
|
getSegmentID := func(segment *datapb.SegmentInfo, _ int) int64 {
|
|
|
|
return segment.GetID()
|
2021-07-12 11:03:52 +08:00
|
|
|
}
|
|
|
|
|
2022-10-22 12:09:28 +08:00
|
|
|
req := &datapb.FlushSegmentsRequest{
|
|
|
|
Base: commonpbutil.NewMsgBase(
|
|
|
|
commonpbutil.WithMsgType(commonpb.MsgType_Flush),
|
2022-11-04 14:25:38 +08:00
|
|
|
commonpbutil.WithSourceID(paramtable.GetNodeID()),
|
2022-11-03 22:47:35 +08:00
|
|
|
commonpbutil.WithTargetID(nodeID),
|
2022-10-22 12:09:28 +08:00
|
|
|
),
|
2023-11-13 11:16:18 +08:00
|
|
|
CollectionID: collID,
|
2022-11-10 22:13:04 +08:00
|
|
|
SegmentIDs: lo.Map(segments, getSegmentID),
|
2023-10-17 12:00:10 +08:00
|
|
|
ChannelName: channel,
|
2021-01-28 11:24:41 +08:00
|
|
|
}
|
2022-10-22 12:09:28 +08:00
|
|
|
|
|
|
|
c.sessionManager.Flush(ctx, nodeID, req)
|
|
|
|
return nil
|
2021-07-12 11:03:52 +08:00
|
|
|
}
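
// FlushChannels tells the DataNode specified by nodeID to flush the given
// channels up to flushTs. It returns an error if any of the channels is not
// currently watched on that node.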
func (c *ClusterImpl) FlushChannels(ctx context.Context, nodeID int64, flushTs Timestamp, channels []string) error {
	if len(channels) == 0 {
		return nil
	}

	for _, channel := range channels {
		if !c.channelManager.Match(nodeID, channel) {
			return fmt.Errorf("channel %s is not watched on node %d", channel, nodeID)
		}
	}

	req := &datapb.FlushChannelsRequest{
		Base: commonpbutil.NewMsgBase(
			commonpbutil.WithSourceID(paramtable.GetNodeID()),
			commonpbutil.WithTargetID(nodeID),
		),
		FlushTs:  flushTs,
		Channels: channels,
	}

	return c.sessionManager.FlushChannels(ctx, nodeID, req)
}

// Import sends an import request to the DataNode whose ID == nodeID.
func (c *ClusterImpl) Import(ctx context.Context, nodeID int64, it *datapb.ImportTaskRequest) {
	c.sessionManager.Import(ctx, nodeID, it)
}
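
// AddImportSegment looks up the DataNode that watches the channel of the
// request and asks it to add the import segment.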
func (c *ClusterImpl) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
	// Look for the DataNode that watches the channel.
	ok, nodeID := c.channelManager.getNodeIDByChannelName(req.GetChannelName())
	if !ok {
		err := merr.WrapErrChannelNotFound(req.GetChannelName(), "no DataNode watches this channel")
		log.Error("no DataNode found for channel", zap.String("channelName", req.GetChannelName()), zap.Error(err))
		return nil, err
	}
	return c.sessionManager.AddImportSegment(ctx, nodeID, req)
}

// GetSessions returns the sessions of all DataNodes in the cluster.
func (c *ClusterImpl) GetSessions() []*Session {
	return c.sessionManager.GetSessions()
}

// Close releases the resources held by the Cluster.
func (c *ClusterImpl) Close() {
	c.sessionManager.Close()
	c.channelManager.Close()
}