package querynode

import "C"

import (
    "context"
    "errors"
    "fmt"
    "log"
    "sync"

    "github.com/golang/protobuf/proto"

    "github.com/zilliztech/milvus-distributed/internal/msgstream"
    "github.com/zilliztech/milvus-distributed/internal/proto/commonpb"
    "github.com/zilliztech/milvus-distributed/internal/proto/internalpb"
    "github.com/zilliztech/milvus-distributed/internal/proto/servicepb"
)
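
// searchService consumes search requests from the search message stream,
// runs them against the local collectionReplica once the tSafe timestamp
// allows, and publishes results (or failures) on the search result stream.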
type searchService struct {
    ctx    context.Context
    wait   sync.WaitGroup
    cancel context.CancelFunc

    replica      collectionReplica
    tSafeWatcher *tSafeWatcher

    serviceableTime      Timestamp
    serviceableTimeMutex sync.Mutex

    msgBuffer   chan msgstream.TsMsg
    unsolvedMsg []msgstream.TsMsg

    searchMsgStream       msgstream.MsgStream
    searchResultMsgStream msgstream.MsgStream
    queryNodeID           UniqueID
}

type ResultEntityIds []UniqueID
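
// newSearchService wires up the Pulsar consumer for search requests and the
// Pulsar producer for search results, using the channel names and buffer
// sizes configured in Params, and returns an unstarted searchService.
//
// Typical usage (a sketch; the surrounding query node wiring is assumed):
//
//	ss := newSearchService(ctx, replica)
//	go ss.start() // start blocks until its goroutines exit
//	defer ss.close()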
func newSearchService(ctx context.Context, replica collectionReplica) *searchService {
    receiveBufSize := Params.SearchReceiveBufSize
    pulsarBufSize := Params.SearchPulsarBufSize

    msgStreamURL := Params.PulsarAddress

    consumeChannels := Params.SearchChannelNames
    consumeSubName := Params.MsgChannelSubName
    searchStream := msgstream.NewPulsarMsgStream(ctx, receiveBufSize)
    searchStream.SetPulsarClient(msgStreamURL)
    unmarshalDispatcher := msgstream.NewUnmarshalDispatcher()
    searchStream.CreatePulsarConsumers(consumeChannels, consumeSubName, unmarshalDispatcher, pulsarBufSize)
    var inputStream msgstream.MsgStream = searchStream

    producerChannels := Params.SearchResultChannelNames
    searchResultStream := msgstream.NewPulsarMsgStream(ctx, receiveBufSize)
    searchResultStream.SetPulsarClient(msgStreamURL)
    searchResultStream.CreatePulsarProducers(producerChannels)
    var outputStream msgstream.MsgStream = searchResultStream

    searchServiceCtx, searchServiceCancel := context.WithCancel(ctx)
    msgBuffer := make(chan msgstream.TsMsg, receiveBufSize)
    unsolvedMsg := make([]msgstream.TsMsg, 0)
    return &searchService{
        ctx:             searchServiceCtx,
        cancel:          searchServiceCancel,
        serviceableTime: Timestamp(0),
        msgBuffer:       msgBuffer,
        unsolvedMsg:     unsolvedMsg,

        replica:      replica,
        tSafeWatcher: newTSafeWatcher(),

        searchMsgStream:       inputStream,
        searchResultMsgStream: outputStream,
        queryNodeID:           Params.QueryNodeID,
    }
}
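
// start starts both message streams, registers the tSafe watcher, and launches
// the two consumer goroutines; it blocks until they exit.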
func (ss *searchService) start() {
    ss.searchMsgStream.Start()
    ss.searchResultMsgStream.Start()
    ss.register()
    ss.wait.Add(2)
    go ss.receiveSearchMsg()
    go ss.doUnsolvedMsgSearch()
    ss.wait.Wait()
}
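
// close shuts down both message streams and cancels the service context,
// which stops the receiveSearchMsg and doUnsolvedMsgSearch goroutines.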
func (ss *searchService) close() {
    if ss.searchMsgStream != nil {
        ss.searchMsgStream.Close()
    }
    if ss.searchResultMsgStream != nil {
        ss.searchResultMsgStream.Close()
    }
    ss.cancel()
}
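
// register subscribes this service's tSafeWatcher to the replica's tSafe so
// that waitNewTSafe can block on timestamp updates.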
func (ss *searchService) register() {
    tSafe := ss.replica.getTSafe()
    tSafe.registerTSafeWatcher(ss.tSafeWatcher)
}
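
// waitNewTSafe blocks until tSafe advances and returns the new timestamp.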
func (ss *searchService) waitNewTSafe() Timestamp {
    // block until dataSyncService updates tSafe
    ss.tSafeWatcher.hasUpdate()
    timestamp := ss.replica.getTSafe().get()
    return timestamp
}
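
// getServiceableTime returns the latest timestamp up to which search requests
// can currently be served, guarded by serviceableTimeMutex.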
func (ss *searchService) getServiceableTime() Timestamp {
    ss.serviceableTimeMutex.Lock()
    defer ss.serviceableTimeMutex.Unlock()
    return ss.serviceableTime
}
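
// setServiceableTime updates the serviceable timestamp after tSafe advances.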
func (ss *searchService) setServiceableTime(t Timestamp) {
    ss.serviceableTimeMutex.Lock()
    // TODO:: add gracefulTime
    ss.serviceableTime = t
    ss.serviceableTimeMutex.Unlock()
}
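
// receiveSearchMsg consumes search requests from the search stream. Requests
// whose begin timestamp is still ahead of the serviceable time are parked in
// msgBuffer for doUnsolvedMsgSearch; the rest are searched immediately.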
func (ss *searchService) receiveSearchMsg() {
    defer ss.wait.Done()
    for {
        select {
        case <-ss.ctx.Done():
            return
        default:
            msgPack := ss.searchMsgStream.Consume()
            if msgPack == nil || len(msgPack.Msgs) <= 0 {
                continue
            }
            searchMsg := make([]msgstream.TsMsg, 0)
            serverTime := ss.getServiceableTime()
            for i := range msgPack.Msgs {
                if msgPack.Msgs[i].BeginTs() > serverTime {
                    ss.msgBuffer <- msgPack.Msgs[i]
                    continue
                }
                searchMsg = append(searchMsg, msgPack.Msgs[i])
            }
            for _, msg := range searchMsg {
                err := ss.search(msg)
                if err != nil {
                    log.Println(err)
                    err2 := ss.publishFailedSearchResult(msg, err.Error())
                    if err2 != nil {
                        log.Println("publish FailedSearchResult failed, error message: ", err2)
                    }
                }
            }
            log.Println("ReceiveSearchMsg, do search done, num of searchMsg = ", len(searchMsg))
        }
    }
}
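
// doUnsolvedMsgSearch waits for tSafe to advance, then drains unsolvedMsg and
// msgBuffer: messages whose end timestamp is covered by the new service time
// are searched, the rest are kept for the next round.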
func (ss *searchService) doUnsolvedMsgSearch() {
    defer ss.wait.Done()
    for {
        select {
        case <-ss.ctx.Done():
            return
        default:
            serviceTime := ss.waitNewTSafe()
            ss.setServiceableTime(serviceTime)
            searchMsg := make([]msgstream.TsMsg, 0)
            tempMsg := make([]msgstream.TsMsg, 0)
            tempMsg = append(tempMsg, ss.unsolvedMsg...)
            ss.unsolvedMsg = ss.unsolvedMsg[:0]
            for _, msg := range tempMsg {
                if msg.EndTs() <= serviceTime {
                    searchMsg = append(searchMsg, msg)
                    continue
                }
                ss.unsolvedMsg = append(ss.unsolvedMsg, msg)
            }

            for {
                msgBufferLength := len(ss.msgBuffer)
                if msgBufferLength <= 0 {
                    break
                }
                msg := <-ss.msgBuffer
                if msg.EndTs() <= serviceTime {
                    searchMsg = append(searchMsg, msg)
                    continue
                }
                ss.unsolvedMsg = append(ss.unsolvedMsg, msg)
            }

            if len(searchMsg) <= 0 {
                continue
            }
            for _, msg := range searchMsg {
                err := ss.search(msg)
                if err != nil {
                    log.Println(err)
                    err2 := ss.publishFailedSearchResult(msg, err.Error())
                    if err2 != nil {
                        log.Println("publish FailedSearchResult failed, error message: ", err2)
                    }
                }
            }
            log.Println("doUnsolvedMsgSearch, do search done, num of searchMsg = ", len(searchMsg))
        }
    }
}
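
// search decodes one SearchMsg, builds a query plan from its DSL, runs the
// plan against every segment of the requested partitions, reduces the
// per-segment results, and publishes a SearchResult message per query group.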
// TODO:: cache map[dsl]plan
// TODO: reBatched search requests
func (ss *searchService) search(msg msgstream.TsMsg) error {
    searchMsg, ok := msg.(*msgstream.SearchMsg)
    if !ok {
        return fmt.Errorf("invalid request type = %v", msg.Type())
    }

    searchTimestamp := searchMsg.Timestamp
    var queryBlob = searchMsg.Query.Value
    query := servicepb.Query{}
    err := proto.Unmarshal(queryBlob, &query)
    if err != nil {
        return errors.New("unmarshal query failed")
    }
    collectionName := query.CollectionName
    partitionTags := query.PartitionTags
    collection, err := ss.replica.getCollectionByName(collectionName)
    if err != nil {
        return err
    }
    collectionID := collection.ID()
    dsl := query.Dsl
    plan, err := createPlan(*collection, dsl)
    if err != nil {
        return err
    }
    placeHolderGroupBlob := query.PlaceholderGroup
    placeholderGroup, err := parserPlaceholderGroup(plan, placeHolderGroupBlob)
    if err != nil {
        return err
    }
    placeholderGroups := make([]*PlaceholderGroup, 0)
    placeholderGroups = append(placeholderGroups, placeholderGroup)

    searchResults := make([]*SearchResult, 0)
    matchedSegments := make([]*Segment, 0)

    // validate all requested partition tags before searching any of them
    for _, partitionTag := range partitionTags {
        hasPartition := ss.replica.hasPartition(collectionID, partitionTag)
        if !hasPartition {
            return errors.New("search failed, invalid partitionTag")
        }
    }

    for _, partitionTag := range partitionTags {
        partition, _ := ss.replica.getPartitionByTag(collectionID, partitionTag)
        for _, segment := range partition.segments {
            //fmt.Println("dsl = ", dsl)

            searchResult, err := segment.segmentSearch(plan, placeholderGroups, []Timestamp{searchTimestamp})

            if err != nil {
                return err
            }
            searchResults = append(searchResults, searchResult)
            matchedSegments = append(matchedSegments, segment)
        }
    }

    // no segment produced a result: publish an empty but successful SearchResult
    if len(searchResults) <= 0 {
        var results = internalpb.SearchResult{
            MsgType:         internalpb.MsgType_kSearchResult,
            Status:          &commonpb.Status{ErrorCode: commonpb.ErrorCode_SUCCESS},
            ReqID:           searchMsg.ReqID,
            ProxyID:         searchMsg.ProxyID,
            QueryNodeID:     ss.queryNodeID,
            Timestamp:       searchTimestamp,
            ResultChannelID: searchMsg.ResultChannelID,
            Hits:            nil,
        }
        searchResultMsg := &msgstream.SearchResultMsg{
            BaseMsg:      msgstream.BaseMsg{HashValues: []uint32{uint32(searchMsg.ResultChannelID)}},
            SearchResult: results,
        }
        err = ss.publishSearchResult(searchResultMsg)
        if err != nil {
            return err
        }
        return nil
    }

    inReduced := make([]bool, len(searchResults))
    numSegment := int64(len(searchResults))
    err = reduceSearchResults(searchResults, numSegment, inReduced)
    if err != nil {
        return err
    }
    err = fillTargetEntry(plan, searchResults, matchedSegments, inReduced)
    if err != nil {
        return err
    }
    marshaledHits, err := reorganizeQueryResults(plan, placeholderGroups, searchResults, numSegment, inReduced)
    if err != nil {
        return err
    }
    hitsBlob, err := marshaledHits.getHitsBlob()
    if err != nil {
        return err
    }

    var offset int64 = 0
    for index := range placeholderGroups {
        hitBlobSizePerQuery, err := marshaledHits.hitBlobSizeInGroup(int64(index))
        if err != nil {
            return err
        }
        // slice the flat hits blob into one marshaled Hits message per query
        hits := make([][]byte, 0)
        for _, hitBlobSize := range hitBlobSizePerQuery {
            hits = append(hits, hitsBlob[offset:offset+hitBlobSize])
            // test code to check marshaled hits
            //marshaledHit := hitsBlob[offset : offset+hitBlobSize]
            //unMarshaledHit := servicepb.Hits{}
            //err = proto.Unmarshal(marshaledHit, &unMarshaledHit)
            //if err != nil {
            //	return err
            //}
            //fmt.Println("hits msg = ", unMarshaledHit)
            offset += hitBlobSize
        }
        var results = internalpb.SearchResult{
            MsgType:         internalpb.MsgType_kSearchResult,
            Status:          &commonpb.Status{ErrorCode: commonpb.ErrorCode_SUCCESS},
            ReqID:           searchMsg.ReqID,
            ProxyID:         searchMsg.ProxyID,
            QueryNodeID:     ss.queryNodeID,
            Timestamp:       searchTimestamp,
            ResultChannelID: searchMsg.ResultChannelID,
            Hits:            hits,
        }
        searchResultMsg := &msgstream.SearchResultMsg{
            BaseMsg:      msgstream.BaseMsg{HashValues: []uint32{uint32(searchMsg.ResultChannelID)}},
            SearchResult: results,
        }
        err = ss.publishSearchResult(searchResultMsg)
        if err != nil {
            return err
        }
    }

    deleteSearchResults(searchResults)
    deleteMarshaledHits(marshaledHits)
    plan.delete()
    placeholderGroup.delete()
    return nil
}
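
// publishSearchResult sends one result message on the search result stream.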
func (ss *searchService) publishSearchResult(msg msgstream.TsMsg) error {
    fmt.Println("Publish SearchResult", msg.HashKeys())
    msgPack := msgstream.MsgPack{}
    msgPack.Msgs = append(msgPack.Msgs, msg)
    err := ss.searchResultMsgStream.Produce(&msgPack)
    return err
}
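
// publishFailedSearchResult reports a failed search back to the proxy as a
// SearchResult with an UNEXPECTED_ERROR status and the error message as reason.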
func (ss *searchService) publishFailedSearchResult(msg msgstream.TsMsg, errMsg string) error {
    msgPack := msgstream.MsgPack{}
    searchMsg, ok := msg.(*msgstream.SearchMsg)
    if !ok {
        return fmt.Errorf("invalid request type = %v", msg.Type())
    }
    var results = internalpb.SearchResult{
        MsgType:         internalpb.MsgType_kSearchResult,
        Status:          &commonpb.Status{ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR, Reason: errMsg},
        ReqID:           searchMsg.ReqID,
        ProxyID:         searchMsg.ProxyID,
        QueryNodeID:     ss.queryNodeID,
        Timestamp:       searchMsg.Timestamp,
        ResultChannelID: searchMsg.ResultChannelID,
        Hits:            [][]byte{},
    }

    tsMsg := &msgstream.SearchResultMsg{
        BaseMsg:      msgstream.BaseMsg{HashValues: []uint32{uint32(searchMsg.ResultChannelID)}},
        SearchResult: results,
    }
    msgPack.Msgs = append(msgPack.Msgs, tsMsg)
    err := ss.searchResultMsgStream.Produce(&msgPack)
    if err != nil {
        return err
    }

    return nil
}