Refine logs to reduce print (#17759) (#18028)

Signed-off-by: zhenshan.cao <zhenshan.cao@zilliz.com>
Author: zhenshan.cao <zhenshan.cao@zilliz.com>, 2022-07-04 11:10:20 +08:00 (committed by GitHub)
parent e1cef261cd
commit 9bd35dd84e
5 changed files with 15 additions and 12 deletions

@@ -844,9 +844,14 @@ func (m *meta) CompleteMergeCompaction(compactionLogs []*datapb.CompactionSegmen
CreatedByCompaction: true,
CompactionFrom: compactionFrom,
}
segment := NewSegmentInfo(segmentInfo)
log.Info("CompleteMergeCompaction", zap.Int64("segmentID", segmentInfo.ID),
zap.Int64("collectionID", segmentInfo.CollectionID),
zap.Int64("partitionID", segmentInfo.PartitionID),
zap.Int64("NumOfRows", segmentInfo.NumOfRows),
zap.Any("compactionFrom", segmentInfo.CompactionFrom))
data := make(map[string]string)
for _, s := range segments {
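
For reference, the added log above uses zap's structured field constructors. A minimal, self-contained sketch of the same pattern against go.uber.org/zap directly (Milvus' own log package wraps zap; the logger setup and values below are illustrative assumptions, not the repository's code):

package main

import "go.uber.org/zap"

func main() {
	logger, _ := zap.NewProduction()
	defer logger.Sync()

	// Hypothetical metadata for a segment produced by a merge compaction.
	var (
		segmentID      int64 = 1001
		collectionID   int64 = 1
		partitionID    int64 = 2
		numOfRows      int64 = 50000
		compactionFrom       = []int64{101, 102, 103}
	)

	// Typed constructors (zap.Int64) avoid reflection; zap.Any is the generic
	// fallback used here for the source-segment slice, mirroring the diff.
	logger.Info("CompleteMergeCompaction",
		zap.Int64("segmentID", segmentID),
		zap.Int64("collectionID", collectionID),
		zap.Int64("partitionID", partitionID),
		zap.Int64("NumOfRows", numOfRows),
		zap.Any("compactionFrom", compactionFrom))
}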

@@ -555,6 +555,7 @@ func (t *compactionTask) compact() error {
log.Info("compaction done",
zap.Int64("planID", t.plan.GetPlanID()),
zap.Int64("targetSegmentID", targetSegID),
zap.Int("num of binlog paths", len(segPaths.inPaths)),
zap.Int("num of stats paths", len(segPaths.statsPaths)),
zap.Int("num of delta paths", len(segPaths.deltaInfo)),

@@ -627,7 +627,7 @@ func reduceSearchResultData(searchResultData []*schemapb.SearchResultData, nq in
zap.Int("result No.", i),
zap.Int64("nq", sData.NumQueries),
zap.Int64("topk", sData.TopK),
zap.Int64s("topks", sData.Topks),
zap.Any("len(topks)", len(sData.Topks)),
zap.Any("len(FieldsData)", len(sData.FieldsData)))
if err := checkSearchResultData(sData, nq, topk); err != nil {
log.Warn("invalid search results", zap.Error(err))

@@ -358,10 +358,10 @@ func (loader *segmentLoader) loadSealedSegmentFields(segment *Segment, fields []
return err
}
log.Info("log field binlogs done",
log.Info("load field binlogs done for sealed segment",
zap.Int64("collection", segment.collectionID),
zap.Int64("segment", segment.segmentID),
zap.Any("fields", fields),
zap.Any("len(field)", len(fields)),
zap.String("segmentType", segment.getType().String()))
return nil
@@ -429,7 +429,11 @@ func (loader *segmentLoader) loadIndexedFieldData(segment *Segment, vecFieldInfo
if err != nil {
return err
}
log.Debug("load field's index data done", zap.Int64("segmentID", segment.ID()), zap.Int64("fieldID", fieldID))
log.Info("load field binlogs done for sealed segment with index",
zap.Int64("collection", segment.collectionID),
zap.Int64("segment", segment.segmentID),
zap.Int64("fieldID", fieldID))
segment.setIndexedFieldInfo(fieldID, fieldInfo)
}
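
Both segment-loader hunks settle on the same message shape and repeat the collection/segment identifiers. As an illustrative alternative only (not what the loader does), zap's With can pre-bind those shared fields so each step logs just what is specific to it:

package main

import "go.uber.org/zap"

func main() {
	base, _ := zap.NewProduction()
	defer base.Sync()

	// Pre-bind the identifiers every load-related line should carry.
	segLogger := base.With(
		zap.Int64("collection", 1),
		zap.Int64("segment", 2001),
		zap.String("segmentType", "sealed"))

	// Per-step lines then add only step-specific fields.
	segLogger.Info("load field binlogs done for sealed segment",
		zap.Int("len(field)", 3))
	segLogger.Info("load field binlogs done for sealed segment with index",
		zap.Int64("fieldID", 101))
}

With is purely a convenience here; the commit itself simply repeats the fields in each call.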

@@ -161,13 +161,6 @@ func (b *baseReadTask) Ready() (bool, error) {
gt, _ := tsoutil.ParseTS(guaranteeTs)
st, _ := tsoutil.ParseTS(serviceTime)
if guaranteeTs > serviceTime {
log.Debug("query msg can't do",
zap.Any("collectionID", b.CollectionID),
zap.Any("sm.GuaranteeTimestamp", gt),
zap.Any("serviceTime", st),
zap.Any("delta milliseconds", gt.Sub(st).Milliseconds()),
zap.Any("channel", channel),
zap.Any("msgID", b.ID()))
return false, nil
}
log.Debug("query msg can do",