fix: adding blob memory size in binlog serde (#33324)

See: #33280

Signed-off-by: Ted Xu <ted.xu@zilliz.com>
Author: Ted Xu, committed by GitHub
Date:   2024-05-24 10:33:40 +08:00
Parent: 4004e4c545
Commit: a8bd9bea39
2 changed files with 6 additions and 3 deletions

@@ -925,9 +925,10 @@ func (bsw *BinlogStreamWriter) Finalize() (*Blob, error) {
 		return nil, err
 	}
 	return &Blob{
-		Key:    strconv.Itoa(int(bsw.fieldSchema.FieldID)),
-		Value:  b.Bytes(),
-		RowNum: int64(bsw.rw.numRows),
+		Key:        strconv.Itoa(int(bsw.fieldSchema.FieldID)),
+		Value:      b.Bytes(),
+		RowNum:     int64(bsw.rw.numRows),
+		MemorySize: int64(bsw.memorySize),
 	}, nil
 }
@@ -1016,6 +1017,7 @@ func NewBinlogSerializeWriter(schema *schemapb.CollectionSchema, partitionID, se
 			if !ok {
 				return nil, 0, errors.New(fmt.Sprintf("serialize error on type %s", types[fid]))
 			}
+			writers[fid].memorySize += int(typeEntry.sizeof(e))
 			memorySize += typeEntry.sizeof(e)
 		}
 	}
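
A minimal, self-contained sketch of the pattern this change applies, not code from the Milvus repository: a per-field writer accumulates memorySize as values are serialized, and Finalize copies the total into the returned Blob. The Blob field names mirror the diff above; fieldWriter and its methods are simplified, hypothetical stand-ins for the real binlog writer types.

package main

import (
	"fmt"
	"strconv"
)

// Blob mirrors the fields shown in the diff above; the real Milvus struct
// carries additional metadata.
type Blob struct {
	Key        string
	Value      []byte
	RowNum     int64
	MemorySize int64
}

// fieldWriter is a hypothetical stand-in for the per-field binlog writer.
type fieldWriter struct {
	fieldID    int64
	numRows    int
	buf        []byte
	memorySize int // in-memory size accumulated while serializing values
}

// write appends one serialized value and accounts for its size, mirroring
// the `writers[fid].memorySize += int(typeEntry.sizeof(e))` line added above.
func (w *fieldWriter) write(value []byte) {
	w.buf = append(w.buf, value...)
	w.numRows++
	w.memorySize += len(value)
}

// finalize returns the blob with MemorySize populated, mirroring the change
// to BinlogStreamWriter.Finalize.
func (w *fieldWriter) finalize() *Blob {
	return &Blob{
		Key:        strconv.Itoa(int(w.fieldID)),
		Value:      w.buf,
		RowNum:     int64(w.numRows),
		MemorySize: int64(w.memorySize),
	}
}

func main() {
	w := &fieldWriter{fieldID: 101}
	w.write([]byte("hello"))
	w.write([]byte("world"))
	blob := w.finalize()
	fmt.Println(blob.Key, blob.RowNum, blob.MemorySize) // 101 2 10
}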

@@ -160,6 +160,7 @@ func TestBinlogSerializeWriter(t *testing.T) {
 			blob, err := w.Finalize()
 			assert.NoError(t, err)
 			assert.NotNil(t, blob)
+			assert.True(t, blob.MemorySize > 0)
 			newblobs[i] = blob
 			i++
 		}
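
Before this change, blobs produced through this writer path left MemorySize at its zero value. The hypothetical consumer below (not code from the repository) shows why the new assertion is worth having: any accounting summed from blob.MemorySize would otherwise silently report zero.

package main

import "fmt"

// Blob is a minimal stand-in carrying only the field relevant here; the real
// struct also holds Key, Value and RowNum as shown in the diff above.
type Blob struct {
	MemorySize int64
}

// totalBlobMemory is a hypothetical helper summing per-blob memory sizes,
// e.g. for budgeting how many binlogs may be buffered at once.
func totalBlobMemory(blobs []*Blob) int64 {
	var total int64
	for _, b := range blobs {
		total += b.MemorySize
	}
	return total
}

func main() {
	blobs := []*Blob{{MemorySize: 4096}, {MemorySize: 1024}}
	fmt.Println(totalBlobMemory(blobs)) // 5120
}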