Remove collection name

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>

parent f3aad3a71c
commit 01e9dc8e3f
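In short, this commit removes the collection/field *name* plumbing from index loading and keys everything by numeric IDs instead: LoadIndexInfo drops field_name, AppendFieldInfo takes only a field ID, the query-node replica is looked up by collection ID, and BuildIndexReq / IndexCodec.Serialize now carry the index ID and index name explicitly. Below is a minimal sketch of the updated load path, adapted from the test changed in this diff; the helper name loadIndexByFieldID and the parameter values are illustrative only, not part of the commit.

// Illustrative sketch only (package querynode), adapted from the updated
// load_index_info_test.go in this diff. loadIndexByFieldID is a hypothetical helper.
func loadIndexByFieldID(fieldID int64, indexBytes [][]byte, indexPaths []string) error {
    loadIndexInfo, err := newLoadIndexInfo()
    if err != nil {
        return err
    }
    defer deleteLoadIndexInfo(loadIndexInfo)

    // Index params are still appended as key/value pairs.
    if err = loadIndexInfo.appendIndexParam("index_type", "IVF_PQ"); err != nil {
        return err
    }
    // Before this commit: loadIndexInfo.appendFieldInfo("field0", fieldID)
    if err = loadIndexInfo.appendFieldInfo(fieldID); err != nil { // field ID only
        return err
    }
    return loadIndexInfo.appendIndex(indexBytes, indexPaths)
}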
@@ -16,7 +16,6 @@
#include "knowhere/index/vector_index/VecIndex.h"

struct LoadIndexInfo {
    std::string field_name;
    int64_t field_id;
    std::map<std::string, std::string> index_params;
    milvus::knowhere::VecIndexPtr index;
@@ -59,11 +59,9 @@ AppendIndexParam(CLoadIndexInfo c_load_index_info, const char* c_index_key, cons
}

CStatus
AppendFieldInfo(CLoadIndexInfo c_load_index_info, const char* c_field_name, int64_t field_id) {
AppendFieldInfo(CLoadIndexInfo c_load_index_info, int64_t field_id) {
    try {
        auto load_index_info = (LoadIndexInfo*)c_load_index_info;
        std::string field_name(c_field_name);
        load_index_info->field_name = field_name;
        load_index_info->field_id = field_id;

        auto status = CStatus();
@@ -97,7 +95,6 @@ AppendIndex(CLoadIndexInfo c_load_index_info, CBinarySet c_binary_set) {
        load_index_info->index =
            milvus::knowhere::VecIndexFactory::GetInstance().CreateVecIndex(index_params["index_type"], mode);
        load_index_info->index->Load(*binary_set);

        auto status = CStatus();
        status.error_code = Success;
        status.error_msg = "";
@@ -33,7 +33,7 @@ CStatus
AppendIndexParam(CLoadIndexInfo c_load_index_info, const char* index_key, const char* index_value);

CStatus
AppendFieldInfo(CLoadIndexInfo c_load_index_info, const char* field_name, int64_t field_id);
AppendFieldInfo(CLoadIndexInfo c_load_index_info, int64_t field_id);

CStatus
AppendIndex(CLoadIndexInfo c_load_index_info, CBinarySet c_binary_set);
@@ -781,7 +781,7 @@ TEST(CApiTest, LoadIndexInfo) {
    status = AppendIndexParam(c_load_index_info, index_param_key2.data(), index_param_value2.data());
    assert(status.error_code == Success);
    std::string field_name = "field0";
    status = AppendFieldInfo(c_load_index_info, field_name.data(), 0);
    status = AppendFieldInfo(c_load_index_info, 0);
    assert(status.error_code == Success);
    status = AppendIndex(c_load_index_info, c_binary_set);
    assert(status.error_code == Success);
@@ -937,7 +937,7 @@ TEST(CApiTest, UpdateSegmentIndex_Without_Predicate) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    status = UpdateSegmentIndex(segment, c_load_index_info);
@@ -1074,7 +1074,7 @@ TEST(CApiTest, UpdateSegmentIndex_With_float_Predicate_Range) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    status = UpdateSegmentIndex(segment, c_load_index_info);
@@ -1211,7 +1211,7 @@ TEST(CApiTest, UpdateSegmentIndex_With_float_Predicate_Term) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    status = UpdateSegmentIndex(segment, c_load_index_info);
@@ -1350,7 +1350,7 @@ TEST(CApiTest, UpdateSegmentIndex_With_binary_Predicate_Range) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    status = UpdateSegmentIndex(segment, c_load_index_info);
@@ -1488,7 +1488,7 @@ TEST(CApiTest, UpdateSegmentIndex_With_binary_Predicate_Term) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    status = UpdateSegmentIndex(segment, c_load_index_info);
@@ -1665,7 +1665,7 @@ TEST(CApiTest, SealedSegment_search_float_Predicate_Range) {
    AppendIndexParam(c_load_index_info, index_type_key.c_str(), index_type_value.c_str());
    AppendIndexParam(c_load_index_info, index_mode_key.c_str(), index_mode_value.c_str());
    AppendIndexParam(c_load_index_info, metric_type_key.c_str(), metric_type_value.c_str());
    AppendFieldInfo(c_load_index_info, "fakevec", 100);
    AppendFieldInfo(c_load_index_info, 100);
    AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

    auto load_index_info = (LoadIndexInfo*)c_load_index_info;
@@ -105,7 +105,6 @@ TEST(Sealed, without_predicate) {
    auto ref_result = QueryResultToJson(qr);

    LoadIndexInfo load_info;
    load_info.field_name = "fakevec";
    load_info.field_id = fake_id.get();
    load_info.index = indexing;
    load_info.index_params["metric_type"] = "L2";
@@ -198,7 +197,6 @@ TEST(Sealed, with_predicate) {
    auto result = indexing->Query(query_dataset, conf, nullptr);

    LoadIndexInfo load_info;
    load_info.field_name = "fakevec";
    load_info.field_id = fake_id.get();
    load_info.index = indexing;
    load_info.index_params["metric_type"] = "L2";
@@ -312,7 +310,6 @@ TEST(Sealed, LoadFieldData) {

    LoadIndexInfo vec_info;
    vec_info.field_id = fakevec_id.get();
    vec_info.field_name = "fakevec";
    vec_info.index = indexing;
    vec_info.index_params["metric_type"] = milvus::knowhere::Metric::L2;
    segment->LoadIndex(vec_info);
@@ -98,7 +98,7 @@ func TestGrpcService(t *testing.T) {

    var binlogLock sync.Mutex
    binlogPathArray := make([]string, 0, 16)
    core.BuildIndexReq = func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair) (typeutil.UniqueID, error) {
    core.BuildIndexReq = func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair, indexID typeutil.UniqueID, indexName string) (typeutil.UniqueID, error) {
        binlogLock.Lock()
        defer binlogLock.Unlock()
        binlogPathArray = append(binlogPathArray, binlog...)
@@ -247,7 +247,7 @@ func (it *IndexBuildTask) Execute() error {
    }

    var indexCodec storage.IndexCodec
    serializedIndexBlobs, err := indexCodec.Serialize(getStorageBlobs(indexBlobs), indexParams)
    serializedIndexBlobs, err := indexCodec.Serialize(getStorageBlobs(indexBlobs), indexParams, it.cmd.Req.IndexName, it.cmd.Req.IndexID)
    if err != nil {
        return err
    }
@@ -152,7 +152,7 @@ type Core struct {
    GetBinlogFilePathsFromDataServiceReq func(segID typeutil.UniqueID, fieldID typeutil.UniqueID) ([]string, error)

    //TODO, call index builder's client to build index, return build id
    BuildIndexReq func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair) (typeutil.UniqueID, error)
    BuildIndexReq func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair, indexID typeutil.UniqueID, indexName string) (typeutil.UniqueID, error)

    //TODO, proxy service interface, notify proxy service to drop collection
    InvalidateCollectionMetaCache func(ts typeutil.Timestamp, dbName string, collectionName string) error
@@ -671,11 +671,13 @@ func (c *Core) SetDataService(s DataServiceInterface) error {
}

func (c *Core) SetIndexService(s IndexServiceInterface) error {
    c.BuildIndexReq = func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair) (typeutil.UniqueID, error) {
    c.BuildIndexReq = func(binlog []string, typeParams []*commonpb.KeyValuePair, indexParams []*commonpb.KeyValuePair, indexID typeutil.UniqueID, indexName string) (typeutil.UniqueID, error) {
        rsp, err := s.BuildIndex(&indexpb.BuildIndexRequest{
            DataPaths: binlog,
            TypeParams: typeParams,
            IndexParams: indexParams,
            IndexID: indexID,
            IndexName: indexName,
        })
        if err != nil {
            return 0, err
@@ -628,7 +628,7 @@ func (t *CreateIndexTask) BuildIndex() error {
            })
        }
    }
    bldID, err = t.core.BuildIndexReq(binlogs, t.fieldSchema.TypeParams, t.indexParams)
    bldID, err = t.core.BuildIndexReq(binlogs, t.fieldSchema.TypeParams, t.indexParams, idxID, t.indexName)
    if err != nil {
        return err
    }
@@ -116,9 +116,16 @@ message InsertRequest {
message SearchRequest {
  common.MsgBase base = 1;
  string result_channelID = 2;
  common.Blob query = 3;

  int64 dbID = 3;
  int64 collectionID = 4;
  repeated int64 partitionIDs = 5;
  string dsl = 6;
  // serialized `PlaceholderGroup`
  bytes placeholder_group = 7;
  common.Blob query = 8;
}

message SearchResults {
  common.MsgBase base = 1;
  common.Status status = 2;
@@ -895,12 +895,18 @@ func (m *InsertRequest) GetRowData() []*commonpb.Blob {
}

type SearchRequest struct {
    Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
    ResultChannelID string `protobuf:"bytes,2,opt,name=result_channelID,json=resultChannelID,proto3" json:"result_channelID,omitempty"`
    Query *commonpb.Blob `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized []byte `json:"-"`
    XXX_sizecache int32 `json:"-"`
    Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
    ResultChannelID string `protobuf:"bytes,2,opt,name=result_channelID,json=resultChannelID,proto3" json:"result_channelID,omitempty"`
    DbID int64 `protobuf:"varint,3,opt,name=dbID,proto3" json:"dbID,omitempty"`
    CollectionID int64 `protobuf:"varint,4,opt,name=collectionID,proto3" json:"collectionID,omitempty"`
    PartitionIDs []int64 `protobuf:"varint,5,rep,packed,name=partitionIDs,proto3" json:"partitionIDs,omitempty"`
    Dsl string `protobuf:"bytes,6,opt,name=dsl,proto3" json:"dsl,omitempty"`
    // serialized `PlaceholderGroup`
    PlaceholderGroup []byte `protobuf:"bytes,7,opt,name=placeholder_group,json=placeholderGroup,proto3" json:"placeholder_group,omitempty"`
    Query *commonpb.Blob `protobuf:"bytes,8,opt,name=query,proto3" json:"query,omitempty"`
    XXX_NoUnkeyedLiteral struct{} `json:"-"`
    XXX_unrecognized []byte `json:"-"`
    XXX_sizecache int32 `json:"-"`
}

func (m *SearchRequest) Reset() { *m = SearchRequest{} }
@@ -942,6 +948,41 @@ func (m *SearchRequest) GetResultChannelID() string {
    return ""
}

func (m *SearchRequest) GetDbID() int64 {
    if m != nil {
        return m.DbID
    }
    return 0
}

func (m *SearchRequest) GetCollectionID() int64 {
    if m != nil {
        return m.CollectionID
    }
    return 0
}

func (m *SearchRequest) GetPartitionIDs() []int64 {
    if m != nil {
        return m.PartitionIDs
    }
    return nil
}

func (m *SearchRequest) GetDsl() string {
    if m != nil {
        return m.Dsl
    }
    return ""
}

func (m *SearchRequest) GetPlaceholderGroup() []byte {
    if m != nil {
        return m.PlaceholderGroup
    }
    return nil
}

func (m *SearchRequest) GetQuery() *commonpb.Blob {
    if m != nil {
        return m.Query
@@ -1729,97 +1770,100 @@ func init() {
func init() { proto.RegisterFile("internal.proto", fileDescriptor_41f4a519b878ee3b) }

var fileDescriptor_41f4a519b878ee3b = []byte{
    // 1463 bytes of a gzipped FileDescriptorProto
    // 1510 bytes of a gzipped FileDescriptorProto
    // (raw gzipped descriptor bytes omitted here: the regenerated blob grows from 1463 to 1510 bytes)
}
@@ -23,9 +23,9 @@ type Collection struct {
    partitions []*Partition
}

func (c *Collection) Name() string {
    return c.schema.Name
}
//func (c *Collection) Name() string {
//  return c.schema.Name
//}

func (c *Collection) ID() UniqueID {
    return c.id
|
||||
/*
|
||||
CCollection
|
||||
NewCollection(const char* schema_proto_blob);
|
||||
|
||||
const char*
|
||||
GetCollectionName(CCollection collection);
|
||||
*/
|
||||
schemaBlob := proto.MarshalTextString(schema)
|
||||
|
||||
|
@ -39,9 +39,8 @@ type collectionReplica interface {
|
||||
addCollection(collectionID UniqueID, schema *schemapb.CollectionSchema) error
|
||||
removeCollection(collectionID UniqueID) error
|
||||
getCollectionByID(collectionID UniqueID) (*Collection, error)
|
||||
getCollectionByName(collectionName string) (*Collection, error)
|
||||
hasCollection(collectionID UniqueID) bool
|
||||
getVecFieldsByCollectionID(collectionID UniqueID) (map[int64]string, error)
|
||||
getVecFieldsByCollectionID(collectionID UniqueID) ([]int64, error)
|
||||
|
||||
// partition
|
||||
// Partition tags in different collections are not unique,
|
||||
@@ -150,19 +149,6 @@ func (colReplica *collectionReplicaImpl) getCollectionByIDPrivate(collectionID U
    return nil, errors.New("cannot find collection, id = " + strconv.FormatInt(collectionID, 10))
}

func (colReplica *collectionReplicaImpl) getCollectionByName(collectionName string) (*Collection, error) {
    colReplica.mu.RLock()
    defer colReplica.mu.RUnlock()

    for _, collection := range colReplica.collections {
        if collection.Name() == collectionName {
            return collection, nil
        }
    }

    return nil, errors.New("Cannot found collection: " + collectionName)
}

func (colReplica *collectionReplicaImpl) hasCollection(collectionID UniqueID) bool {
    colReplica.mu.RLock()
    defer colReplica.mu.RUnlock()
@@ -175,7 +161,7 @@ func (colReplica *collectionReplicaImpl) hasCollection(collectionID UniqueID) bo
    return false
}

func (colReplica *collectionReplicaImpl) getVecFieldsByCollectionID(collectionID UniqueID) (map[int64]string, error) {
func (colReplica *collectionReplicaImpl) getVecFieldsByCollectionID(collectionID UniqueID) ([]int64, error) {
    colReplica.mu.RLock()
    defer colReplica.mu.RUnlock()

@@ -184,10 +170,10 @@ func (colReplica *collectionReplicaImpl) getVecFieldsByCollectionID(collectionID
        return nil, err
    }

    vecFields := make(map[int64]string)
    vecFields := make([]int64, 0)
    for _, field := range col.Schema().Fields {
        if field.DataType == schemapb.DataType_VECTOR_BINARY || field.DataType == schemapb.DataType_VECTOR_FLOAT {
            vecFields[field.FieldID] = field.Name
            vecFields = append(vecFields, field.FieldID)
        }
    }
@@ -9,20 +9,20 @@ import (
//----------------------------------------------------------------------------------------------------- collection
func TestCollectionReplica_getCollectionNum(t *testing.T) {
    node := newQueryNodeMock()
    initTestMeta(t, node, "collection0", 0, 0)
    initTestMeta(t, node, 0, 0)
    assert.Equal(t, node.replica.getCollectionNum(), 1)
    node.Stop()
}

func TestCollectionReplica_addCollection(t *testing.T) {
    node := newQueryNodeMock()
    initTestMeta(t, node, "collection0", 0, 0)
    initTestMeta(t, node, 0, 0)
    node.Stop()
}

func TestCollectionReplica_removeCollection(t *testing.T) {
    node := newQueryNodeMock()
    initTestMeta(t, node, "collection0", 0, 0)
    initTestMeta(t, node, 0, 0)
    assert.Equal(t, node.replica.getCollectionNum(), 1)

    err := node.replica.removeCollection(0)
@@ -33,37 +33,19 @@ func TestCollectionReplica_removeCollection(t *testing.T) {

func TestCollectionReplica_getCollectionByID(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)
    targetCollection, err := node.replica.getCollectionByID(collectionID)
    assert.NoError(t, err)
    assert.NotNil(t, targetCollection)
    assert.Equal(t, targetCollection.Name(), collectionName)
    assert.Equal(t, targetCollection.ID(), collectionID)
    node.Stop()
}

func TestCollectionReplica_getCollectionByName(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)

    targetCollection, err := node.replica.getCollectionByName(collectionName)
    assert.NoError(t, err)
    assert.NotNil(t, targetCollection)
    assert.Equal(t, targetCollection.Name(), collectionName)
    assert.Equal(t, targetCollection.ID(), collectionID)

    node.Stop()
}

func TestCollectionReplica_hasCollection(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    hasCollection := node.replica.hasCollection(collectionID)
    assert.Equal(t, hasCollection, true)
@@ -76,9 +58,8 @@ func TestCollectionReplica_hasCollection(t *testing.T) {
//----------------------------------------------------------------------------------------------------- partition
func TestCollectionReplica_getPartitionNum(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    partitionTags := []string{"a", "b", "c"}
    for _, tag := range partitionTags {
@@ -97,9 +78,8 @@ func TestCollectionReplica_getPartitionNum(t *testing.T) {

func TestCollectionReplica_addPartition(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    partitionTags := []string{"a", "b", "c"}
    for _, tag := range partitionTags {
@@ -114,9 +94,8 @@ func TestCollectionReplica_addPartition(t *testing.T) {

func TestCollectionReplica_removePartition(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    partitionTags := []string{"a", "b", "c"}

@@ -134,11 +113,10 @@ func TestCollectionReplica_removePartition(t *testing.T) {

func TestCollectionReplica_addPartitionsByCollectionMeta(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)
    collectionMeta.PartitionTags = []string{"p0", "p1", "p2"}

    err := node.replica.addPartitionsByCollectionMeta(collectionMeta)
@@ -158,11 +136,10 @@ func TestCollectionReplica_addPartitionsByCollectionMeta(t *testing.T) {

func TestCollectionReplica_removePartitionsByCollectionMeta(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)
    collectionMeta.PartitionTags = []string{"p0"}

    err := node.replica.addPartitionsByCollectionMeta(collectionMeta)
@@ -183,11 +160,10 @@ func TestCollectionReplica_removePartitionsByCollectionMeta(t *testing.T) {

func TestCollectionReplica_getPartitionByTag(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)

    for _, tag := range collectionMeta.PartitionTags {
        err := node.replica.addPartition2(collectionID, tag)
@@ -202,11 +178,10 @@ func TestCollectionReplica_getPartitionByTag(t *testing.T) {

func TestCollectionReplica_hasPartition(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)
    err := node.replica.addPartition2(collectionID, collectionMeta.PartitionTags[0])
    assert.NoError(t, err)
    hasPartition := node.replica.hasPartition(collectionID, "default")
@@ -219,9 +194,8 @@ func TestCollectionReplica_hasPartition(t *testing.T) {
//----------------------------------------------------------------------------------------------------- segment
func TestCollectionReplica_addSegment(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    const segmentNum = 3
    tag := "default"
@@ -238,9 +212,8 @@ func TestCollectionReplica_addSegment(t *testing.T) {

func TestCollectionReplica_removeSegment(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    const segmentNum = 3
    tag := "default"
@@ -260,9 +233,8 @@ func TestCollectionReplica_removeSegment(t *testing.T) {

func TestCollectionReplica_getSegmentByID(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    const segmentNum = 3
    tag := "default"
@@ -280,9 +252,8 @@ func TestCollectionReplica_getSegmentByID(t *testing.T) {

func TestCollectionReplica_hasSegment(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    const segmentNum = 3
    tag := "default"
@@ -304,9 +275,8 @@ func TestCollectionReplica_hasSegment(t *testing.T) {

func TestCollectionReplica_freeAll(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    node.Stop()
@@ -8,11 +8,10 @@ import (

func TestCollection_Partitions(t *testing.T) {
    node := newQueryNodeMock()
    collectionName := "collection0"
    collectionID := UniqueID(0)
    initTestMeta(t, node, collectionName, collectionID, 0)
    initTestMeta(t, node, collectionID, 0)

    collection, err := node.replica.getCollectionByName(collectionName)
    collection, err := node.replica.getCollectionByID(collectionID)
    assert.NoError(t, err)

    partitions := collection.Partitions()
@@ -20,22 +19,18 @@ func TestCollection_Partitions(t *testing.T) {
}

func TestCollection_newCollection(t *testing.T) {
    collectionName := "collection0"
    collectionID := UniqueID(0)
    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)

    collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
    assert.Equal(t, collection.Name(), collectionName)
    assert.Equal(t, collection.ID(), collectionID)
}

func TestCollection_deleteCollection(t *testing.T) {
    collectionName := "collection0"
    collectionID := UniqueID(0)
    collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
    collectionMeta := genTestCollectionMeta(collectionID, false)

    collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
    assert.Equal(t, collection.Name(), collectionName)
    assert.Equal(t, collection.ID(), collectionID)
    deleteCollection(collection)
}
@@ -16,7 +16,7 @@ import (
// NOTE: start pulsar before test
func TestDataSyncService_Start(t *testing.T) {
    node := newQueryNodeMock()
    initTestMeta(t, node, "collection0", 0, 0)
    initTestMeta(t, node, 0, 0)
    // test data generate
    const msgLength = 10
    const DIM = 16
@@ -61,12 +61,12 @@ func TestDataSyncService_Start(t *testing.T) {
                Timestamp: uint64(i + 1000),
                SourceID: 0,
            },
            CollectionName: "collection0",
            PartitionName: "default",
            SegmentID: int64(0),
            ChannelID: "0",
            Timestamps: []uint64{uint64(i + 1000), uint64(i + 1000)},
            RowIDs: []int64{int64(i), int64(i)},
            CollectionID: UniqueID(0),
            PartitionName: "default",
            SegmentID: int64(0),
            ChannelID: "0",
            Timestamps: []uint64{uint64(i + 1000), uint64(i + 1000)},
            RowIDs: []int64{int64(i), int64(i)},
            RowData: []*commonpb.Blob{
                {Value: rawData},
                {Value: rawData},
@@ -37,7 +37,7 @@ func (ddNode *ddNode) Operate(in []*Msg) []*Msg {
    }

    var ddMsg = ddMsg{
        collectionRecords: make(map[string][]metaOperateRecord),
        collectionRecords: make(map[UniqueID][]metaOperateRecord),
        partitionRecords: make(map[string][]metaOperateRecord),
        timeRange: TimeRange{
            timestampMin: msMsg.TimestampMin(),
@@ -108,8 +108,7 @@ func (ddNode *ddNode) createCollection(msg *msgstream.CreateCollectionMsg) {
        return
    }

    collectionName := schema.Name
    ddNode.ddMsg.collectionRecords[collectionName] = append(ddNode.ddMsg.collectionRecords[collectionName],
    ddNode.ddMsg.collectionRecords[collectionID] = append(ddNode.ddMsg.collectionRecords[collectionID],
        metaOperateRecord{
            createOrDrop: true,
            timestamp: msg.Base.Timestamp,
@@ -125,8 +124,7 @@ func (ddNode *ddNode) dropCollection(msg *msgstream.DropCollectionMsg) {
    // return
    //}

    collectionName := msg.CollectionName
    ddNode.ddMsg.collectionRecords[collectionName] = append(ddNode.ddMsg.collectionRecords[collectionName],
    ddNode.ddMsg.collectionRecords[collectionID] = append(ddNode.ddMsg.collectionRecords[collectionID],
        metaOperateRecord{
            createOrDrop: false,
            timestamp: msg.Base.Timestamp,
@@ -110,7 +110,7 @@ func (fdmNode *filterDmNode) filterInvalidInsertMessage(msg *msgstream.InsertMsg
    //}

    // No dd record, do all insert requests.
    records, ok := fdmNode.ddMsg.collectionRecords[msg.CollectionName]
    records, ok := fdmNode.ddMsg.collectionRecords[msg.CollectionID]
    if !ok {
        return msg
    }
@@ -81,12 +81,7 @@ func (iNode *insertNode) Operate(in []*Msg) []*Msg {

        // check if segment exists, if not, create this segment
        if !iNode.replica.hasSegment(task.SegmentID) {
            collection, err := iNode.replica.getCollectionByName(task.CollectionName)
            if err != nil {
                log.Println(err)
                continue
            }
            err = iNode.replica.addSegment2(task.SegmentID, task.PartitionName, collection.ID(), segTypeGrowing)
            err := iNode.replica.addSegment2(task.SegmentID, task.PartitionName, task.CollectionID, segTypeGrowing)
            if err != nil {
                log.Println(err)
                continue
@@ -14,7 +14,7 @@ type key2SegMsg struct {
}

type ddMsg struct {
    collectionRecords map[string][]metaOperateRecord
    collectionRecords map[UniqueID][]metaOperateRecord
    partitionRecords map[string][]metaOperateRecord
    gcRecord *gcRecord
    timeRange TimeRange
@@ -29,7 +29,7 @@ func (s *Segment) buildIndex(collection *Collection) commonpb.Status {
    return commonpb.Status{ErrorCode: commonpb.ErrorCode_SUCCESS}
}

func (s *Segment) dropIndex(fieldName string) commonpb.Status {
func (s *Segment) dropIndex(fieldID int64) commonpb.Status {
    // WARN: Not support yet

    return commonpb.Status{ErrorCode: commonpb.ErrorCode_SUCCESS}
@@ -51,10 +51,9 @@ func (li *LoadIndexInfo) appendIndexParam(indexKey string, indexValue string) er
    return nil
}

func (li *LoadIndexInfo) appendFieldInfo(fieldName string, fieldID int64) error {
    cFieldName := C.CString(fieldName)
func (li *LoadIndexInfo) appendFieldInfo(fieldID int64) error {
    cFieldID := C.long(fieldID)
    status := C.AppendFieldInfo(li.cLoadIndexInfo, cFieldName, cFieldID)
    status := C.AppendFieldInfo(li.cLoadIndexInfo, cFieldID)
    errorCode := status.error_code

    if errorCode != 0 {
@ -1,12 +1,64 @@
|
||||
package querynode
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/zilliztech/milvus-distributed/internal/indexnode"
|
||||
"github.com/zilliztech/milvus-distributed/internal/proto/commonpb"
|
||||
)
|
||||
|
||||
func genIndexBinarySet() ([][]byte, error) {
|
||||
const (
|
||||
msgLength = 1000
|
||||
DIM = 16
|
||||
)
|
||||
|
||||
indexParams := make(map[string]string)
|
||||
indexParams["index_type"] = "IVF_PQ"
|
||||
indexParams["index_mode"] = "cpu"
|
||||
indexParams["dim"] = "16"
|
||||
indexParams["k"] = "10"
|
||||
indexParams["nlist"] = "100"
|
||||
indexParams["nprobe"] = "10"
|
||||
indexParams["m"] = "4"
|
||||
indexParams["nbits"] = "8"
|
||||
indexParams["metric_type"] = "L2"
|
||||
indexParams["SLICE_SIZE"] = "4"
|
||||
|
||||
typeParams := make(map[string]string)
|
||||
typeParams["dim"] = strconv.Itoa(DIM)
|
||||
var indexRowData []float32
|
||||
for n := 0; n < msgLength; n++ {
|
||||
for i := 0; i < DIM; i++ {
|
||||
indexRowData = append(indexRowData, float32(n*i))
|
||||
}
|
||||
}
|
||||
|
||||
index, err := indexnode.NewCIndex(typeParams, indexParams)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = index.BuildFloatVecIndexWithoutIds(indexRowData)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// save index to minio
|
||||
binarySet, err := index.Serialize()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
bytesSet := make([][]byte, 0)
|
||||
for i := range binarySet {
|
||||
bytesSet = append(bytesSet, binarySet[i].Value)
|
||||
}
|
||||
return bytesSet, nil
|
||||
}
|
||||
|
||||
func TestLoadIndexInfo(t *testing.T) {
|
||||
indexParams := make([]*commonpb.KeyValuePair, 0)
|
||||
indexParams = append(indexParams, &commonpb.KeyValuePair{
|
||||
@ -18,19 +70,21 @@ func TestLoadIndexInfo(t *testing.T) {
|
||||
Value: "cpu",
|
||||
})
|
||||
|
||||
indexBytes := make([][]byte, 0)
|
||||
indexValue := make([]byte, 10)
|
||||
indexBytes = append(indexBytes, indexValue)
|
||||
indexBytes, err := genIndexBinarySet()
|
||||
assert.NoError(t, err)
|
||||
indexPaths := make([]string, 0)
|
||||
indexPaths = append(indexPaths, "index-0")
|
||||
indexPaths = append(indexPaths, "IVF")
|
||||
|
||||
loadIndexInfo, err := newLoadIndexInfo()
|
||||
assert.Nil(t, err)
|
||||
for _, indexParam := range indexParams {
|
||||
loadIndexInfo.appendIndexParam(indexParam.Key, indexParam.Value)
|
||||
err = loadIndexInfo.appendIndexParam(indexParam.Key, indexParam.Value)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
loadIndexInfo.appendFieldInfo("field0", 0)
|
||||
loadIndexInfo.appendIndex(indexBytes, indexPaths)
|
||||
err = loadIndexInfo.appendFieldInfo(0)
|
||||
assert.NoError(t, err)
|
||||
err = loadIndexInfo.appendIndex(indexBytes, indexPaths)
|
||||
assert.NoError(t, err)
|
||||
|
||||
deleteLoadIndexInfo(loadIndexInfo)
|
||||
}
|
||||
|
@ -47,7 +47,6 @@ type loadService struct {
|
||||
type loadIndex struct {
|
||||
segmentID UniqueID
|
||||
fieldID int64
|
||||
fieldName string
|
||||
indexPaths []string
|
||||
}
|
||||
|
||||
@ -231,7 +230,7 @@ func (s *loadService) loadIndex(indexPath []string) ([][]byte, indexParam, error
|
||||
// get index params when detecting indexParamPrefix
|
||||
if path.Base(p) == storage.IndexParamsFile {
|
||||
indexCodec := storage.NewIndexCodec()
|
||||
_, indexParams, err = indexCodec.Deserialize([]*storage.Blob{
|
||||
_, indexParams, _, _, err = indexCodec.Deserialize([]*storage.Blob{
|
||||
{
|
||||
Key: storage.IndexParamsFile,
|
||||
Value: []byte(indexPiece),
|
||||
@ -262,7 +261,7 @@ func (s *loadService) updateSegmentIndex(indexParams indexParam, bytesIndex [][]
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = loadIndexInfo.appendFieldInfo(l.fieldName, l.fieldID)
|
||||
err = loadIndexInfo.appendFieldInfo(l.fieldID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -422,10 +421,9 @@ func (s *loadService) loadIndexImmediate(segment *Segment, indexPaths []string)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for id, name := range vecFieldIDs {
|
||||
for _, id := range vecFieldIDs {
|
||||
l := &loadIndex{
|
||||
segmentID: segment.ID(),
|
||||
fieldName: name,
|
||||
fieldID: id,
|
||||
indexPaths: indexPaths,
|
||||
}
|
||||
@ -449,10 +447,9 @@ func (s *loadService) loadIndexDelayed(collectionID, segmentID UniqueID, indexPa
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for id, name := range vecFieldIDs {
|
||||
for _, id := range vecFieldIDs {
|
||||
l := &loadIndex{
|
||||
segmentID: segmentID,
|
||||
fieldName: name,
|
||||
fieldID: id,
|
||||
indexPaths: indexPaths,
|
||||
}
|
||||
@ -487,10 +484,18 @@ func (s *loadService) getInsertBinlogPaths(segmentID UniqueID) ([]*internalpb2.S
|
||||
return pathResponse.Paths, pathResponse.FieldIDs, nil
|
||||
}
|
||||
|
||||
func (s *loadService) filterOutVectorFields(fieldIDs []int64, vectorFields map[int64]string) []int64 {
|
||||
func (s *loadService) filterOutVectorFields(fieldIDs []int64, vectorFields []int64) []int64 {
|
||||
containsFunc := func(s []int64, e int64) bool {
|
||||
for _, a := range s {
|
||||
if a == e {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
targetFields := make([]int64, 0)
|
||||
for _, id := range fieldIDs {
|
||||
if _, ok := vectorFields[id]; !ok {
|
||||
if !containsFunc(vectorFields, id) {
|
||||
targetFields = append(targetFields, id)
|
||||
}
|
||||
}
|
||||
|
@ -89,7 +89,7 @@ import (
// Timestamp: timestamps[0],
// SourceID: 0,
// },
// CollectionName: "collection0",
// CollectionID: UniqueID(collectionID),
// PartitionName: "default",
// SegmentID: segmentID,
// ChannelID: "0",
@ -425,7 +425,7 @@ import (
// Timestamp: timestamps[0],
// SourceID: 0,
// },
// CollectionName: "collection0",
// CollectionID: UniqueID(collectionID),
// PartitionName: "default",
// SegmentID: segmentID,
// ChannelID: "0",
@ -726,7 +726,7 @@ func generateInsertBinLog(collectionID UniqueID, partitionID UniqueID, segmentID
}

// buffer data to binLogs
collMeta := genTestCollectionMeta("collection0", collectionID, false)
collMeta := genTestCollectionMeta(collectionID, false)
collMeta.Schema.Fields = append(collMeta.Schema.Fields, &schemapb.FieldSchema{
FieldID: 0,
Name: "uid",
@ -853,7 +853,7 @@ func generateIndex(segmentID UniqueID) ([]string, error) {

// serialize index params
var indexCodec storage.IndexCodec
serializedIndexBlobs, err := indexCodec.Serialize(binarySet, indexParams)
serializedIndexBlobs, err := indexCodec.Serialize(binarySet, indexParams, "index_test_name", 1234)
if err != nil {
return nil, err
}
@ -871,7 +871,7 @@ func generateIndex(segmentID UniqueID) ([]string, error) {
return indexPaths, nil
}

func doInsert(ctx context.Context, collectionName string, partitionTag string, segmentID UniqueID) error {
func doInsert(ctx context.Context, collectionID UniqueID, partitionTag string, segmentID UniqueID) error {
const msgLength = 1000
const DIM = 16

@ -907,12 +907,12 @@ func doInsert(ctx context.Context, collectionName string, partitionTag string, s
Timestamp: uint64(i + 1000),
SourceID: 0,
},
CollectionName: collectionName,
PartitionName: partitionTag,
SegmentID: segmentID,
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
CollectionID: collectionID,
PartitionName: partitionTag,
SegmentID: segmentID,
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
RowData: []*commonpb.Blob{
{Value: rawData},
},
@ -1057,8 +1057,7 @@ func TestSegmentLoad_Search_Vector(t *testing.T) {
ctx := node.queryNodeLoopCtx
node.loadService = newLoadService(ctx, nil, nil, nil, node.replica, nil)

collectionName := "collection0"
initTestMeta(t, node, collectionName, collectionID, 0)
initTestMeta(t, node, collectionID, 0)

err := node.replica.addPartition(collectionID, partitionID)
assert.NoError(t, err)
@ -1119,7 +1118,7 @@ func TestSegmentLoad_Search_Vector(t *testing.T) {
placeholderGroups = append(placeholderGroups, holder)

// wait for segment building index
time.Sleep(3 * time.Second)
time.Sleep(1 * time.Second)

_, err = segment.segmentSearch(plan, placeholderGroups, []Timestamp{searchTimestamp})
assert.Nil(t, err)

@ -96,9 +96,8 @@ func TestMetaService_isSegmentChannelRangeInQueryNodeChannelRange(t *testing.T)
}

func TestMetaService_printCollectionStruct(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)
printCollectionStruct(collectionMeta)
}

@ -156,7 +155,7 @@ func TestMetaService_processCollectionCreate(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))
node.Stop()
@ -164,9 +163,8 @@ func TestMetaService_processCollectionCreate(t *testing.T) {

func TestMetaService_processSegmentCreate(t *testing.T) {
node := newQueryNodeMock()
collectionName := "collection0"
collectionID := UniqueID(0)
initTestMeta(t, node, collectionName, collectionID, 0)
initTestMeta(t, node, collectionID, 0)
node.metaService = newMetaService(node.queryNodeLoopCtx, node.replica)

id := "0"
@ -222,7 +220,7 @@ func TestMetaService_processCreate(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -242,10 +240,9 @@ func TestMetaService_processCreate(t *testing.T) {

func TestMetaService_processSegmentModify(t *testing.T) {
node := newQueryNodeMock()
collectionName := "collection0"
collectionID := UniqueID(0)
segmentID := UniqueID(0)
initTestMeta(t, node, collectionName, collectionID, segmentID)
initTestMeta(t, node, collectionID, segmentID)
node.metaService = newMetaService(node.queryNodeLoopCtx, node.replica)

id := "0"
@ -314,7 +311,7 @@ func TestMetaService_processCollectionModify(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -363,7 +360,7 @@ func TestMetaService_processCollectionModify(t *testing.T) {
`

(*node.metaService).processCollectionModify(id, newValue)
collection, err = node.replica.getCollectionByName("test")
collection, err = node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -422,7 +419,7 @@ func TestMetaService_processModify(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -485,7 +482,7 @@ func TestMetaService_processModify(t *testing.T) {
`

(*node.metaService).processModify(key1, msg3)
collection, err = node.replica.getCollectionByName("test")
collection, err = node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -517,9 +514,8 @@ func TestMetaService_processModify(t *testing.T) {

func TestMetaService_processSegmentDelete(t *testing.T) {
node := newQueryNodeMock()
collectionName := "collection0"
collectionID := UniqueID(0)
initTestMeta(t, node, collectionName, collectionID, 0)
initTestMeta(t, node, collectionID, 0)
node.metaService = newMetaService(node.queryNodeLoopCtx, node.replica)

id := "0"
@ -578,7 +574,7 @@ func TestMetaService_processCollectionDelete(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -626,7 +622,7 @@ func TestMetaService_processDelete(t *testing.T) {
collectionNum := node.replica.getCollectionNum()
assert.Equal(t, collectionNum, 1)

collection, err := node.replica.getCollectionByName("test")
collection, err := node.replica.getCollectionByID(UniqueID(0))
assert.NoError(t, err)
assert.Equal(t, collection.ID(), UniqueID(0))

@ -8,11 +8,10 @@ import (

func TestPartition_Segments(t *testing.T) {
node := newQueryNodeMock()
collectionName := "collection0"
collectionID := UniqueID(0)
initTestMeta(t, node, collectionName, collectionID, 0)
initTestMeta(t, node, collectionID, 0)

collection, err := node.replica.getCollectionByName(collectionName)
collection, err := node.replica.getCollectionByID(collectionID)
assert.NoError(t, err)

partitions := collection.Partitions()

@ -12,9 +12,8 @@ import (
)

func TestPlan_Plan(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)

@ -32,9 +31,8 @@ func TestPlan_Plan(t *testing.T) {
}

func TestPlan_PlaceholderGroup(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)

@ -27,7 +27,7 @@ func setup() {
Params.MetaRootPath = "/etcd/test/root/querynode"
}

func genTestCollectionMeta(collectionName string, collectionID UniqueID, isBinary bool) *etcdpb.CollectionMeta {
func genTestCollectionMeta(collectionID UniqueID, isBinary bool) *etcdpb.CollectionMeta {
var fieldVec schemapb.FieldSchema
if isBinary {
fieldVec = schemapb.FieldSchema{
@ -76,8 +76,9 @@ func genTestCollectionMeta(collectionName string, collectionID UniqueID, isBinar
DataType: schemapb.DataType_INT32,
}

collectionName := rand.Int63n(1000000)
schema := schemapb.CollectionSchema{
Name: collectionName,
Name: "collection-" + strconv.FormatInt(collectionName, 10),
AutoID: true,
Fields: []*schemapb.FieldSchema{
&fieldVec, &fieldInt,
@ -95,19 +96,18 @@ func genTestCollectionMeta(collectionName string, collectionID UniqueID, isBinar
return &collectionMeta
}

func initTestMeta(t *testing.T, node *QueryNode, collectionName string, collectionID UniqueID, segmentID UniqueID, optional ...bool) {
func initTestMeta(t *testing.T, node *QueryNode, collectionID UniqueID, segmentID UniqueID, optional ...bool) {
isBinary := false
if len(optional) > 0 {
isBinary = optional[0]
}
collectionMeta := genTestCollectionMeta(collectionName, collectionID, isBinary)
collectionMeta := genTestCollectionMeta(collectionID, isBinary)

var err = node.replica.addCollection(collectionMeta.ID, collectionMeta.Schema)
assert.NoError(t, err)

collection, err := node.replica.getCollectionByName(collectionName)
collection, err := node.replica.getCollectionByID(collectionID)
assert.NoError(t, err)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)
assert.Equal(t, node.replica.getCollectionNum(), 1)

@ -13,10 +13,9 @@ import (
)

func TestReduce_AllFunc(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
segmentID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
segment := newSegment2(collection, segmentID, Params.DefaultPartitionTag, collectionID, segTypeGrowing)

@ -238,14 +238,13 @@ func (ss *searchService) search(msg msgstream.TsMsg) error {
span.LogFields(oplog.Error(err))
return errors.New("unmarshal query failed")
}
collectionName := query.CollectionName
collectionID := searchMsg.CollectionID
partitionTagsInQuery := query.PartitionNames
collection, err := ss.replica.getCollectionByName(collectionName)
collection, err := ss.replica.getCollectionByID(collectionID)
if err != nil {
span.LogFields(oplog.Error(err))
return err
}
collectionID := collection.ID()
dsl := query.Dsl
plan, err := createPlan(*collection, dsl)
if err != nil {

@ -20,7 +20,7 @@ import (

func TestSearch_Search(t *testing.T) {
node := newQueryNodeMock()
initTestMeta(t, node, "collection0", 0, 0)
initTestMeta(t, node, 0, 0)

pulsarURL := Params.PulsarAddress

@ -136,12 +136,12 @@ func TestSearch_Search(t *testing.T) {
Timestamp: uint64(10 + 1000),
SourceID: 0,
},
CollectionName: "collection0",
PartitionName: "default",
SegmentID: int64(0),
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
CollectionID: UniqueID(0),
PartitionName: "default",
SegmentID: int64(0),
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
RowData: []*commonpb.Blob{
{Value: rawData},
},
@ -214,7 +214,7 @@ func TestSearch_Search(t *testing.T) {

func TestSearch_SearchMultiSegments(t *testing.T) {
node := NewQueryNode(context.Background(), 0)
initTestMeta(t, node, "collection0", 0, 0)
initTestMeta(t, node, 0, 0)

pulsarURL := Params.PulsarAddress

@ -334,12 +334,12 @@ func TestSearch_SearchMultiSegments(t *testing.T) {
Timestamp: uint64(i + 1000),
SourceID: 0,
},
CollectionName: "collection0",
PartitionName: "default",
SegmentID: int64(segmentID),
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
CollectionID: UniqueID(0),
PartitionName: "default",
SegmentID: int64(segmentID),
ChannelID: "0",
Timestamps: []uint64{uint64(i + 1000)},
RowIDs: []int64{int64(i)},
RowData: []*commonpb.Blob{
{Value: rawData},
},

@ -15,12 +15,10 @@ import (

//-------------------------------------------------------------------------------------- constructor and destructor
func TestSegment_newSegment(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -31,12 +29,10 @@ func TestSegment_newSegment(t *testing.T) {
}

func TestSegment_deleteSegment(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -49,12 +45,10 @@ func TestSegment_deleteSegment(t *testing.T) {

//-------------------------------------------------------------------------------------- stats functions
func TestSegment_getRowCount(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -98,12 +92,10 @@ func TestSegment_getRowCount(t *testing.T) {
}

func TestSegment_getDeletedCount(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -153,12 +145,10 @@ func TestSegment_getDeletedCount(t *testing.T) {
}

func TestSegment_getMemSize(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -203,12 +193,10 @@ func TestSegment_getMemSize(t *testing.T) {

//-------------------------------------------------------------------------------------- dm & search functions
func TestSegment_segmentInsert(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)
segmentID := UniqueID(0)
segment := newSegment2(collection, segmentID, Params.DefaultPartitionTag, collectionID, segTypeGrowing)
@ -247,12 +235,10 @@ func TestSegment_segmentInsert(t *testing.T) {
}

func TestSegment_segmentDelete(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -298,12 +284,10 @@ func TestSegment_segmentDelete(t *testing.T) {
}

func TestSegment_segmentSearch(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -381,12 +365,10 @@ func TestSegment_segmentSearch(t *testing.T) {

//-------------------------------------------------------------------------------------- preDm functions
func TestSegment_segmentPreInsert(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -421,12 +403,10 @@ func TestSegment_segmentPreInsert(t *testing.T) {
}

func TestSegment_segmentPreDelete(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)
@ -470,12 +450,10 @@ func TestSegment_segmentPreDelete(t *testing.T) {
}

func TestSegment_segmentLoadFieldData(t *testing.T) {
collectionName := "collection0"
collectionID := UniqueID(0)
collectionMeta := genTestCollectionMeta(collectionName, collectionID, false)
collectionMeta := genTestCollectionMeta(collectionID, false)

collection := newCollection(collectionMeta.ID, collectionMeta.Schema)
assert.Equal(t, collection.Name(), collectionName)
assert.Equal(t, collection.ID(), collectionID)

segmentID := UniqueID(0)

@ -10,7 +10,7 @@ import (
// NOTE: start pulsar before test
func TestStatsService_start(t *testing.T) {
node := newQueryNodeMock()
initTestMeta(t, node, "collection0", 0, 0)
initTestMeta(t, node, 0, 0)
node.statsService = newStatsService(node.queryNodeLoopCtx, node.replica, nil)
node.statsService.start()
node.Stop()
@ -19,7 +19,7 @@ func TestStatsService_start(t *testing.T) {
//NOTE: start pulsar before test
func TestSegmentManagement_sendSegmentStatistic(t *testing.T) {
node := newQueryNodeMock()
initTestMeta(t, node, "collection0", 0, 0)
initTestMeta(t, node, 0, 0)

const receiveBufSize = 1024
// start pulsar

@ -635,8 +635,16 @@ func NewIndexCodec() *IndexCodec {
return &IndexCodec{}
}

func (indexCodec *IndexCodec) Serialize(blobs []*Blob, params map[string]string) ([]*Blob, error) {
paramsBytes, err := json.Marshal(params)
func (indexCodec *IndexCodec) Serialize(blobs []*Blob, params map[string]string, indexName string, indexID UniqueID) ([]*Blob, error) {
paramsBytes, err := json.Marshal(struct {
Params map[string]string
IndexName string
IndexID UniqueID
}{
Params: params,
IndexName: indexName,
IndexID: indexID,
})
if err != nil {
return nil, err
}
@ -644,20 +652,27 @@ func (indexCodec *IndexCodec) Serialize(blobs []*Blob, params map[string]string)
return blobs, nil
}

func (indexCodec *IndexCodec) Deserialize(blobs []*Blob) ([]*Blob, map[string]string, error) {
var params map[string]string
func (indexCodec *IndexCodec) Deserialize(blobs []*Blob) ([]*Blob, map[string]string, string, UniqueID, error) {
var file *Blob
for i := 0; i < len(blobs); i++ {
if blobs[i].Key != IndexParamsFile {
continue
}
if err := json.Unmarshal(blobs[i].Value, &params); err != nil {
return nil, nil, err
}
file = blobs[i]
blobs = append(blobs[:i], blobs[i+1:]...)
break
}
if params == nil {
return nil, nil, errors.New("can not find params blob")
if file == nil {
return nil, nil, "", -1, errors.New("can not find params blob")
}
return blobs, params, nil
info := struct {
Params map[string]string
IndexName string
IndexID UniqueID
}{}
if err := json.Unmarshal(file.Value, &info); err != nil {
return nil, nil, "", -1, errors.New("json unmarshal error: " + err.Error())
}

return blobs, info.Params, info.IndexName, info.IndexID, nil
}

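For context, a hedged sketch of the round trip the new IndexCodec signatures imply, written as if it sat in the same storage package: Serialize now embeds the index name and ID next to the params in the IndexParamsFile blob, and Deserialize hands them back as two extra return values. The blob key, bytes, and parameter values below are placeholders.

// Illustrative only: round-trips index blobs through the updated codec.
func indexCodecRoundTrip() error {
	codec := NewIndexCodec()
	blobs := []*Blob{{Key: "ivf1", Value: []byte("fake index bytes")}}
	params := map[string]string{"index_type": "IVF"}

	// Serialize appends an IndexParamsFile blob whose value is JSON of the form
	// {"Params":{"index_type":"IVF"},"IndexName":"index_test_name","IndexID":1234}.
	serialized, err := codec.Serialize(blobs, params, "index_test_name", 1234)
	if err != nil {
		return err
	}

	// Deserialize strips that blob again and now also returns the name and ID.
	dataBlobs, outParams, indexName, indexID, err := codec.Deserialize(serialized)
	if err != nil {
		return err
	}
	_, _, _, _ = dataBlobs, outParams, indexName, indexID
	return nil
}
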
@ -310,15 +310,17 @@ func TestIndexCodec(t *testing.T) {
indexParams := map[string]string{
"k1": "v1", "k2": "v2",
}
blobsInput, err := indexCodec.Serialize(blobs, indexParams)
blobsInput, err := indexCodec.Serialize(blobs, indexParams, "index_test_name", 1234)
assert.Nil(t, err)
assert.EqualValues(t, 4, len(blobsInput))
assert.EqualValues(t, IndexParamsFile, blobsInput[3])
blobsOutput, indexParamsOutput, err := indexCodec.Deserialize(blobsInput)
assert.EqualValues(t, IndexParamsFile, blobsInput[3].Key)
blobsOutput, indexParamsOutput, indexName, indexID, err := indexCodec.Deserialize(blobsInput)
assert.Nil(t, err)
assert.EqualValues(t, 3, len(blobsOutput))
for i := 0; i < 3; i++ {
assert.EqualValues(t, blobs[i], blobsOutput[i])
}
assert.EqualValues(t, indexParams, indexParamsOutput)
assert.EqualValues(t, "index_test_name", indexName)
assert.EqualValues(t, 1234, indexID)
}