fix the spelling of field (#25008)
Signed-off-by: PowderLi <min.li@zilliz.com>
parent f56c65efa8
commit 3f4356df10
@@ -237,7 +237,7 @@ class IndexingRecord {
 if (field_meta.get_data_type() == DataType::VECTOR_BINARY) {
 continue;
 }
-//Small-Index disabled, create index for vector filed only
+//Small-Index disabled, create index for vector field only
 if (index_meta_->GetIndexMaxRowCount() > 0 &&
 index_meta_->HasFiled(field_id)) {
 field_indexings_.try_emplace(
@@ -1917,7 +1917,7 @@ TEST(CApiTest, Indexing_Without_Predicate) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2064,7 +2064,7 @@ TEST(CApiTest, Indexing_Expr_Without_Predicate) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2227,7 +2227,7 @@ TEST(CApiTest, Indexing_With_float_Predicate_Range) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2403,7 +2403,7 @@ TEST(CApiTest, Indexing_Expr_With_float_Predicate_Range) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2563,7 +2563,7 @@ TEST(CApiTest, Indexing_With_float_Predicate_Term) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2732,7 +2732,7 @@ TEST(CApiTest, Indexing_Expr_With_float_Predicate_Term) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::FloatVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -2893,7 +2893,7 @@ TEST(CApiTest, Indexing_With_binary_Predicate_Range) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::BinaryVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -3068,7 +3068,7 @@ TEST(CApiTest, Indexing_Expr_With_binary_Predicate_Range) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::BinaryVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -3229,7 +3229,7 @@ TEST(CApiTest, Indexing_With_binary_Predicate_Term) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::BinaryVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -3420,7 +3420,7 @@ TEST(CApiTest, Indexing_Expr_With_binary_Predicate_Term) {
 AppendFieldInfo(c_load_index_info, 0, 0, 0, 100, CDataType::BinaryVector);
 AppendIndex(c_load_index_info, (CBinarySet)&binary_set);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -3643,7 +3643,7 @@ TEST(CApiTest, SealedSegment_search_float_Predicate_Range) {
 status = LoadFieldData(segment, c_ts_field_data);
 ASSERT_EQ(status.error_code, Success);

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(FieldId(100));
 sealed_segment->LoadIndex(*(LoadIndexInfo*)c_load_index_info);
@@ -120,7 +120,7 @@ TEST(Sealed, without_predicate) {
 load_info.index = std::move(indexing);
 load_info.index_params["metric_type"] = "L2";

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(fake_id);
 sealed_segment->LoadIndex(load_info);
@@ -236,7 +236,7 @@ TEST(Sealed, with_predicate) {
 load_info.index = std::move(indexing);
 load_info.index_params["metric_type"] = "L2";

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto sealed_segment = SealedCreator(schema, dataset);
 sealed_segment->DropFieldData(fake_id);
 sealed_segment->LoadIndex(load_info);
@@ -329,7 +329,7 @@ TEST(Sealed, with_predicate_filter_all) {
 load_info.index = std::move(ivf_indexing);
 load_info.index_params["metric_type"] = "L2";

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto ivf_sealed_segment = SealedCreator(schema, dataset);
 ivf_sealed_segment->DropFieldData(fake_id);
 ivf_sealed_segment->LoadIndex(load_info);
@@ -361,7 +361,7 @@ TEST(Sealed, with_predicate_filter_all) {
 hnsw_load_info.index = std::move(hnsw_indexing);
 hnsw_load_info.index_params["metric_type"] = "L2";

-// load index for vec field, load raw data for scalar filed
+// load index for vec field, load raw data for scalar field
 auto hnsw_sealed_segment = SealedCreator(schema, dataset);
 hnsw_sealed_segment->DropFieldData(fake_id);
 hnsw_sealed_segment->LoadIndex(hnsw_load_info);
@@ -353,7 +353,7 @@ func (t *compactionTask) merge(
 }

 if pkField == nil {
-log.Warn("failed to get pk filed from schema")
+log.Warn("failed to get pk field from schema")
 return nil, nil, 0, fmt.Errorf("no pk field in schema")
 }

@@ -369,7 +369,7 @@ func (m *rendezvousFlushManager) serializePkStatsLog(segmentID int64, flushed bo

 pkField := getPKField(inCodec.Schema)
 if pkField == nil {
-log.Error("No pk filed in schema", zap.Int64("segmentID", segmentID), zap.Int64("collectionID", inCodec.Schema.GetID()))
+log.Error("No pk field in schema", zap.Int64("segmentID", segmentID), zap.Int64("collectionID", inCodec.Schema.GetID()))
 return nil, nil, fmt.Errorf("no primary key in meta")
 }

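Both hunks above guard the same condition: the primary-key field must be resolvable from the collection schema before compaction or pk-stats serialization can proceed. As a rough, hedged sketch of what a getPKField-style lookup does (an illustrative helper, not the milvus implementation; only the schemapb types are taken from the diff):

```go
package main

import (
	"fmt"

	"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
)

// findPKField is a hypothetical stand-in for the getPKField helper referenced
// above: it scans the collection schema and returns the field flagged as the
// primary key, or nil if no such field exists.
func findPKField(schema *schemapb.CollectionSchema) *schemapb.FieldSchema {
	if schema == nil {
		return nil
	}
	for _, field := range schema.GetFields() {
		if field.GetIsPrimaryKey() {
			return field
		}
	}
	return nil
}

func main() {
	schema := &schemapb.CollectionSchema{
		Fields: []*schemapb.FieldSchema{
			{FieldID: 100, Name: "pk", IsPrimaryKey: true, DataType: schemapb.DataType_Int64},
			{FieldID: 101, Name: "vec", DataType: schemapb.DataType_FloatVector},
		},
	}
	if pk := findPKField(schema); pk == nil {
		fmt.Println("no pk field in schema") // the error path whose log message this commit corrects
	} else {
		fmt.Printf("pk field: %s (id %d)\n", pk.GetName(), pk.GetFieldID())
	}
}
```

An empty schema takes the nil branch, which is exactly the error path whose log message the commit fixes.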
@@ -33,6 +33,7 @@ import (
 "github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
 "github.com/milvus-io/milvus/internal/datanode/allocator"
 "github.com/milvus-io/milvus/internal/proto/datapb"
+"github.com/milvus-io/milvus/internal/proto/etcdpb"
 "github.com/milvus-io/milvus/internal/storage"
 "github.com/milvus-io/milvus/pkg/util/retry"
 )
@@ -206,6 +207,9 @@ func TestRendezvousFlushManager(t *testing.T) {
 assert.NoError(t, err)
 }
 assert.Eventually(t, func() bool { return counter.Load() == int64(size) }, 3*time.Second, 100*time.Millisecond)
+
+_, _, err := m.serializePkStatsLog(0, false, nil, &storage.InsertCodec{Schema: &etcdpb.CollectionMeta{Schema: &schemapb.CollectionSchema{}, ID: 0}})
+assert.Error(t, err)
 }

 func TestRendezvousFlushManager_Inject(t *testing.T) {
@@ -2002,7 +2002,7 @@ func TestSearchTask_Requery(t *testing.T) {
 assert.Error(t, err)
 })

-t.Run("Test get pk filed data failed", func(t *testing.T) {
+t.Run("Test get pk field data failed", func(t *testing.T) {
 schema := constructCollectionSchema(pkField, vecField, dim, collection)
 node := mocks.NewProxy(t)
 node.EXPECT().Query(mock.Anything, mock.Anything).
@@ -18,7 +18,7 @@ package rootcoord

 import "github.com/milvus-io/milvus/pkg/common"

-// system filed id:
+// system field id:
 // 0: unique row id
 // 1: timestamp
 // 100: first user field id
@@ -26,7 +26,7 @@ import "github.com/milvus-io/milvus/pkg/common"
 // 102: ...

 const (
-// StartOfUserFieldID id of user defined filed begin from here
+// StartOfUserFieldID id of user defined field begin from here
 StartOfUserFieldID = common.StartOfUserFieldID

 // RowIDField id of row ID field
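The reserved IDs documented above (0 for the row ID, 1 for the timestamp, 100 for the first user field) make it easy to tell system fields from user-defined ones. A minimal sketch, assuming the common.RowIDField, common.TimeStampField, and common.StartOfUserFieldID constants from pkg/common:

```go
package main

import (
	"fmt"

	"github.com/milvus-io/milvus/pkg/common"
)

// isUserDefinedField is an illustrative helper, not part of this commit:
// field IDs below StartOfUserFieldID (row ID 0, timestamp 1) are reserved by
// the system; everything from 100 upward belongs to the user's schema.
func isUserDefinedField(fieldID int64) bool {
	return fieldID >= common.StartOfUserFieldID
}

func main() {
	for _, id := range []int64{common.RowIDField, common.TimeStampField, common.StartOfUserFieldID, 101} {
		fmt.Printf("field %d user-defined: %v\n", id, isUserDefinedField(id))
	}
}
```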
@@ -52,7 +52,7 @@ type (
 // UniqueID is type alias of typeutil.UniqueID
 UniqueID = typeutil.UniqueID

-// FieldID represent the identity number of filed in collection and its type is UniqueID
+// FieldID represent the identity number of field in collection and its type is UniqueID
 FieldID = typeutil.UniqueID

 // Timestamp is type alias of typeutil.Timestamp
@@ -266,7 +266,7 @@ func (data *FloatVectorFieldData) GetMemorySize() int {
 return binary.Size(data.Data) + 4
 }

-// system filed id:
+// system field id:
 // 0: unique row id
 // 1: timestamp
 // 100: first user field id
@@ -18,7 +18,7 @@ package common

 import "encoding/binary"

-// system filed id:
+// system field id:
 // 0: unique row id
 // 1: timestamp
 // 100: first user field id
@@ -52,7 +52,7 @@ func GetPrivilegeExtObj(m proto.GeneratedMessage) (commonpb.PrivilegeExt, error)
 }, nil
 }

-// GetObjectName get object name from the grpc message according to the filed index. The filed is a string.
+// GetObjectName get object name from the grpc message according to the field index. The field is a string.
 func GetObjectName(m proto.GeneratedMessage, index int32) string {
 if index <= 0 {
 return util.AnyWord
@@ -71,7 +71,7 @@ func GetObjectName(m proto.GeneratedMessage, index int32) string {
 return value.String()
 }

-// GetObjectNames get object names from the grpc message according to the filed index. The filed is an array.
+// GetObjectNames get object names from the grpc message according to the field index. The field is an array.
 func GetObjectNames(m proto.GeneratedMessage, index int32) []string {
 if index <= 0 {
 return []string{}
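GetObjectName and GetObjectNames above pull a value out of an arbitrary gRPC message by its field index. A minimal sketch of the same idea using the protobuf v2 reflection API (the helper name, the guard conditions, and the wrapperspb example message are assumptions for illustration, not the milvus code):

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/types/known/wrapperspb"
)

// stringFieldByNumber reads a string-typed field from any protobuf message by
// its field number, mirroring the "field index" idea used above.
func stringFieldByNumber(msg protoreflect.Message, number int32) (string, bool) {
	if number <= 0 {
		return "", false
	}
	fd := msg.Descriptor().Fields().ByNumber(protoreflect.FieldNumber(number))
	if fd == nil || fd.Kind() != protoreflect.StringKind || fd.IsList() {
		return "", false
	}
	return msg.Get(fd).String(), true
}

func main() {
	// wrapperspb.StringValue has a single string field, number 1.
	m := wrapperspb.String("collection_a")
	if v, ok := stringFieldByNumber(m.ProtoReflect(), 1); ok {
		fmt.Println(v) // collection_a
	}
}
```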
@@ -1077,7 +1077,7 @@ func TestCalcColumnSize(t *testing.T) {
 func TestGetDataAndGetDataSize(t *testing.T) {
 const (
 Dim = 8
-fieldName = "filed-0"
+fieldName = "field-0"
 fieldID = 0
 )

@@ -80,7 +80,7 @@ insert_performance:
 The other parts in the test yaml is the params pass to the runner, such as:
 - The field `collection_name` means which kind of collection will be created in milvus
 - The field `ni_per` means the batch size
-- The filed `build_index` means that whether to create index during inserting
+- The field `build_index` means that whether to create index during inserting

 While using argo workflow as benchmark pipeline, the test suite is made of both `client` and `server` configmap, an example:
