Merge remote-tracking branch 'upstream/branch-0.3.1' into branch-0.3.1

Former-commit-id: 59c36121db32e51794f1798160d90287bfb823b7
kun yu 2019-07-22 10:33:10 +08:00
commit 4a967676fd
61 changed files with 1920 additions and 923 deletions

View File

@ -5,15 +5,17 @@ try {
dir ("milvus-helm") {
checkout([$class: 'GitSCM', branches: [[name: "${SEMVER}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_USER}", url: "git@192.168.1.105:megasearch/milvus-helm.git", name: 'origin', refspec: "+refs/heads/${SEMVER}:refs/remotes/origin/${SEMVER}"]]])
dir ("milvus/milvus-cluster") {
sh "helm install --set roServers.image.tag=${DOCKER_VERSION} --set woServers.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP -f ci/values.yaml --name ${env.JOB_NAME}-${env.BUILD_NUMBER}-cluster --namespace milvus-cluster --version 0.3.1 . "
sh "helm install --wait --timeout 300 --set roServers.image.tag=${DOCKER_VERSION} --set woServers.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP -f ci/values.yaml --name ${env.JOB_NAME}-${env.BUILD_NUMBER}-cluster --namespace milvus-cluster --version 0.3.1 . "
}
}
/*
timeout(time: 2, unit: 'MINUTES') {
waitUntil {
def result = sh script: "nc -z -w 3 ${env.JOB_NAME}-${env.BUILD_NUMBER}-cluster-milvus-cluster-proxy.milvus-cluster.svc.cluster.local 19530", returnStatus: true
return !result
}
}
*/
} catch (exc) {
echo 'Helm running failed!'
sh "helm del --purge ${env.JOB_NAME}-${env.BUILD_NUMBER}-cluster"

View File

@ -2,7 +2,7 @@ timeout(time: 10, unit: 'MINUTES') {
try {
dir ("${PROJECT_NAME}_test") {
checkout([$class: 'GitSCM', branches: [[name: "${SEMVER}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_USER}", url: "git@192.168.1.105:Test/milvus_test.git", name: 'origin', refspec: "+refs/heads/${SEMVER}:refs/remotes/origin/${SEMVER}"]]])
sh 'python3 -m pip install -r requirements.txt'
sh 'python3 -m pip install -r requirements_cluster.txt'
sh "pytest . --alluredir=cluster_test_out --ip ${env.JOB_NAME}-${env.BUILD_NUMBER}-cluster-milvus-cluster-proxy.milvus-cluster.svc.cluster.local"
}
} catch (exc) {

View File

@ -5,15 +5,17 @@ try {
dir ("milvus-helm") {
checkout([$class: 'GitSCM', branches: [[name: "${SEMVER}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_USER}", url: "git@192.168.1.105:megasearch/milvus-helm.git", name: 'origin', refspec: "+refs/heads/${SEMVER}:refs/remotes/origin/${SEMVER}"]]])
dir ("milvus/milvus-gpu") {
sh "helm install --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.JOB_NAME}-${env.BUILD_NUMBER} --version 0.3.1 ."
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.JOB_NAME}-${env.BUILD_NUMBER} -f ci/values.yaml --namespace milvus-1 --version 0.3.1 ."
}
}
/*
timeout(time: 2, unit: 'MINUTES') {
waitUntil {
def result = sh script: "nc -z -w 3 ${env.JOB_NAME}-${env.BUILD_NUMBER}-milvus-gpu-engine.kube-opt.svc.cluster.local 19530", returnStatus: true
return !result
}
}
*/
} catch (exc) {
echo 'Helm running failed!'
sh "helm del --purge ${env.JOB_NAME}-${env.BUILD_NUMBER}"

View File

@ -1,10 +1,27 @@
timeout(time: 10, unit: 'MINUTES') {
timeout(time: 20, unit: 'MINUTES') {
try {
dir ("${PROJECT_NAME}_test") {
checkout([$class: 'GitSCM', branches: [[name: "${SEMVER}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_USER}", url: "git@192.168.1.105:Test/milvus_test.git", name: 'origin', refspec: "+refs/heads/${SEMVER}:refs/remotes/origin/${SEMVER}"]]])
sh 'python3 -m pip install -r requirements.txt'
sh "pytest . --alluredir=test_out --ip ${env.JOB_NAME}-${env.BUILD_NUMBER}-milvus-gpu-engine.kube-opt.svc.cluster.local"
}
// mysql database backend test
load "${env.WORKSPACE}/ci/jenkinsfile/cleanup_dev.groovy"
if (!fileExists('milvus-helm')) {
dir ("milvus-helm") {
checkout([$class: 'GitSCM', branches: [[name: "${SEMVER}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_USER}", url: "git@192.168.1.105:megasearch/milvus-helm.git", name: 'origin', refspec: "+refs/heads/${SEMVER}:refs/remotes/origin/${SEMVER}"]]])
}
}
dir ("milvus-helm") {
dir ("milvus/milvus-gpu") {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.JOB_NAME}-${env.BUILD_NUMBER} -f ci/db_backend/mysql_values.yaml --namespace milvus-2 --version 0.3.1 ."
}
}
dir ("${PROJECT_NAME}_test") {
sh "pytest . --alluredir=test_out --ip ${env.JOB_NAME}-${env.BUILD_NUMBER}-milvus-gpu-engine.kube-opt.svc.cluster.local"
}
} catch (exc) {
echo 'Milvus Test Failed !'
throw exc
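
Between the default run and the MySQL-backed run, the pipeline tears down the first release (the cleanup_dev.groovy step) and reinstalls with a different values file. The equivalent manual sequence, sketched with a hypothetical release name:

# Hedged sketch; "my-release" stands in for ${JOB_NAME}-${BUILD_NUMBER}.
helm del --purge my-release
helm install --wait --timeout 300 -f ci/db_backend/mysql_values.yaml \
    --name my-release --namespace milvus-2 --version 0.3.1 .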

View File

@ -35,7 +35,7 @@ pipeline {
defaultContainer 'jnlp'
containerTemplate {
name 'milvus-build-env'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.10'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.11'
ttyEnabled true
command 'cat'
}

View File

@ -35,7 +35,7 @@ pipeline {
defaultContainer 'jnlp'
containerTemplate {
name 'milvus-build-env'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.10'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.11'
ttyEnabled true
command 'cat'
}

View File

@ -35,7 +35,7 @@ pipeline {
defaultContainer 'jnlp'
containerTemplate {
name 'milvus-build-env'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.10'
image 'registry.zilliz.com/milvus/milvus-build-env:v0.11'
ttyEnabled true
command 'cat'
}

View File

@ -17,6 +17,10 @@ Please mark all change in change log and use the ticket from JIRA.
- MS-232 - Add MySQLMetaImpl::UpdateTableFilesToIndex and set maximum_memory to default if config value = 0
- MS-233 - Remove mem manager log
- MS-230 - Change parameter name: Maximum_memory to insert_buffer_size
- MS-234 - Some cases cause the background merge thread to stop
- MS-235 - Some test cases fail randomly
- MS-236 - Add MySQLMetaImpl::HasNonIndexFiles
- MS-257 - Update bzip2 download URL
## Improvement
- MS-156 - Add unittest for merge result functions
@ -25,6 +29,13 @@ Please mark all change in change log and use the ticket from JIRA.
- MS-206 - Support SQ8 index type
- MS-208 - Add buildindex interface for C++ SDK
- MS-212 - Support Inner product metric type
- MS-241 - Build Faiss with MKL if using Intel CPU; else build with OpenBLAS
- MS-242 - Clean up cmake and change MAKE_BUILD_ARGS to be user defined variable
- MS-245 - Improve search result transfer performance
- MS-248 - Support AddVector/SearchVector profiling
- MS-256 - Add more cache config
- MS-260 - Refine log
- MS-261 - Update Faiss version to 1.5.3 and add BUILD_FAISS_WITH_MKL as an option
## New Feature
- MS-180 - Add new mem manager

View File

@ -116,6 +116,11 @@ set(MILVUS_ENGINE_SRC ${PROJECT_SOURCE_DIR}/src)
add_compile_definitions(PROFILER=${PROFILER})
message("MILVUS_ENABLE_PROFILING = ${MILVUS_ENABLE_PROFILING}")
if (MILVUS_ENABLE_PROFILING STREQUAL "ON")
ADD_DEFINITIONS(-DMILVUS_ENABLE_PROFILING)
endif()
include_directories(${MILVUS_ENGINE_INCLUDE})
include_directories(${MILVUS_ENGINE_SRC})
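
A hedged sketch of a configure invocation that exercises this switch (out-of-source build directory assumed); with the option ON, the block above injects -DMILVUS_ENABLE_PROFILING into every translation unit:

# Assumes an already-created build directory under the cpp tree.
cmake -DMILVUS_ENABLE_PROFILING=ON ..
make -j8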

View File

@ -1,13 +1,13 @@
### Compilation
#### Step 1: install necessary tools
Install MySQL
CentOS 7:
yum install gfortran qt4 flex bison mysql-devel
yum install gfortran qt4 flex bison mysql-devel mysql
Ubuntu 16.04:
sudo apt-get install gfortran qt4-qmake flex bison libmysqlclient-dev
sudo apt-get install gfortran qt4-qmake flex bison libmysqlclient-dev mysql-client
cd scripts && sudo ./requirements.sh
If `libmysqlclient_r.so` does not exist after installing MySQL Development Files, you need to create a symbolic link:
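
The exact target of that link is distribution-specific; a hedged example assuming the client library was installed under /usr/lib64/mysql (verify the path on your system first):

sudo ln -s /usr/lib64/mysql/libmysqlclient.so /usr/lib64/mysql/libmysqlclient_r.so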

View File

@ -7,8 +7,10 @@ INSTALL_PREFIX=$(pwd)/milvus
MAKE_CLEAN="OFF"
BUILD_COVERAGE="OFF"
DB_PATH="/opt/milvus"
PROFILING="OFF"
BUILD_FAISS_WITH_MKL="OFF"
while getopts "p:d:t:uhlrc" arg
while getopts "p:d:t:uhlrcgm" arg
do
case $arg in
t)
@ -36,6 +38,12 @@ do
c)
BUILD_COVERAGE="ON"
;;
g)
PROFILING="ON"
;;
m)
BUILD_FAISS_WITH_MKL="ON"
;;
h) # help
echo "
@ -47,9 +55,11 @@ parameter:
-l: build license version(default: OFF)
-r: remove previous build directory(default: OFF)
-c: code coverage(default: OFF)
-g: profiling(default: OFF)
-m: build faiss with MKL(default: OFF)
usage:
./build.sh -t \${BUILD_TYPE} [-u] [-h] [-g] [-r] [-c]
./build.sh -t \${BUILD_TYPE} [-u] [-h] [-g] [-r] [-c] [-m]
"
exit 0
;;
@ -77,6 +87,8 @@ if [[ ${MAKE_CLEAN} == "ON" ]]; then
-DCMAKE_LICENSE_CHECK=${LICENSE_CHECK} \
-DBUILD_COVERAGE=${BUILD_COVERAGE} \
-DMILVUS_DB_PATH=${DB_PATH} \
-DMILVUS_ENABLE_PROFILING=${PROFILING} \
-DBUILD_FAISS_WITH_MKL=${BUILD_FAISS_WITH_MKL} \
$@ ../"
echo ${CMAKE_CMD}
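
Putting the new flags together, a typical invocation that enables both features might look like this (illustrative only; -t selects the build type, -g maps to -DMILVUS_ENABLE_PROFILING=ON, -m to -DBUILD_FAISS_WITH_MKL=ON):

./build.sh -t Release -g -m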

View File

@ -57,8 +57,6 @@ define_option(MILVUS_VERBOSE_THIRDPARTY_BUILD
define_option(MILVUS_WITH_ARROW "Build with ARROW" OFF)
define_option(MILVUS_BOOST_USE_SHARED "Rely on boost shared libraries where relevant" OFF)
define_option(MILVUS_BOOST_VENDORED "Use vendored Boost instead of existing Boost. \
Note that this requires linking Boost statically" ON)
@ -111,6 +109,11 @@ define_option(MILVUS_WITH_ZSTD "Build with zstd compression" ${MILVUS_WITH_ZSTD_
define_option(MILVUS_WITH_AWS "Build with AWS SDK" ON)
if (MILVUS_ENABLE_PROFILING STREQUAL "ON")
define_option(MILVUS_WITH_LIBUNWIND "Build with libunwind" ON)
define_option(MILVUS_WITH_GPERFTOOLS "Build with gperftools" ON)
endif()
#----------------------------------------------------------------------
if(MSVC)
set_option_category("MSVC")

View File

@ -5,7 +5,6 @@
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
@ -37,7 +36,9 @@ set(MILVUS_THIRDPARTY_DEPENDENCIES
yaml-cpp
ZLIB
ZSTD
AWS)
AWS
libunwind
gperftools)
message(STATUS "Using ${MILVUS_DEPENDENCY_SOURCE} approach to find dependencies")
@ -89,6 +90,10 @@ macro(build_dependency DEPENDENCY_NAME)
build_zstd()
elseif("${DEPENDENCY_NAME}" STREQUAL "AWS")
build_aws()
elseif("${DEPENDENCY_NAME}" STREQUAL "libunwind")
build_libunwind()
elseif("${DEPENDENCY_NAME}" STREQUAL "gperftools")
build_gperftools()
else()
message(FATAL_ERROR "Unknown thirdparty dependency to build: ${DEPENDENCY_NAME}")
endif ()
@ -96,12 +101,8 @@ endmacro()
macro(resolve_dependency DEPENDENCY_NAME)
if (${DEPENDENCY_NAME}_SOURCE STREQUAL "AUTO")
#message(STATUS "Finding ${DEPENDENCY_NAME} package")
# find_package(${DEPENDENCY_NAME} QUIET)
# if (NOT ${DEPENDENCY_NAME}_FOUND)
#message(STATUS "${DEPENDENCY_NAME} package not found")
#disable find_package for now
build_dependency(${DEPENDENCY_NAME})
# endif ()
elseif (${DEPENDENCY_NAME}_SOURCE STREQUAL "BUNDLED")
build_dependency(${DEPENDENCY_NAME})
elseif (${DEPENDENCY_NAME}_SOURCE STREQUAL "SYSTEM")
@ -117,11 +118,9 @@ string(TOUPPER ${CMAKE_BUILD_TYPE} UPPERCASE_BUILD_TYPE)
set(EP_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${UPPERCASE_BUILD_TYPE}}")
set(EP_C_FLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${UPPERCASE_BUILD_TYPE}}")
if(NOT MSVC)
# Set -fPIC on all external projects
set(EP_CXX_FLAGS "${EP_CXX_FLAGS} -fPIC")
set(EP_C_FLAGS "${EP_C_FLAGS} -fPIC")
endif()
# Set -fPIC on all external projects
set(EP_CXX_FLAGS "${EP_CXX_FLAGS} -fPIC")
set(EP_C_FLAGS "${EP_C_FLAGS} -fPIC")
# CC/CXX environment variables are captured on the first invocation of the
# builder (e.g make or ninja) instead of when CMake is invoked into to build
@ -159,20 +158,13 @@ endif()
# Ensure that a default make is set
if("${MAKE}" STREQUAL "")
if(NOT MSVC)
find_program(MAKE make)
endif()
find_program(MAKE make)
endif()
set(MAKE_BUILD_ARGS "-j2")
## Using make -j in sub-make is fragile
#if(${CMAKE_GENERATOR} MATCHES "Makefiles")
# set(MAKE_BUILD_ARGS "")
#else()
# # limit the maximum number of jobs for ninja
# set(MAKE_BUILD_ARGS "-j4")
#endif()
if (NOT DEFINED MAKE_BUILD_ARGS)
set(MAKE_BUILD_ARGS "-j8")
endif()
message(STATUS "Third Party MAKE_BUILD_ARGS = ${MAKE_BUILD_ARGS}")
# ----------------------------------------------------------------------
# Find pthreads
@ -227,7 +219,7 @@ endif()
if(DEFINED ENV{MILVUS_BZIP2_URL})
set(BZIP2_SOURCE_URL "$ENV{MILVUS_BZIP2_URL}")
else()
set(BZIP2_SOURCE_URL "https://fossies.org/linux/misc/bzip2-${BZIP2_VERSION}.tar.gz")
set(BZIP2_SOURCE_URL "https://sourceware.org/pub/bzip2/bzip2-${BZIP2_VERSION}.tar.gz")
endif()
if(DEFINED ENV{MILVUS_EASYLOGGINGPP_URL})
@ -285,7 +277,6 @@ if (DEFINED ENV{MILVUS_PROMETHEUS_URL})
set(PROMETHEUS_SOURCE_URL "$ENV{PROMETHEUS_OPENBLAS_URL}")
else ()
set(PROMETHEUS_SOURCE_URL
#"https://github.com/JinHai-CN/prometheus-cpp/archive/${PROMETHEUS_VERSION}.tar.gz"
https://github.com/jupp0r/prometheus-cpp.git)
endif()
@ -347,6 +338,21 @@ if(DEFINED ENV{MILVUS_AWS_URL})
else()
set(AWS_SOURCE_URL "https://github.com/aws/aws-sdk-cpp/archive/${AWS_VERSION}.tar.gz")
endif()
if(DEFINED ENV{MILVUS_LIBUNWIND_URL})
set(LIBUNWIND_SOURCE_URL "$ENV{MILVUS_LIBUNWIND_URL}")
else()
set(LIBUNWIND_SOURCE_URL
"https://github.com/libunwind/libunwind/releases/download/v${LIBUNWIND_VERSION}/libunwind-${LIBUNWIND_VERSION}.tar.gz")
endif()
if(DEFINED ENV{MILVUS_GPERFTOOLS_URL})
set(GPERFTOOLS_SOURCE_URL "$ENV{MILVUS_GPERFTOOLS_URL}")
else()
set(GPERFTOOLS_SOURCE_URL
"https://github.com/gperftools/gperftools/releases/download/gperftools-${GPERFTOOLS_VERSION}/gperftools-${GPERFTOOLS_VERSION}.tar.gz")
endif()
# ----------------------------------------------------------------------
# ARROW
@ -354,19 +360,13 @@ macro(build_arrow)
message(STATUS "Building Apache ARROW-${ARROW_VERSION} from source")
set(ARROW_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/arrow_ep-prefix/src/arrow_ep/cpp")
set(ARROW_STATIC_LIB_NAME arrow)
# set(ARROW_CUDA_STATIC_LIB_NAME arrow_cuda)
set(ARROW_STATIC_LIB
"${ARROW_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}${ARROW_STATIC_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}"
)
# set(ARROW_CUDA_STATIC_LIB
# "${ARROW_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}${ARROW_CUDA_STATIC_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}"
# )
set(ARROW_INCLUDE_DIR "${ARROW_PREFIX}/include")
set(ARROW_CMAKE_ARGS
${EP_COMMON_CMAKE_ARGS}
# "-DARROW_THRIFT_URL=${THRIFT_SOURCE_URL}"
#"env ARROW_THRIFT_URL=${THRIFT_SOURCE_URL}"
-DARROW_BUILD_STATIC=ON
-DARROW_BUILD_SHARED=OFF
-DARROW_PARQUET=ON
@ -375,8 +375,6 @@ macro(build_arrow)
"-DCMAKE_LIBRARY_PATH=${CUDA_TOOLKIT_ROOT_DIR}/lib64/stubs"
-DCMAKE_BUILD_TYPE=Release)
# set($ENV{ARROW_THRIFT_URL} ${THRIFT_SOURCE_URL})
externalproject_add(arrow_ep
GIT_REPOSITORY
${ARROW_SOURCE_URL}
@ -384,14 +382,8 @@ macro(build_arrow)
${ARROW_VERSION}
GIT_SHALLOW
TRUE
# SOURCE_DIR
# ${ARROW_PREFIX}
# BINARY_DIR
# ${ARROW_PREFIX}
SOURCE_SUBDIR
cpp
# COMMAND
# "export \"ARROW_THRIFT_URL=${THRIFT_SOURCE_URL}\""
${EP_LOG_OPTIONS}
CMAKE_ARGS
${ARROW_CMAKE_ARGS}
@ -400,21 +392,16 @@ macro(build_arrow)
${MAKE_BUILD_ARGS}
INSTALL_COMMAND
${MAKE} install
# BUILD_IN_SOURCE
# 1
BUILD_BYPRODUCTS
"${ARROW_STATIC_LIB}"
# "${ARROW_CUDA_STATIC_LIB}"
)
# ExternalProject_Add_StepDependencies(arrow_ep build thrift_ep)
file(MAKE_DIRECTORY "${ARROW_PREFIX}/include")
add_library(arrow STATIC IMPORTED)
set_target_properties(arrow
PROPERTIES IMPORTED_LOCATION "${ARROW_STATIC_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${ARROW_INCLUDE_DIR}")
# INTERFACE_LINK_LIBRARIES thrift)
add_dependencies(arrow arrow_ep)
set(JEMALLOC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/arrow_ep-prefix/src/arrow_ep-build/jemalloc_ep-prefix/src/jemalloc_ep")
@ -438,9 +425,6 @@ endif()
# Add Boost dependencies (code adapted from Apache Kudu (incubating))
set(Boost_USE_MULTITHREADED ON)
if(MSVC AND MILVUS_USE_STATIC_CRT)
set(Boost_USE_STATIC_RUNTIME ON)
endif()
set(Boost_ADDITIONAL_VERSIONS
"1.70.0"
"1.70"
@ -530,59 +514,8 @@ if(MILVUS_BOOST_VENDORED)
add_dependencies(boost_filesystem_static boost_ep)
add_dependencies(boost_serialization_static boost_ep)
else()
if(MSVC)
# disable autolinking in boost
add_definitions(-DBOOST_ALL_NO_LIB)
endif()
# if(DEFINED ENV{BOOST_ROOT} OR DEFINED BOOST_ROOT)
# # In older versions of CMake (such as 3.2), the system paths for Boost will
# # be looked in first even if we set $BOOST_ROOT or pass -DBOOST_ROOT
# set(Boost_NO_SYSTEM_PATHS ON)
# endif()
if(MILVUS_BOOST_USE_SHARED)
# Find shared Boost libraries.
set(Boost_USE_STATIC_LIBS OFF)
set(BUILD_SHARED_LIBS_KEEP ${BUILD_SHARED_LIBS})
set(BUILD_SHARED_LIBS ON)
if(MSVC)
# force all boost libraries to dynamic link
add_definitions(-DBOOST_ALL_DYN_LINK)
endif()
if(MILVUS_BOOST_HEADER_ONLY)
find_package(Boost REQUIRED)
else()
find_package(Boost COMPONENTS serialization system filesystem REQUIRED)
set(BOOST_SYSTEM_LIBRARY Boost::system)
set(BOOST_FILESYSTEM_LIBRARY Boost::filesystem)
set(BOOST_SERIALIZATION_LIBRARY Boost::serialization)
set(MILVUS_BOOST_LIBS ${BOOST_SYSTEM_LIBRARY} ${BOOST_FILESYSTEM_LIBRARY})
endif()
set(BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS_KEEP})
unset(BUILD_SHARED_LIBS_KEEP)
else()
# Find static boost headers and libs
# TODO Differentiate here between release and debug builds
set(Boost_USE_STATIC_LIBS ON)
if(MILVUS_BOOST_HEADER_ONLY)
find_package(Boost REQUIRED)
else()
find_package(Boost COMPONENTS serialization system filesystem REQUIRED)
set(BOOST_SYSTEM_LIBRARY Boost::system)
set(BOOST_FILESYSTEM_LIBRARY Boost::filesystem)
set(BOOST_SERIALIZATION_LIBRARY Boost::serialization)
set(MILVUS_BOOST_LIBS ${BOOST_SYSTEM_LIBRARY} ${BOOST_FILESYSTEM_LIBRARY})
endif()
endif()
endif()
#message(STATUS "Boost include dir: " ${Boost_INCLUDE_DIR})
#message(STATUS "Boost libraries: " ${Boost_LIBRARIES})
include_directories(SYSTEM ${Boost_INCLUDE_DIR})
link_directories(SYSTEM ${BOOST_LIB_DIR})
@ -726,13 +659,6 @@ macro(build_openblas)
add_dependencies(openblas openblas_ep)
endmacro()
#if(MILVUS_WITH_OPENBLAS)
# resolve_dependency(OpenBLAS)
#
# get_target_property(OPENBLAS_INCLUDE_DIR openblas INTERFACE_INCLUDE_DIRECTORIES)
# include_directories(SYSTEM "${OPENBLAS_INCLUDE_DIR}")
#endif()
# ----------------------------------------------------------------------
# LAPACK
@ -770,16 +696,25 @@ macro(build_lapack)
add_dependencies(lapack lapack_ep)
endmacro()
#if(MILVUS_WITH_LAPACK)
# resolve_dependency(LAPACK)
#
# get_target_property(LAPACK_INCLUDE_DIR lapack INTERFACE_INCLUDE_DIRECTORIES)
# include_directories(SYSTEM "${LAPACK_INCLUDE_DIR}")
#endif()
# ----------------------------------------------------------------------
# FAISS
if(NOT DEFINED BUILD_FAISS_WITH_MKL)
set(BUILD_FAISS_WITH_MKL OFF)
endif()
if(EXISTS "/proc/cpuinfo")
FILE(READ /proc/cpuinfo PROC_CPUINFO)
SET(VENDOR_ID_RX "vendor_id[ \t]*:[ \t]*([a-zA-Z]+)\n")
STRING(REGEX MATCH "${VENDOR_ID_RX}" VENDOR_ID "${PROC_CPUINFO}")
STRING(REGEX REPLACE "${VENDOR_ID_RX}" "\\1" VENDOR_ID "${VENDOR_ID}")
if(NOT ${VENDOR_ID} STREQUAL "GenuineIntel")
set(BUILD_FAISS_WITH_MKL OFF)
endif()
endif()
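
The same vendor test can be run by hand before configuring; on anything other than a GenuineIntel CPU, the block above forces BUILD_FAISS_WITH_MKL back to OFF:

grep -m1 vendor_id /proc/cpuinfo   # e.g. "vendor_id : GenuineIntel"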
macro(build_faiss)
message(STATUS "Building FAISS-${FAISS_VERSION} from source")
set(FAISS_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/faiss_ep-prefix/src/faiss_ep")
@ -787,37 +722,40 @@ macro(build_faiss)
set(FAISS_STATIC_LIB
"${FAISS_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}faiss${CMAKE_STATIC_LIBRARY_SUFFIX}")
# add_custom_target(faiss_dependencies)
# add_dependencies(faiss_dependencies openblas_ep)
# add_dependencies(faiss_dependencies openblas)
# get_target_property(FAISS_OPENBLAS_LIB_DIR openblas IMPORTED_LOCATION)
# get_filename_component(FAISS_OPENBLAS_LIB "${FAISS_OPENBLAS_LIB_DIR}" DIRECTORY)
set(FAISS_CONFIGURE_ARGS
"--prefix=${FAISS_PREFIX}"
"CFLAGS=${EP_C_FLAGS}"
"CXXFLAGS=${EP_CXX_FLAGS}"
"LDFLAGS=-L${OPENBLAS_PREFIX}/lib -L${LAPACK_PREFIX}/lib -lopenblas -llapack"
--without-python)
# if(OPENBLAS_STATIC_LIB)
# set(OPENBLAS_LIBRARY ${OPENBLAS_STATIC_LIB})
# else()
# set(OPENBLAS_LIBRARY ${OPENBLAS_SHARED_LIB})
# endif()
# set(FAISS_DEPENDENCIES ${FAISS_DEPENDENCIES} ${OPENBLAS_LIBRARY})
set(FAISS_CFLAGS ${EP_C_FLAGS})
set(FAISS_CXXFLAGS ${EP_CXX_FLAGS})
if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
message(STATUS "Build Faiss with MKL")
if(NOT DEFINED MKL_LIB_PATH)
set(MKL_LIB_PATH "/opt/intel/compilers_and_libraries_${MKL_VERSION}/linux/mkl/lib/intel64")
message(STATUS "MKL_LIB_PATH = ${MKL_LIB_PATH}")
endif()
set(FAISS_CONFIGURE_ARGS ${FAISS_CONFIGURE_ARGS}
"CPPFLAGS=-DFINTEGER=long -DMKL_ILP64 -m64 -I${MKL_LIB_PATH}/../../include"
"LDFLAGS=-L${MKL_LIB_PATH}"
"LIBS=-Wl,--start-group ${MKL_LIB_PATH}/libmkl_intel_ilp64.a ${MKL_LIB_PATH}/libmkl_gnu_thread.a ${MKL_LIB_PATH}/libmkl_core.a -Wl,--end-group -lgomp -lpthread -lm -ldl")
else()
message(STATUS "Build Faiss with OpenBlas/LAPACK")
set(FAISS_CONFIGURE_ARGS ${FAISS_CONFIGURE_ARGS}
"LDFLAGS=-L${OPENBLAS_PREFIX}/lib -L${LAPACK_PREFIX}/lib")
endif()
if(${MILVUS_WITH_FAISS_GPU_VERSION} STREQUAL "ON")
set(FAISS_CONFIGURE_ARGS ${FAISS_CONFIGURE_ARGS}
"--with-cuda=${CUDA_TOOLKIT_ROOT_DIR}"
# "with_cuda_arch=\"-gencode=arch=compute_35,code=compute_35 \\
# -gencode=arch=compute_52,code=compute_52 \\
# -gencode=arch=compute_60,code=compute_60 \\
# -gencode=arch=compute_61,code=compute_61\""
"--with-cuda-arch=\"-gencode=arch=compute_35,code=compute_35\""
"--with-cuda-arch=\"-gencode=arch=compute_52,code=compute_52\""
"--with-cuda-arch=\"-gencode=arch=compute_60,code=compute_60\""
"--with-cuda-arch=\"-gencode=arch=compute_61,code=compute_61\""
"--with-cuda-arch=\"-gencode=arch=compute_35,code=sm_35\""
"--with-cuda-arch=\"-gencode=arch=compute_52,code=sm_52\""
"--with-cuda-arch=\"-gencode=arch=compute_60,code=sm_60\""
"--with-cuda-arch=\"-gencode=arch=compute_61,code=sm_61\""
)
else()
set(FAISS_CONFIGURE_ARGS ${FAISS_CONFIGURE_ARGS} --without-cuda)
@ -830,66 +768,67 @@ macro(build_faiss)
CONFIGURE_COMMAND
"./configure"
${FAISS_CONFIGURE_ARGS}
# BINARY_DIR
# ${FAISS_PREFIX}
# INSTALL_DIR
# ${FAISS_PREFIX}
# BUILD_COMMAND
# ${MAKE} ${MAKE_BUILD_ARGS}
BUILD_COMMAND
${MAKE} ${MAKE_BUILD_ARGS} all
COMMAND
cd gpu && ${MAKE} ${MAKE_BUILD_ARGS}
${MAKE} ${MAKE_BUILD_ARGS}
BUILD_IN_SOURCE
1
# INSTALL_DIR
# ${FAISS_PREFIX}
INSTALL_COMMAND
${MAKE} install
COMMAND
ln -s faiss_ep ../faiss
BUILD_BYPRODUCTS
${FAISS_STATIC_LIB})
# DEPENDS
# ${faiss_dependencies})
ExternalProject_Add_StepDependencies(faiss_ep build openblas_ep lapack_ep)
if(${BUILD_FAISS_WITH_MKL} STREQUAL "OFF")
ExternalProject_Add_StepDependencies(faiss_ep build openblas_ep lapack_ep)
endif()
file(MAKE_DIRECTORY "${FAISS_INCLUDE_DIR}")
add_library(faiss STATIC IMPORTED)
set_target_properties(
faiss
PROPERTIES IMPORTED_LOCATION "${FAISS_STATIC_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${FAISS_INCLUDE_DIR}"
INTERFACE_LINK_LIBRARIES "openblas;lapack" )
add_library(faiss SHARED IMPORTED)
if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
set(MKL_LIBS ${MKL_LIB_PATH}/libmkl_intel_ilp64.a
${MKL_LIB_PATH}/libmkl_gnu_thread.a
${MKL_LIB_PATH}/libmkl_core.a)
set_target_properties(
faiss
PROPERTIES IMPORTED_LOCATION "${FAISS_STATIC_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${FAISS_INCLUDE_DIR}"
INTERFACE_LINK_LIBRARIES "${MKL_LIBS}" )
else()
set_target_properties(
faiss
PROPERTIES IMPORTED_LOCATION "${FAISS_STATIC_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${FAISS_INCLUDE_DIR}"
INTERFACE_LINK_LIBRARIES "openblas;lapack" )
endif()
add_dependencies(faiss faiss_ep)
#add_dependencies(faiss openblas_ep)
#add_dependencies(faiss lapack_ep)
#target_link_libraries(faiss ${OPENBLAS_PREFIX}/lib)
#target_link_libraries(faiss ${LAPACK_PREFIX}/lib)
if(${BUILD_FAISS_WITH_MKL} STREQUAL "OFF")
add_dependencies(faiss openblas_ep)
add_dependencies(faiss lapack_ep)
endif()
endmacro()
if(MILVUS_WITH_FAISS)
resolve_dependency(OpenBLAS)
get_target_property(OPENBLAS_INCLUDE_DIR openblas INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM "${OPENBLAS_INCLUDE_DIR}")
link_directories(SYSTEM ${OPENBLAS_PREFIX}/lib)
if(${BUILD_FAISS_WITH_MKL} STREQUAL "OFF")
resolve_dependency(OpenBLAS)
get_target_property(OPENBLAS_INCLUDE_DIR openblas INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM "${OPENBLAS_INCLUDE_DIR}")
link_directories(SYSTEM ${OPENBLAS_PREFIX}/lib)
resolve_dependency(LAPACK)
get_target_property(LAPACK_INCLUDE_DIR lapack INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM "${LAPACK_INCLUDE_DIR}")
link_directories(SYSTEM "${LAPACK_PREFIX}/lib")
resolve_dependency(LAPACK)
get_target_property(LAPACK_INCLUDE_DIR lapack INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM "${LAPACK_INCLUDE_DIR}")
link_directories(SYSTEM "${LAPACK_PREFIX}/lib")
endif()
resolve_dependency(FAISS)
get_target_property(FAISS_INCLUDE_DIR faiss INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM "${FAISS_INCLUDE_DIR}")
include_directories(SYSTEM "${CMAKE_CURRENT_BINARY_DIR}/faiss_ep-prefix/src/")
link_directories(SYSTEM ${FAISS_PREFIX}/)
link_directories(SYSTEM ${FAISS_PREFIX}/lib/)
link_directories(SYSTEM ${FAISS_PREFIX}/gpu/)
endif()
# ----------------------------------------------------------------------
@ -926,8 +865,6 @@ macro(build_gtest)
set(GMOCK_STATIC_LIB
"${GTEST_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}gmock${CMAKE_STATIC_LIBRARY_SUFFIX}"
)
ExternalProject_Add(googletest_ep
URL
${GTEST_SOURCE_URL}
@ -967,13 +904,11 @@ macro(build_gtest)
endmacro()
if (MILVUS_BUILD_TESTS)
#message(STATUS "Resolving gtest dependency")
resolve_dependency(GTest)
if(NOT GTEST_VENDORED)
endif()
# TODO: Don't use global includes but rather target_include_directories
get_target_property(GTEST_INCLUDE_DIR gtest INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM "${GTEST_PREFIX}/lib")
include_directories(SYSTEM ${GTEST_INCLUDE_DIR})
@ -1011,32 +946,8 @@ macro(build_lz4)
set(LZ4_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/lz4_ep-prefix/src/lz4_ep")
set(LZ4_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/lz4_ep-prefix/")
if(MSVC)
if(MILVUS_USE_STATIC_CRT)
if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG")
set(LZ4_RUNTIME_LIBRARY_LINKAGE "/p:RuntimeLibrary=MultiThreadedDebug")
else()
set(LZ4_RUNTIME_LIBRARY_LINKAGE "/p:RuntimeLibrary=MultiThreaded")
endif()
endif()
set(LZ4_STATIC_LIB
"${LZ4_BUILD_DIR}/visual/VS2010/bin/x64_${CMAKE_BUILD_TYPE}/liblz4_static.lib")
set(LZ4_BUILD_COMMAND
BUILD_COMMAND
msbuild.exe
/m
/p:Configuration=${CMAKE_BUILD_TYPE}
/p:Platform=x64
/p:PlatformToolset=v140
${LZ4_RUNTIME_LIBRARY_LINKAGE}
/t:Build
${LZ4_BUILD_DIR}/visual/VS2010/lz4.sln)
else()
set(LZ4_STATIC_LIB "${LZ4_BUILD_DIR}/lib/liblz4.a")
#set(LZ4_BUILD_COMMAND BUILD_COMMAND ${CMAKE_SOURCE_DIR}/build-support/build-lz4-lib.sh
# "AR=${CMAKE_AR}")
set(LZ4_BUILD_COMMAND BUILD_COMMAND ${MAKE} ${MAKE_BUILD_ARGS} CFLAGS=${EP_C_FLAGS})
endif()
set(LZ4_STATIC_LIB "${LZ4_BUILD_DIR}/lib/liblz4.a")
set(LZ4_BUILD_COMMAND BUILD_COMMAND ${MAKE} ${MAKE_BUILD_ARGS} CFLAGS=${EP_C_FLAGS})
# We need to copy the header in lib to directory outside of the build
externalproject_add(lz4_ep
@ -1071,7 +982,6 @@ endmacro()
if(MILVUS_WITH_LZ4)
resolve_dependency(Lz4)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(LZ4_INCLUDE_DIR lz4 INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${LZ4_BUILD_DIR}/lib/)
include_directories(SYSTEM ${LZ4_INCLUDE_DIR})
@ -1097,16 +1007,8 @@ macro(build_mysqlpp)
externalproject_add(mysqlpp_ep
URL
${MYSQLPP_SOURCE_URL}
# GIT_REPOSITORY
# ${MYSQLPP_SOURCE_URL}
# GIT_TAG
# ${MYSQLPP_VERSION}
# GIT_SHALLOW
# TRUE
${EP_LOG_OPTIONS}
CONFIGURE_COMMAND
# "./bootstrap"
# COMMAND
"./configure"
${MYSQLPP_CONFIGURE_ARGS}
BUILD_COMMAND
@ -1167,10 +1069,6 @@ macro(build_prometheus)
${PROMETHEUS_VERSION}
GIT_SHALLOW
TRUE
# GIT_CONFIG
# recurse-submodules=true
# URL
# ${PROMETHEUS_SOURCE_URL}
${EP_LOG_OPTIONS}
CMAKE_ARGS
${PROMETHEUS_CMAKE_ARGS}
@ -1214,21 +1112,15 @@ if(MILVUS_WITH_PROMETHEUS)
resolve_dependency(Prometheus)
# TODO: Don't use global includes but rather target_include_directories
#get_target_property(PROMETHEUS-core_INCLUDE_DIRS prometheus-core INTERFACE_INCLUDE_DIRECTORIES)
#get_target_property(PROMETHEUS_PUSH_INCLUDE_DIRS prometheus_push INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${PROMETHEUS_PREFIX}/push/)
include_directories(SYSTEM ${PROMETHEUS_PREFIX}/push/include)
#get_target_property(PROMETHEUS_PULL_INCLUDE_DIRS prometheus_pull INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${PROMETHEUS_PREFIX}/pull/)
include_directories(SYSTEM ${PROMETHEUS_PREFIX}/pull/include)
link_directories(SYSTEM ${PROMETHEUS_PREFIX}/core/)
include_directories(SYSTEM ${PROMETHEUS_PREFIX}/core/include)
#link_directories(${PROMETHEUS_PREFIX}/civetweb_ep-prefix/src/civetweb_ep)
endif()
# ----------------------------------------------------------------------
@ -1276,8 +1168,6 @@ if(MILVUS_WITH_ROCKSDB)
resolve_dependency(RocksDB)
# TODO: Don't use global includes but rather target_include_directories
# get_target_property(ROCKSDB_INCLUDE_DIRS rocksdb INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${ROCKSDB_PREFIX}/lib/lib/)
include_directories(SYSTEM ${ROCKSDB_INCLUDE_DIRS})
endif()
@ -1326,34 +1216,9 @@ macro(build_snappy)
endmacro()
if(MILVUS_WITH_SNAPPY)
# if(Snappy_SOURCE STREQUAL "AUTO")
# # Normally *Config.cmake files reside in /usr/lib/cmake but Snappy
# # errornously places them in ${CMAKE_ROOT}/Modules/
# # This is fixed in 1.1.7 but fedora (30) still installs into the wrong
# # location.
# # https://bugzilla.redhat.com/show_bug.cgi?id=1679727
# # https://src.fedoraproject.org/rpms/snappy/pull-request/1
# find_package(Snappy QUIET HINTS "${CMAKE_ROOT}/Modules/")
# if(NOT Snappy_FOUND)
# find_package(SnappyAlt)
# endif()
# if(NOT Snappy_FOUND AND NOT SnappyAlt_FOUND)
# build_snappy()
# endif()
# elseif(Snappy_SOURCE STREQUAL "BUNDLED")
# build_snappy()
# elseif(Snappy_SOURCE STREQUAL "SYSTEM")
# # SnappyConfig.cmake is not installed on Ubuntu/Debian
# # TODO: Make a bug report upstream
# find_package(Snappy HINTS "${CMAKE_ROOT}/Modules/")
# if(NOT Snappy_FOUND)
# find_package(SnappyAlt REQUIRED)
# endif()
# endif()
resolve_dependency(Snappy)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(SNAPPY_INCLUDE_DIRS snappy INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${SNAPPY_PREFIX}/lib/)
include_directories(SYSTEM ${SNAPPY_INCLUDE_DIRS})
@ -1425,75 +1290,11 @@ macro(build_sqlite_orm)
endif ()
#set(SQLITE_ORM_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/sqlite_orm_ep-prefix/src/sqlite_orm_ep")
#set(SQLITE_ORM_INCLUDE_DIR "${SQLITE_ORM_PREFIX}/include/sqlite_orm")
# set(SQLITE_ORM_STATIC_LIB
# "${SQLITE_ORM_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}sqlite_orm${CMAKE_STATIC_LIBRARY_SUFFIX}")
#
# set(SQLITE_ORM_CMAKE_CXX_FLAGS "${EP_CXX_FLAGS} -std=c++14")
# set(SQLITE_ORM_CMAKE_CXX_FLAGS_DEBUG "${EP_CXX_FLAGS} -std=c++14")
#
# set(SQLITE_ORM_CMAKE_ARGS
# ${EP_COMMON_CMAKE_ARGS}
# "-DCMAKE_INSTALL_PREFIX=${SQLITE_ORM_PREFIX}"
# #"LDFLAGS=-L${SQLITE_PREFIX}"
# #"-DCMAKE_PREFIX_PATH=${SQLITE_PREFIX}/include"
# "-DCMAKE_INCLUDE_PATH=${SQLITE_PREFIX}/include"
# "-DCMAKE_CXX_FLAGS=${SQLITE_ORM_CMAKE_CXX_FLAGS}"
# "-DCMAKE_CXX_FLAGS_DEBUG=${SQLITE_ORM_CMAKE_CXX_FLAGS}"
# -DSqliteOrm_BuildTests=off
# -DBUILD_TESTING=off)
# message(STATUS "SQLITE_INCLUDE: ${SQLITE_ORM_CMAKE_ARGS}")
#
# message(STATUS "SQLITE_ORM_CMAKE_CXX_FLAGS: ${SQLITE_ORM_CMAKE_CXX_FLAGS}")
# externalproject_add(sqlite_orm_ep
# URL
# ${SQLITE_ORM_SOURCE_URL}
# PREFIX ${CMAKE_CURRENT_BINARY_DIR}/sqlite_orm_ep-prefix
# CONFIGURE_COMMAND
# ""
# BUILD_COMMAND
# ""
# INSTALL_COMMAND
# ""
#${EP_LOG_OPTIONS}
#${EP_LOG_OPTIONS}
# CMAKE_ARGS
# ${SQLITE_ORM_CMAKE_ARGS}
# BUILD_COMMAND
# ${MAKE}
# ${MAKE_BUILD_ARGS}
# #"LDFLAGS=-L${SQLITE_PREFIX}"
# BUILD_IN_SOURCE
# 1
# BUILD_BYPRODUCTS
# "${SQLITE_ORM_STATIC_LIB}"
# )
# ExternalProject_Add_StepDependencies(sqlite_orm_ep build sqlite_ep)
#set(SQLITE_ORM_SQLITE_HEADER ${SQLITE_INCLUDE_DIR}/sqlite3.h)
# file(MAKE_DIRECTORY "${SQLITE_ORM_INCLUDE_DIR}")
# add_library(sqlite_orm STATIC IMPORTED)
## message(STATUS "SQLITE_INCLUDE_DIR: ${SQLITE_INCLUDE_DIR}")
# set_target_properties(
# sqlite_orm
# PROPERTIES
# IMPORTED_LOCATION "${SQLITE_ORM_STATIC_LIB}"
# INTERFACE_INCLUDE_DIRECTORIES "${SQLITE_ORM_INCLUDE_DIR};${SQLITE_INCLUDE_DIR}")
# target_include_directories(sqlite_orm INTERFACE ${SQLITE_PREFIX} ${SQLITE_INCLUDE_DIR})
# target_link_libraries(sqlite_orm INTERFACE sqlite)
#
# add_dependencies(sqlite_orm sqlite_orm_ep)
endmacro()
if(MILVUS_WITH_SQLITE_ORM)
resolve_dependency(SQLite_ORM)
# ExternalProject_Get_Property(sqlite_orm_ep source_dir)
# set(SQLITE_ORM_INCLUDE_DIR ${source_dir}/sqlite_orm_ep)
include_directories(SYSTEM "${SQLITE_ORM_INCLUDE_DIR}")
#message(STATUS "SQLITE_ORM_INCLUDE_DIR: ${SQLITE_ORM_INCLUDE_DIR}")
endif()
# ----------------------------------------------------------------------
@ -1533,18 +1334,7 @@ macro(build_thrift)
endif()
set(THRIFT_STATIC_LIB_NAME "${CMAKE_STATIC_LIBRARY_PREFIX}thrift")
if(MSVC)
if(MILVUS_USE_STATIC_CRT)
set(THRIFT_STATIC_LIB_NAME "${THRIFT_STATIC_LIB_NAME}")
set(THRIFT_CMAKE_ARGS ${THRIFT_CMAKE_ARGS} "-DWITH_MT=ON")
else()
set(THRIFT_STATIC_LIB_NAME "${THRIFT_STATIC_LIB_NAME}")
set(THRIFT_CMAKE_ARGS ${THRIFT_CMAKE_ARGS} "-DWITH_MT=OFF")
endif()
endif()
if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG")
set(THRIFT_STATIC_LIB_NAME "${THRIFT_STATIC_LIB_NAME}")
endif()
set(THRIFT_STATIC_LIB
"${THRIFT_PREFIX}/lib/${THRIFT_STATIC_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}")
@ -1555,60 +1345,6 @@ macro(build_thrift)
endif()
set(THRIFT_DEPENDENCIES ${THRIFT_DEPENDENCIES} ${ZLIB_LIBRARY})
if(MSVC)
set(WINFLEXBISON_VERSION 2.4.9)
set(WINFLEXBISON_PREFIX
"${CMAKE_CURRENT_BINARY_DIR}/winflexbison_ep/src/winflexbison_ep-install")
externalproject_add(
winflexbison_ep
URL
https://github.com/lexxmark/winflexbison/releases/download/v.${WINFLEXBISON_VERSION}/win_flex_bison-${WINFLEXBISON_VERSION}.zip
URL_HASH
MD5=a2e979ea9928fbf8567e995e9c0df765
SOURCE_DIR
${WINFLEXBISON_PREFIX}
CONFIGURE_COMMAND
""
BUILD_COMMAND
""
INSTALL_COMMAND
""
${EP_LOG_OPTIONS})
set(THRIFT_DEPENDENCIES ${THRIFT_DEPENDENCIES} winflexbison_ep)
set(THRIFT_CMAKE_ARGS
"-DFLEX_EXECUTABLE=${WINFLEXBISON_PREFIX}/win_flex.exe"
"-DBISON_EXECUTABLE=${WINFLEXBISON_PREFIX}/win_bison.exe"
"-DZLIB_INCLUDE_DIR=${ZLIB_INCLUDE_DIR}"
"-DWITH_SHARED_LIB=OFF"
"-DWITH_PLUGIN=OFF"
${THRIFT_CMAKE_ARGS})
elseif(APPLE)
# Some other process always resets BISON_EXECUTABLE to the system default,
# thus we use our own variable here.
if(NOT DEFINED THRIFT_BISON_EXECUTABLE)
find_package(BISON 2.5.1)
# In the case where we cannot find a system-wide installation, look for
# homebrew and ask for its bison installation.
if(NOT BISON_FOUND)
find_program(BREW_BIN brew)
if(BREW_BIN)
execute_process(COMMAND ${BREW_BIN} --prefix bison
OUTPUT_VARIABLE BISON_PREFIX
OUTPUT_STRIP_TRAILING_WHITESPACE)
set(BISON_EXECUTABLE "${BISON_PREFIX}/bin/bison")
find_package(BISON 2.5.1)
set(THRIFT_BISON_EXECUTABLE "${BISON_EXECUTABLE}")
endif()
else()
set(THRIFT_BISON_EXECUTABLE "${BISON_EXECUTABLE}")
endif()
endif()
set(THRIFT_CMAKE_ARGS "-DBISON_EXECUTABLE=${THRIFT_BISON_EXECUTABLE}"
${THRIFT_CMAKE_ARGS})
endif()
externalproject_add(thrift_ep
URL
${THRIFT_SOURCE_URL}
@ -1637,8 +1373,7 @@ endmacro()
if(MILVUS_WITH_THRIFT)
resolve_dependency(Thrift)
# TODO: Don't use global includes but rather target_include_directories
# MESSAGE(STATUS ${THRIFT_PREFIX}/lib/)
link_directories(SYSTEM ${THRIFT_PREFIX}/lib/)
link_directories(SYSTEM ${CMAKE_CURRENT_BINARY_DIR}/thrift_ep-prefix/src/thrift_ep-build/lib)
include_directories(SYSTEM ${THRIFT_INCLUDE_DIR})
@ -1684,8 +1419,7 @@ endmacro()
if(MILVUS_WITH_YAMLCPP)
resolve_dependency(yaml-cpp)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(YAMLCPP_INCLUDE_DIR yaml-cpp INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${YAMLCPP_PREFIX}/lib/)
include_directories(SYSTEM ${YAMLCPP_INCLUDE_DIR})
@ -1697,15 +1431,7 @@ endif()
macro(build_zlib)
message(STATUS "Building ZLIB-${ZLIB_VERSION} from source")
set(ZLIB_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/zlib_ep-prefix/src/zlib_ep")
if(MSVC)
if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG")
set(ZLIB_STATIC_LIB_NAME zlibstaticd.lib)
else()
set(ZLIB_STATIC_LIB_NAME zlibstatic.lib)
endif()
else()
set(ZLIB_STATIC_LIB_NAME libz.a)
endif()
set(ZLIB_STATIC_LIB_NAME libz.a)
set(ZLIB_STATIC_LIB "${ZLIB_PREFIX}/lib/${ZLIB_STATIC_LIB_NAME}")
set(ZLIB_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${ZLIB_PREFIX}"
-DBUILD_SHARED_LIBS=OFF)
@ -1734,8 +1460,7 @@ endmacro()
if(MILVUS_WITH_ZLIB)
resolve_dependency(ZLIB)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(ZLIB_INCLUDE_DIR zlib INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM ${ZLIB_INCLUDE_DIR})
endif()
@ -1757,22 +1482,15 @@ macro(build_zstd)
-DZSTD_BUILD_STATIC=on
-DZSTD_MULTITHREAD_SUPPORT=off)
if(MSVC)
set(ZSTD_STATIC_LIB "${ZSTD_PREFIX}/lib/zstd_static.lib")
if(MILVUS_USE_STATIC_CRT)
set(ZSTD_CMAKE_ARGS ${ZSTD_CMAKE_ARGS} "-DZSTD_USE_STATIC_RUNTIME=on")
endif()
else()
set(ZSTD_STATIC_LIB "${ZSTD_PREFIX}/lib/libzstd.a")
# Only pass our C flags on Unix as on MSVC it leads to a
# "incompatible command-line options" error
set(ZSTD_CMAKE_ARGS
${ZSTD_CMAKE_ARGS}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_FLAGS=${EP_C_FLAGS}
-DCMAKE_CXX_FLAGS=${EP_CXX_FLAGS})
endif()
set(ZSTD_STATIC_LIB "${ZSTD_PREFIX}/lib/libzstd.a")
set(ZSTD_CMAKE_ARGS
${ZSTD_CMAKE_ARGS}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_FLAGS=${EP_C_FLAGS}
-DCMAKE_CXX_FLAGS=${EP_CXX_FLAGS})
if(CMAKE_VERSION VERSION_LESS 3.7)
message(FATAL_ERROR "Building zstd using ExternalProject requires at least CMake 3.7")
@ -1806,8 +1524,7 @@ endmacro()
if(MILVUS_WITH_ZSTD)
resolve_dependency(ZSTD)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(ZSTD_INCLUDE_DIR zstd INTERFACE_INCLUDE_DIRECTORIES)
link_directories(SYSTEM ${ZSTD_PREFIX}/lib)
include_directories(SYSTEM ${ZSTD_INCLUDE_DIR})
@ -1823,7 +1540,7 @@ macro(build_aws)
${EP_COMMON_TOOLCHAIN}
"-DCMAKE_INSTALL_PREFIX=${AWS_PREFIX}"
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_INSTALL_LIBDIR=lib #${CMAKE_INSTALL_LIBDIR}
-DCMAKE_INSTALL_LIBDIR=lib
-DBUILD_ONLY=s3
-DBUILD_SHARED_LIBS=off
-DENABLE_TESTING=off
@ -1834,8 +1551,7 @@ macro(build_aws)
"${AWS_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}aws-cpp-sdk-core${CMAKE_STATIC_LIBRARY_SUFFIX}")
set(AWS_CPP_SDK_S3_STATIC_LIB
"${AWS_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}aws-cpp-sdk-s3${CMAKE_STATIC_LIBRARY_SUFFIX}")
# Only pass our C flags on Unix as on MSVC it leads to a
# "incompatible command-line options" error
set(AWS_CMAKE_ARGS
${AWS_CMAKE_ARGS}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
@ -1843,10 +1559,6 @@ macro(build_aws)
-DCMAKE_C_FLAGS=${EP_C_FLAGS}
-DCMAKE_CXX_FLAGS=${EP_CXX_FLAGS})
if(CMAKE_VERSION VERSION_LESS 3.7)
message(FATAL_ERROR "Building AWS using ExternalProject requires at least CMake 3.7")
endif()
externalproject_add(aws_ep
${EP_LOG_OPTIONS}
CMAKE_ARGS
@ -1861,8 +1573,6 @@ macro(build_aws)
BUILD_BYPRODUCTS
"${AWS_CPP_SDK_S3_STATIC_LIB}"
"${AWS_CPP_SDK_CORE_STATIC_LIB}")
file(MAKE_DIRECTORY "${AWS_PREFIX}/include")
add_library(aws-cpp-sdk-s3 STATIC IMPORTED)
@ -1885,8 +1595,7 @@ endmacro()
if(MILVUS_WITH_AWS)
resolve_dependency(AWS)
# TODO: Don't use global includes but rather target_include_directories
link_directories(SYSTEM ${AWS_PREFIX}/lib)
get_target_property(AWS_CPP_SDK_S3_INCLUDE_DIR aws-cpp-sdk-s3 INTERFACE_INCLUDE_DIRECTORIES)
@ -1896,3 +1605,91 @@ if(MILVUS_WITH_AWS)
include_directories(SYSTEM ${AWS_CPP_SDK_CORE_INCLUDE_DIR})
endif()
# ----------------------------------------------------------------------
# libunwind
macro(build_libunwind)
message(STATUS "Building libunwind-${LIBUNWIND_VERSION} from source")
set(LIBUNWIND_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/libunwind_ep-prefix/src/libunwind_ep/install")
set(LIBUNWIND_INCLUDE_DIR "${LIBUNWIND_PREFIX}/include")
set(LIBUNWIND_SHARED_LIB "${LIBUNWIND_PREFIX}/lib/libunwind${CMAKE_SHARED_LIBRARY_SUFFIX}")
set(LIBUNWIND_CONFIGURE_ARGS "--prefix=${LIBUNWIND_PREFIX}")
externalproject_add(libunwind_ep
URL
${LIBUNWIND_SOURCE_URL}
${EP_LOG_OPTIONS}
CONFIGURE_COMMAND
"./configure"
${LIBUNWIND_CONFIGURE_ARGS}
BUILD_COMMAND
${MAKE} ${MAKE_BUILD_ARGS}
BUILD_IN_SOURCE
1
INSTALL_COMMAND
${MAKE} install
BUILD_BYPRODUCTS
${LIBUNWIND_SHARED_LIB})
file(MAKE_DIRECTORY "${LIBUNWIND_INCLUDE_DIR}")
add_library(libunwind SHARED IMPORTED)
set_target_properties(libunwind
PROPERTIES IMPORTED_LOCATION "${LIBUNWIND_SHARED_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${LIBUNWIND_INCLUDE_DIR}")
add_dependencies(libunwind libunwind_ep)
endmacro()
if(MILVUS_WITH_LIBUNWIND)
resolve_dependency(libunwind)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(LIBUNWIND_INCLUDE_DIR libunwind INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM ${LIBUNWIND_INCLUDE_DIR})
endif()
# ----------------------------------------------------------------------
# gperftools
macro(build_gperftools)
message(STATUS "Building gperftools-${GPERFTOOLS_VERSION} from source")
set(GPERFTOOLS_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/gperftools_ep-prefix/src/gperftools_ep")
set(GPERFTOOLS_INCLUDE_DIR "${GPERFTOOLS_PREFIX}/include")
set(GPERFTOOLS_STATIC_LIB "${GPERFTOOLS_PREFIX}/lib/libprofiler${CMAKE_STATIC_LIBRARY_SUFFIX}")
set(GPERFTOOLS_CONFIGURE_ARGS "--prefix=${GPERFTOOLS_PREFIX}")
externalproject_add(gperftools_ep
URL
${GPERFTOOLS_SOURCE_URL}
${EP_LOG_OPTIONS}
CONFIGURE_COMMAND
"./configure"
${GPERFTOOLS_CONFIGURE_ARGS}
BUILD_COMMAND
${MAKE} ${MAKE_BUILD_ARGS}
BUILD_IN_SOURCE
1
INSTALL_COMMAND
${MAKE} install
BUILD_BYPRODUCTS
${GPERFTOOLS_STATIC_LIB})
file(MAKE_DIRECTORY "${GPERFTOOLS_INCLUDE_DIR}")
add_library(gperftools SHARED IMPORTED)
set_target_properties(gperftools
PROPERTIES IMPORTED_LOCATION "${GPERFTOOLS_STATIC_LIB}"
INTERFACE_INCLUDE_DIRECTORIES "${GPERFTOOLS_INCLUDE_DIR}")
add_dependencies(gperftools gperftools_ep)
endmacro()
if(MILVUS_WITH_GPERFTOOLS)
resolve_dependency(gperftools)
# TODO: Don't use global includes but rather target_include_directories
get_target_property(GPERFTOOLS_INCLUDE_DIR gperftools INTERFACE_INCLUDE_DIRECTORIES)
include_directories(SYSTEM ${GPERFTOOLS_INCLUDE_DIR})
endif()
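
With the engine linked against libprofiler (see the PROFILER_LIB handling in the engine CMakeLists further down), profiling is driven through the standard gperftools CPUPROFILE environment variable. A hedged sketch with hypothetical binary and output paths:

CPUPROFILE=/tmp/milvus.prof ./milvus_server     # profile is written on clean exit
pprof --text ./milvus_server /tmp/milvus.prof   # summarize the hottest functions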

View File

@ -6,7 +6,7 @@
TO_STANDARD_OUTPUT = false
SUBSECOND_PRECISION = 3
PERFORMANCE_TRACKING = false
MAX_LOG_FILE_SIZE = 2097152 ## Throw log files away after 2MB
MAX_LOG_FILE_SIZE = 209715200 ## Throw log files away after 200MB
* DEBUG:
FILENAME = "@MILVUS_DB_PATH@/logs/milvus-%datetime{%H:%m}-debug.log"
ENABLED = true
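
The new limit is 200 MiB expressed in bytes; a quick check of the arithmetic:

echo $((200 * 1024 * 1024))   # 209715200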

View File

@ -34,6 +34,8 @@ license_config: # license configure
cache_config: # cache configure
cpu_cache_capacity: 16 # how much memory is used as cache, unit: GB, range: 0 ~ less than total memory
cache_free_percent: 0.85 # how much memory should be freed when the cache is full, range: greater than zero ~ 1.0
insert_cache_immediately: false # inserted data will be loaded into cache immediately for hot queries
engine_config:
nprobe: 10

cpp/scripts/requirements.sh Executable file
View File

@ -0,0 +1,10 @@
#!/usr/bin/env bash
wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB
apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list'
apt -y update && apt-get -y install intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243
#sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
#source /etc/profile
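
A hedged post-install check, reusing the library path the commented-out export above refers to (the version directory will differ for other MKL releases):

ls /opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64/libmkl_core.a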

View File

@ -53,7 +53,6 @@ set(engine_files
${db_files}
${db_scheduler_files}
${wrapper_files}
# metrics/Metrics.cpp
${metrics_files}
)
@ -71,6 +70,10 @@ include_directories(/usr/include/mysql)
include_directories(grpc/gen-status)
include_directories(grpc/gen-milvus)
if (MILVUS_ENABLE_PROFILING STREQUAL "ON")
SET(PROFILER_LIB profiler)
endif()
set(third_party_libs
easyloggingpp
sqlite
@ -79,10 +82,7 @@ set(third_party_libs
grpc++
grpcpp_channelz
yaml-cpp
libgpufaiss.a
faiss
lapack
openblas
prometheus-cpp-push
prometheus-cpp-pull
prometheus-cpp-core
@ -95,11 +95,22 @@ set(third_party_libs
zlib
zstd
mysqlpp
${PROFILER_LIB}
${CUDA_TOOLKIT_ROOT_DIR}/lib64/stubs/libnvidia-ml.so
)
if (MEGASEARCH_WITH_ARROW STREQUAL "ON")
set(third_party_libs ${third_party_libs} arrow)
endif()
endif()
if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
set(third_party_libs ${third_party_libs}
${MKL_LIBS}
${MKL_LIBS})
else()
set(third_party_libs ${third_party_libs}
lapack
openblas)
endif()
if (GPU_VERSION STREQUAL "ON")
link_directories("${CUDA_TOOLKIT_ROOT_DIR}/lib64")
@ -198,7 +209,7 @@ install(FILES
${CMAKE_BINARY_DIR}/mysqlpp_ep-prefix/src/mysqlpp_ep/lib/${CMAKE_SHARED_LIBRARY_PREFIX}mysqlpp${CMAKE_SHARED_LIBRARY_SUFFIX}
${CMAKE_BINARY_DIR}/mysqlpp_ep-prefix/src/mysqlpp_ep/lib/${CMAKE_SHARED_LIBRARY_PREFIX}mysqlpp${CMAKE_SHARED_LIBRARY_SUFFIX}.3
${CMAKE_BINARY_DIR}/mysqlpp_ep-prefix/src/mysqlpp_ep/lib/${CMAKE_SHARED_LIBRARY_PREFIX}mysqlpp${CMAKE_SHARED_LIBRARY_SUFFIX}.3.2.4
DESTINATION lib) #need to copy libmysqlpp.so
DESTINATION lib)
#add_subdirectory(sdk)
add_subdirectory(grpcsdk)

View File

@ -13,9 +13,12 @@ namespace zilliz {
namespace milvus {
namespace cache {
constexpr double DEFAULT_THRESHHOLD_PERCENT = 0.85;
Cache::Cache(int64_t capacity, uint64_t cache_max_count)
: usage_(0),
capacity_(capacity),
freemem_percent_(DEFAULT_THRESHHOLD_PERCENT),
lru_(cache_max_count) {
// AGENT_LOG_DEBUG << "Construct Cache with capacity " << std::to_string(mem_capacity)
}
@ -64,15 +67,14 @@ void Cache::insert(const std::string& key, const DataObjPtr& data_ptr) {
usage_ += data_ptr->size();
}
// AGENT_LOG_DEBUG << "Insert into LRU(" << (capacity_ > 0 ? std::to_string(usage_ * 100 / capacity_) : "Nan")
// << "%, +" << data_ptr->size() << ", " << usage_ << ", " << lru_.size() << "):"
// << " " << key;
SERVER_LOG_DEBUG << "Insert " << key << " size:" << data_ptr->size()
<< " bytes into cache, usage: " << usage_ << " bytes";
}
if (usage_ > capacity_) {
// AGENT_LOG_TRACE << "Current usage " << usage_
// << " exceeds cache capacity " << capacity_
// << ", start free memory";
SERVER_LOG_DEBUG << "Current usage " << usage_
<< " exceeds cache capacity " << capacity_
<< ", start free memory";
free_memory();
}
}
@ -86,12 +88,9 @@ void Cache::erase(const std::string& key) {
const CacheObjPtr& obj_ptr = lru_.get(key);
const DataObjPtr& data_ptr = obj_ptr->data_;
usage_ -= data_ptr->size();
// AGENT_LOG_DEBUG << "Erase from LRU(" << (capacity_ > 0 ? std::to_string(usage_*100/capacity_) : "Nan")
// << "%, -" << data_ptr->size() << ", " << usage_ << ", " << lru_.size() << "): "
// << (data_ptr->flags().get_flag(DataObjAttr::kPinned) ? "Pinned " : "")
// << (data_ptr->flags().get_flag(DataObjAttr::kValid) ? "Valid " : "")
// << "(ref:" << obj_ptr->ref_ << ") "
// << key;
SERVER_LOG_DEBUG << "Erase " << key << " from cache";
lru_.erase(key);
}
@ -99,7 +98,7 @@ void Cache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
lru_.clear();
usage_ = 0;
// AGENT_LOG_DEBUG << "Clear LRU !";
SERVER_LOG_DEBUG << "Clear cache !";
}
#if 0 /* caiyd 20190221, need more testing before enable */
@ -162,7 +161,7 @@ void Cache::restore_from_file(const std::string& key, const CacheObjPtr& obj_ptr
void Cache::free_memory() {
if (usage_ <= capacity_) return;
int64_t threshhold = capacity_ * THRESHHOLD_PERCENT;
int64_t threshhold = capacity_ * freemem_percent_;
int64_t delta_size = usage_ - threshhold;
std::set<std::string> key_array;
@ -183,7 +182,7 @@ void Cache::free_memory() {
}
}
// AGENT_LOG_DEBUG << "to be released memory size: " << released_size;
SERVER_LOG_DEBUG << "to be released memory size: " << released_size;
for (auto& key : key_array) {
erase(key);
@ -193,28 +192,15 @@ void Cache::free_memory() {
}
void Cache::print() {
int64_t still_pinned_count = 0;
int64_t total_pinned_size = 0;
int64_t total_valid_empty_size = 0;
size_t cache_count = 0;
{
std::lock_guard<std::mutex> lock(mutex_);
for (auto it = lru_.begin(); it != lru_.end(); ++it) {
auto& obj_ptr = it->second;
const auto& data_ptr = obj_ptr->data_;
if (data_ptr != nullptr) {
total_pinned_size += data_ptr->size();
++still_pinned_count;
} else {
total_valid_empty_size += data_ptr->size();
}
}
cache_count = lru_.size();
}
SERVER_LOG_DEBUG << "[Still Pinned count]: " << still_pinned_count;
SERVER_LOG_DEBUG << "[Pinned Memory total size(byte)]: " << total_pinned_size;
SERVER_LOG_DEBUG << "[valid_empty total size(byte)]: " << total_valid_empty_size;
SERVER_LOG_DEBUG << "[free memory size(byte)]: " << capacity_ - total_pinned_size - total_valid_empty_size;
SERVER_LOG_DEBUG << "[Cache item count]: " << cache_count;
SERVER_LOG_DEBUG << "[Cache usage]: " << usage_ << " bytes";
SERVER_LOG_DEBUG << "[Cache capacity]: " << capacity_ << " bytes";
}
} // cache

View File

@ -18,7 +18,6 @@ namespace milvus {
namespace cache {
const std::string SWAP_DIR = ".CACHE";
const float THRESHHOLD_PERCENT = 0.75;
class Cache {
private:
@ -45,6 +44,9 @@ public:
int64_t capacity() const { return capacity_; } //unit: BYTE
void set_capacity(int64_t capacity); //unit: BYTE
double freemem_percent() const { return freemem_percent_; };
void set_freemem_percent(double percent) { freemem_percent_ = percent; }
size_t size() const;
bool exists(const std::string& key);
DataObjPtr get(const std::string& key);
@ -57,6 +59,7 @@ public:
private:
int64_t usage_;
int64_t capacity_;
double freemem_percent_;
LRU<std::string, CacheObjPtr> lru_;
mutable std::mutex mutex_;

View File

@ -6,6 +6,7 @@
#include "CpuCacheMgr.h"
#include "server/ServerConfig.h"
#include "utils/Log.h"
namespace zilliz {
namespace milvus {
@ -16,6 +17,14 @@ CpuCacheMgr::CpuCacheMgr() {
int64_t cap = config.GetInt64Value(server::CONFIG_CPU_CACHE_CAPACITY, 16);
cap *= 1024*1024*1024;
cache_ = std::make_shared<Cache>(cap, 1UL<<32);
double free_percent = config.GetDoubleValue(server::CACHE_FREE_PERCENT, 0.85);
if(free_percent > 0.0 && free_percent <= 1.0) {
cache_->set_freemem_percent(free_percent);
} else {
SERVER_LOG_ERROR << "Invalid cache_free_percent: " << free_percent <<
", defaultly set to " << cache_->freemem_percent();
}
}
}
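
Worked numbers for the defaults above: with a 16 GB capacity and cache_free_percent 0.85, free_memory() evicts entries until usage falls back to 85% of capacity, roughly 13.6 GiB:

awk 'BEGIN { printf "%.0f bytes\n", 16 * 1024^3 * 0.85 }'   # 14602888806 bytes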

View File

@ -89,8 +89,11 @@ DBImpl::DBImpl(const Options& options)
meta_ptr_ = DBMetaImplFactory::Build(options.meta, options.mode);
mem_mgr_ = MemManagerFactory::Build(meta_ptr_, options_);
if (options.mode != Options::MODE::READ_ONLY) {
ENGINE_LOG_INFO << "StartTimerTasks";
StartTimerTasks();
}
}
Status DBImpl::CreateTable(meta::TableSchema& table_schema) {
@ -206,9 +209,10 @@ Status DBImpl::Query(const std::string& table_id, const std::vector<std::string>
Status DBImpl::QueryAsync(const std::string& table_id, const meta::TableFilesSchema& files,
uint64_t k, uint64_t nq, const float* vectors,
const meta::DatesT& dates, QueryResults& results) {
server::TimeRecorder rc("");
//step 1: get files to search
ENGINE_LOG_DEBUG << "Search DateT Size=" << files.size();
ENGINE_LOG_DEBUG << "Engine query begin, index file count:" << files.size() << " date range count:" << dates.size();
SearchContextPtr context = std::make_shared<SearchContext>(k, nq, vectors);
for (auto &file : files) {
TableFileSchemaPtr file_ptr = std::make_shared<meta::TableFileSchema>(file);
@ -221,8 +225,31 @@ Status DBImpl::QueryAsync(const std::string& table_id, const meta::TableFilesSch
context->WaitResult();
//step 3: construct results
//step 3: print time cost information
double load_cost = context->LoadCost();
double search_cost = context->SearchCost();
double reduce_cost = context->ReduceCost();
std::string load_info = server::TimeRecorder::GetTimeSpanStr(load_cost);
std::string search_info = server::TimeRecorder::GetTimeSpanStr(search_cost);
std::string reduce_info = server::TimeRecorder::GetTimeSpanStr(reduce_cost);
if(search_cost > 0.0 || reduce_cost > 0.0) {
double total_cost = load_cost + search_cost + reduce_cost;
double load_percent = load_cost/total_cost;
double search_percent = search_cost/total_cost;
double reduce_percent = reduce_cost/total_cost;
ENGINE_LOG_DEBUG << "Engine load index totally cost:" << load_info << " percent: " << load_percent*100 << "%";
ENGINE_LOG_DEBUG << "Engine search index totally cost:" << search_info << " percent: " << search_percent*100 << "%";
ENGINE_LOG_DEBUG << "Engine reduce topk totally cost:" << reduce_info << " percent: " << reduce_percent*100 << "%";
} else {
ENGINE_LOG_DEBUG << "Engine load cost:" << load_info
<< " search cost: " << search_info
<< " reduce cost: " << reduce_info;
}
//step 4: construct results
results = context->GetResult();
rc.ElapseFromBegin("Engine query total cost");
return Status::OK();
}
@ -235,7 +262,6 @@ void DBImpl::BackgroundTimerTask() {
Status status;
server::SystemInfo::GetInstance().Init();
while (true) {
if (!bg_error_.ok()) break;
if (shutting_down_.load(std::memory_order_acquire)){
for(auto& iter : compact_thread_results_) {
iter.wait();
@ -357,10 +383,11 @@ Status DBImpl::MergeFiles(const std::string& table_id, const meta::DateT& date,
updated.push_back(table_file);
status = meta_ptr_->UpdateTableFiles(updated);
ENGINE_LOG_DEBUG << "New merged file " << table_file.file_id_ <<
" of size=" << index->PhysicalSize()/(1024*1024) << " M";
" of size " << index->PhysicalSize() << " bytes";
//current disable this line to avoid memory
//index->Cache();
if(options_.insert_cache_immediately_) {
index->Cache();
}
return status;
}
@ -390,15 +417,11 @@ Status DBImpl::BackgroundMergeFiles(const std::string& table_id) {
}
void DBImpl::BackgroundCompaction(std::set<std::string> table_ids) {
// static int b_count = 0;
// b_count++;
// std::cout << "BackgroundCompaction: " << b_count << std::endl;
Status status;
for (auto& table_id : table_ids) {
status = BackgroundMergeFiles(table_id);
if (!status.ok()) {
bg_error_ = status;
ENGINE_LOG_ERROR << "Merge files for table " << table_id << " failed: " << status.ToString();
return;
}
}
@ -408,7 +431,6 @@ void DBImpl::BackgroundCompaction(std::set<std::string> table_ids) {
int ttl = 1;
if (options_.mode == Options::MODE::CLUSTER) {
ttl = meta::D_SEC;
// ENGINE_LOG_DEBUG << "Server mode is cluster. Clean up files with ttl = " << std::to_string(ttl) << "seconds.";
}
meta_ptr_->CleanUpFilesWithTTL(ttl);
}
@ -460,7 +482,7 @@ Status DBImpl::BuildIndex(const meta::TableFileSchema& file) {
try {
//step 1: load index
to_index->Load();
to_index->Load(options_.insert_cache_immediately_);
//step 2: create table file
meta::TableFileSchema table_file;
@ -501,11 +523,12 @@ Status DBImpl::BuildIndex(const meta::TableFileSchema& file) {
meta_ptr_->UpdateTableFiles(update_files);
ENGINE_LOG_DEBUG << "New index file " << table_file.file_id_ << " of size "
<< index->PhysicalSize()/(1024*1024) << " M"
<< index->PhysicalSize() << " bytes"
<< " from file " << to_remove.file_id_;
//current disable this line to avoid memory
//index->Cache();
if(options_.insert_cache_immediately_) {
index->Cache();
}
} catch (std::exception& ex) {
std::string msg = "Build index encountered exception: " + std::string(ex.what());
@ -541,10 +564,9 @@ void DBImpl::BackgroundBuildIndex() {
meta_ptr_->FilesToIndex(to_index_files);
Status status;
for (auto& file : to_index_files) {
/* ENGINE_LOG_DEBUG << "Buiding index for " << file.location; */
status = BuildIndex(file);
if (!status.ok()) {
bg_error_ = status;
ENGINE_LOG_ERROR << "Building index for " << file.id_ << " failed: " << status.ToString();
return;
}
@ -552,7 +574,6 @@ void DBImpl::BackgroundBuildIndex() {
break;
}
}
/* ENGINE_LOG_DEBUG << "All Buiding index Done"; */
}
Status DBImpl::DropAll() {

View File

@ -118,10 +118,8 @@ class DBImpl : public DB {
BuildIndex(const meta::TableFileSchema &);
private:
const Options options_;
Status bg_error_;
std::atomic<bool> shutting_down_;
std::thread bg_timer_thread_;

View File

@ -291,6 +291,8 @@ Status DBMetaImpl::HasNonIndexFiles(const std::string& table_id, bool& has) {
try {
auto selected = ConnectorPtr->select(columns(&TableFileSchema::id_),
where((c(&TableFileSchema::file_type_) == (int) TableFileSchema::RAW
or
c(&TableFileSchema::file_type_) == (int) TableFileSchema::NEW
or
c(&TableFileSchema::file_type_) == (int) TableFileSchema::TO_INDEX)
and c(&TableFileSchema::table_id_) == table_id
@ -863,7 +865,7 @@ Status DBMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {
table_file.date_ = std::get<3>(file);
utils::DeleteTableFilePath(options_, table_file);
ENGINE_LOG_DEBUG << "Removing deleted id =" << table_file.id_ << " location = " << table_file.location_ << std::endl;
ENGINE_LOG_DEBUG << "Removing file id:" << table_file.id_ << " location:" << table_file.location_;
ConnectorPtr->remove<TableFileSchema>(table_file.id_);
}

View File

@ -39,7 +39,7 @@ public:
virtual Status Serialize() = 0;
virtual Status Load() = 0;
virtual Status Load(bool to_cache = true) = 0;
virtual Status Merge(const std::string& location) = 0;

View File

@ -79,18 +79,17 @@ Status FaissExecutionEngine::Serialize() {
return Status::OK();
}
Status FaissExecutionEngine::Load() {
Status FaissExecutionEngine::Load(bool to_cache) {
auto index = zilliz::milvus::cache::CpuCacheMgr::GetInstance()->GetIndex(location_);
bool to_cache = false;
bool already_in_cache = (index != nullptr);
auto start_time = METRICS_NOW_TIME;
if (!index) {
index = read_index(location_);
to_cache = true;
ENGINE_LOG_DEBUG << "Disk io from: " << location_;
}
pIndex_ = index->data();
if (to_cache) {
if (!already_in_cache && to_cache) {
Cache();
auto end_time = METRICS_NOW_TIME;
auto total_time = METRICS_MICROSECONDS(start_time, end_time);
@ -98,7 +97,6 @@ Status FaissExecutionEngine::Load() {
server::Metrics::GetInstance().FaissDiskLoadDurationSecondsHistogramObserve(total_time);
double total_size = (pIndex_->d) * (pIndex_->ntotal) * 4;
server::Metrics::GetInstance().FaissDiskLoadSizeBytesHistogramObserve(total_size);
// server::Metrics::GetInstance().FaissDiskLoadIOSpeedHistogramObserve(total_size/double(total_time));
server::Metrics::GetInstance().FaissDiskLoadIOSpeedGaugeSet(total_size/double(total_time));
@ -151,10 +149,11 @@ Status FaissExecutionEngine::Search(long n,
std::shared_ptr<faiss::IndexIVF> ivf_index = std::dynamic_pointer_cast<faiss::IndexIVF>(pIndex_);
if(ivf_index) {
ENGINE_LOG_DEBUG << "Index type: IVFFLAT nProbe: " << nprobe_;
ENGINE_LOG_DEBUG << "Searching index type: " << build_index_type_ << " nProbe: " << nprobe_;
ivf_index->nprobe = nprobe_;
ivf_index->search(n, data, k, distances, labels);
} else {
ENGINE_LOG_DEBUG << "Searching raw file";
pIndex_->search(n, data, k, distances, labels);
}
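The Load(bool to_cache) change above lets callers decide whether a loaded index should populate the CPU cache. A hedged caller sketch, reusing the EngineFactory pattern that appears later in this diff (file stands in for a TableFileSchema):

// Hypothetical caller: load an index once without inserting it into the CPU cache.
ExecutionEnginePtr engine = EngineFactory::Build(file.dimension_, file.location_,
                                                 (EngineType)file.engine_type_);
engine->Load(false);  // reuse a cached copy if present, but never call Cache() ourselves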


@ -44,7 +44,7 @@ public:
Status Serialize() override;
Status Load() override;
Status Load(bool to_cache) override;
Status Merge(const std::string& location) override;


@ -83,11 +83,12 @@ Status MemVectors::Serialize(std::string &table_id) {
auto status = meta_->UpdateTableFile(schema_);
LOG(DEBUG) << "New " << ((schema_.file_type_ == meta::TableFileSchema::RAW) ? "raw" : "to_index")
<< " file " << schema_.file_id_ << " of size " << (double) (active_engine_->Size()) / (double) meta::M
<< " M";
ENGINE_LOG_DEBUG << "New " << ((schema_.file_type_ == meta::TableFileSchema::RAW) ? "raw" : "to_index")
<< " file " << schema_.file_id_ << " of size " << active_engine_->Size() << " bytes";
active_engine_->Cache();
if(options_.insert_cache_immediately_) {
active_engine_->Cache();
}
return status;
}


@ -95,10 +95,12 @@ Status MemTableFile::Serialize() {
auto status = meta_->UpdateTableFile(table_file_schema_);
LOG(DEBUG) << "New " << ((table_file_schema_.file_type_ == meta::TableFileSchema::RAW) ? "raw" : "to_index")
<< " file " << table_file_schema_.file_id_ << " of size " << (double) size / (double) M << " M";
ENGINE_LOG_DEBUG << "New " << ((table_file_schema_.file_type_ == meta::TableFileSchema::RAW) ? "raw" : "to_index")
<< " file " << table_file_schema_.file_id_ << " of size " << size << " bytes";
execution_engine_->Cache();
if(options_.insert_cache_immediately_) {
execution_engine_->Cache();
}
return status;
}


@ -382,7 +382,49 @@ Status MySQLMetaImpl::CreateTable(TableSchema &table_schema) {
}
Status MySQLMetaImpl::HasNonIndexFiles(const std::string &table_id, bool &has) {
// TODO
has = false;
try {
StoreQueryResult res;
{
ScopedConnection connectionPtr(*mysql_connection_pool_, safe_grab);
if (connectionPtr == nullptr) {
return Status::Error("Failed to connect to database server");
}
Query hasNonIndexFilesQuery = connectionPtr->query();
//the EXISTS subquery returns as soon as one matching row is found, so a single boolean check suffices
hasNonIndexFilesQuery << "SELECT EXISTS " <<
"(SELECT 1 FROM TableFiles " <<
"WHERE table_id = " << quote << table_id << " AND " <<
"(file_type = " << std::to_string(TableFileSchema::RAW) << " OR " <<
"file_type = " << std::to_string(TableFileSchema::NEW) << " OR " <<
"file_type = " << std::to_string(TableFileSchema::TO_INDEX) << ")) " <<
"AS " << quote << "check" << ";";
ENGINE_LOG_DEBUG << "MySQLMetaImpl::HasNonIndexFiles: " << hasNonIndexFilesQuery.str();
res = hasNonIndexFilesQuery.store();
} //Scoped Connection
int check = res[0]["check"];
has = (check == 1);
} catch (const BadQuery &er) {
// Handle any query errors
ENGINE_LOG_ERROR << "QUERY ERROR WHEN CHECKING IF NON INDEX FILES EXISTS" << ": " << er.what();
return Status::DBTransactionError("QUERY ERROR WHEN CHECKING IF NON INDEX FILES EXISTS", er.what());
} catch (const Exception &er) {
// Catch-all for any other MySQL++ exceptions
ENGINE_LOG_ERROR << "GENERAL ERROR WHEN CHECKING IF NON INDEX FILES EXISTS" << ": " << er.what();
return Status::DBTransactionError("GENERAL ERROR WHEN CHECKING IF NON INDEX FILES EXISTS", er.what());
}
return Status::OK();
}
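For reference, a hedged caller-side sketch of how this predicate is consumed (meta_ptr_ and table_id are assumed from the engine code above; this is not part of the commit):

// Hypothetical caller: keep scheduling index builds while raw/new/to-index files remain.
bool has = false;
auto status = meta_ptr_->HasNonIndexFiles(table_id, has);
if (status.ok() && has) {
    // trigger another BackgroundBuildIndex() pass
}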
@ -1378,11 +1420,17 @@ Status MySQLMetaImpl::UpdateTableFilesToIndex(const std::string &table_id) {
Query updateTableFilesToIndexQuery = connectionPtr->query();
updateTableFilesToIndexQuery << "UPDATE TableFiles " <<
"SET file_type = " << std::to_string(TableFileSchema::TO_INDEX) << " " <<
"WHERE table_id = " << quote << table_id << " AND " <<
"file_type = " << std::to_string(TableFileSchema::RAW) << ";";
"SET file_type = " << std::to_string(TableFileSchema::TO_INDEX) << " " <<
"WHERE table_id = " << quote << table_id << " AND " <<
"file_type = " << std::to_string(TableFileSchema::RAW) << ";";
ENGINE_LOG_DEBUG << "MySQLMetaImpl::UpdateTableFile: " << updateTableFilesToIndexQuery.str();
ENGINE_LOG_DEBUG << "MySQLMetaImpl::UpdateTableFilesToIndex: " << updateTableFilesToIndexQuery.str();
if (!updateTableFilesToIndexQuery.exec()) {
ENGINE_LOG_ERROR << "QUERY ERROR WHEN UPDATING TABLE FILE";
return Status::DBTransactionError("QUERY ERROR WHEN UPDATING TABLE FILE",
updateTableFilesToIndexQuery.error());
}
} catch (const BadQuery &er) {
// Handle any query errors
@ -1530,8 +1578,7 @@ Status MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {
utils::DeleteTableFilePath(options_, table_file);
ENGINE_LOG_DEBUG << "Removing deleted id =" << table_file.id_ << " location = "
<< table_file.location_ << std::endl;
ENGINE_LOG_DEBUG << "Removing file id:" << table_file.id_ << " location:" << table_file.location_;
idsToDelete.emplace_back(std::to_string(table_file.id_));
}


@ -63,7 +63,9 @@ struct Options {
size_t index_trigger_size = ONE_GB; //unit: byte
DBMetaOptions meta;
int mode = MODE::SINGLE;
size_t insert_buffer_size = 4 * ONE_GB;
bool insert_cache_immediately_ = false;
}; // Options
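insert_cache_immediately_ is the knob wired through this diff: when true, newly serialized raw files and newly built index files are pushed into the CPU cache right away, trading insert/build throughput for first-query latency. A minimal sketch of setting it programmatically (server deployments read it from cache_config via DBWrapper, shown later in this diff):

// Hypothetical direct use of the engine options.
zilliz::milvus::engine::Options opt;
opt.insert_buffer_size = 4 * zilliz::milvus::engine::ONE_GB;  // default shown above
opt.insert_cache_immediately_ = true;  // cache new files at insert/build time instead of first search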


@ -32,7 +32,7 @@ public:
IndexLoadTaskPtr loader = std::static_pointer_cast<IndexLoadTask>(task);
if(index_files.find(loader->file_->id_) != index_files.end()){
ENGINE_LOG_INFO << "Append SearchContext to exist IndexLoaderContext";
ENGINE_LOG_DEBUG << "Append SearchContext to exist IndexLoaderContext";
index_files.erase(loader->file_->id_);
loader->search_contexts_.push_back(context);
}
@ -40,7 +40,7 @@ public:
//index_files still contains some index files, create new loader
for(auto& pair : index_files) {
ENGINE_LOG_INFO << "Create new IndexLoaderContext for: " << pair.second->location_;
ENGINE_LOG_DEBUG << "Create new IndexLoaderContext for: " << pair.second->location_;
IndexLoadTaskPtr new_loader = std::make_shared<IndexLoadTask>();
new_loader->search_contexts_.push_back(context);
new_loader->file_ = pair.second;


@ -31,7 +31,7 @@ SearchContext::AddIndexFile(TableFileSchemaPtr& index_file) {
return false;
}
SERVER_LOG_INFO << "SearchContext " << identity_ << " add index file: " << index_file->id_;
SERVER_LOG_DEBUG << "SearchContext " << identity_ << " add index file: " << index_file->id_;
map_index_files_[index_file->id_] = index_file;
return true;
@ -42,7 +42,7 @@ SearchContext::IndexSearchDone(size_t index_id) {
std::unique_lock <std::mutex> lock(mtx_);
map_index_files_.erase(index_id);
done_cond_.notify_all();
SERVER_LOG_INFO << "SearchContext " << identity_ << " finish index file: " << index_id;
SERVER_LOG_DEBUG << "SearchContext " << identity_ << " finish index file: " << index_id;
}
void


@ -37,9 +37,19 @@ public:
const ResultSet& GetResult() const { return result_; }
ResultSet& GetResult() { return result_; }
std::string Identity() const { return identity_; }
void IndexSearchDone(size_t index_id);
void WaitResult();
void AccumLoadCost(double span) { time_cost_load_ += span; }
void AccumSearchCost(double span) { time_cost_search_ += span; }
void AccumReduceCost(double span) { time_cost_reduce_ += span; }
double LoadCost() const { return time_cost_load_; }
double SearchCost() const { return time_cost_search_; }
double ReduceCost() const { return time_cost_reduce_; }
private:
uint64_t topk_ = 0;
uint64_t nq_ = 0;
@ -52,6 +62,10 @@ private:
std::condition_variable done_cond_;
std::string identity_; //for debug
double time_cost_load_ = 0.0; //time cost for load all index files, unit: us
double time_cost_search_ = 0.0; //time cost for entire search, unit: us
double time_cost_reduce_ = 0.0; //time cost for entire reduce, unit: us
};
using SearchContextPtr = std::shared_ptr<SearchContext>;
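The Accum*/​*Cost pairs let every scheduler task fold its measured spans back into the shared context. A hedged sketch of the accounting pattern (ReportSpans is hypothetical; spans are in microseconds, matching the comments above):

// Hypothetical: a task reports its measured spans back to the shared context.
void ReportSpans(SearchContextPtr& context, double load_us, double search_us, double reduce_us) {
    context->AccumLoadCost(load_us);
    context->AccumSearchCost(search_us);
    context->AccumReduceCost(reduce_us);
    double total = context->LoadCost() + context->SearchCost() + context->ReduceCost();
    SERVER_LOG_DEBUG << "SearchContext " << context->Identity() << " total cost: " << total << " us";
}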


@ -41,20 +41,21 @@ IndexLoadTask::IndexLoadTask()
}
std::shared_ptr<IScheduleTask> IndexLoadTask::Execute() {
ENGINE_LOG_INFO << "Loading index(" << file_->id_ << ") from location: " << file_->location_;
server::TimeRecorder rc("Load index");
server::TimeRecorder rc("");
//step 1: load index
ExecutionEnginePtr index_ptr = EngineFactory::Build(file_->dimension_,
file_->location_,
(EngineType)file_->engine_type_);
index_ptr->Load();
rc.Record("load index file to memory");
size_t file_size = index_ptr->PhysicalSize();
LOG(DEBUG) << "Index file type " << file_->file_type_ << " Of Size: "
<< file_size/(1024*1024) << " M";
std::string info = "Load file id:" + std::to_string(file_->id_) + " file type:" + std::to_string(file_->file_type_)
+ " size:" + std::to_string(file_size) + " bytes from location: " + file_->location_ + " totally cost";
double span = rc.ElapseFromBegin(info);
for(auto& context : search_contexts_) {
context->AccumLoadCost(span);
}
CollectFileMetrics(file_->file_type_, file_size);


@ -51,10 +51,10 @@ std::shared_ptr<IScheduleTask> SearchTask::Execute() {
return nullptr;
}
SERVER_LOG_INFO << "Searching in index(" << index_id_<< ") with "
SERVER_LOG_DEBUG << "Searching in file id:" << index_id_<< " with "
<< search_contexts_.size() << " tasks";
server::TimeRecorder rc("DoSearch index(" + std::to_string(index_id_) + ")");
server::TimeRecorder rc("DoSearch file id:" + std::to_string(index_id_));
auto start_time = METRICS_NOW_TIME;
@ -71,17 +71,19 @@ std::shared_ptr<IScheduleTask> SearchTask::Execute() {
index_engine_->Search(context->nq(), context->vectors(), inner_k, output_distence.data(),
output_ids.data());
rc.Record("do search");
double span = rc.RecordSection("do search for context:" + context->Identity());
context->AccumSearchCost(span);
//step 3: cluster result
SearchContext::ResultSet result_set;
auto spec_k = index_engine_->Count() < context->topk() ? index_engine_->Count() : context->topk();
SearchTask::ClusterResult(output_ids, output_distence, context->nq(), spec_k, result_set);
rc.Record("cluster result");
//step 4: pick up topk result
SearchTask::TopkResult(result_set, inner_k, metric_l2, context->GetResult());
rc.Record("reduce topk");
span = rc.RecordSection("reduce topk for context:" + context->Identity());
context->AccumReduceCost(span);
} catch (std::exception& ex) {
SERVER_LOG_ERROR << "SearchTask encounter exception: " << ex.what();
@ -97,7 +99,7 @@ std::shared_ptr<IScheduleTask> SearchTask::Execute() {
auto total_time = METRICS_MICROSECONDS(start_time, end_time);
CollectDurationMetrics(index_type_, total_time);
rc.Elapse("totally cost");
rc.ElapseFromBegin("totally cost");
return nullptr;
}


@ -150,7 +150,7 @@ namespace {
std::cout << "The top 1 result is wrong: " << result_id
<< " vs. " << search_id << std::endl;
} else {
std::cout << "Check result sucessfully" << std::endl;
std::cout << "No." << index-1 << " Check result successfully" << std::endl;
}
}
BLOCK_SPLITER
@ -236,6 +236,7 @@ ClientTest::Test(const std::string& address, const std::string& port) {
std::vector<std::pair<int64_t, RowRecord>> search_record_array;
{//add vectors
for (int i = 0; i < ADD_VECTOR_LOOP; i++) {//add vectors
TimeRecorder recorder("Add vector No." + std::to_string(i));
std::vector<RowRecord> record_array;
int64_t begin_index = i * BATCH_ROW_COUNT;
BuildVectors(begin_index, begin_index + BATCH_ROW_COUNT, record_array);
@ -257,6 +258,7 @@ ClientTest::Test(const std::string& address, const std::string& port) {
}
{//wait until index build finishes
TimeRecorder recorder("Build index");
std::cout << "Wait until build all index done" << std::endl;
Status stat = conn->BuildIndex(TABLE_NAME);
std::cout << "BuildIndex function call status: " << stat.ToString() << std::endl;


@ -210,17 +210,25 @@ ClientProxy::SearchVector(const std::string &table_name,
}
//step 3: search vectors
std::vector<thrift::TopKQueryResult> result_array;
ClientPtr()->interface()->SearchVector(result_array, table_name, thrift_records, thrift_ranges, topk);
std::vector<thrift::TopKQueryBinResult> result_array;
ClientPtr()->interface()->SearchVector2(result_array, table_name, thrift_records, thrift_ranges, topk);
//step 4: convert result array
for(auto& thrift_topk_result : result_array) {
TopKQueryResult result;
for(auto& thrift_query_result : thrift_topk_result.query_result_arrays) {
size_t id_count = thrift_topk_result.id_array.size()/sizeof(int64_t);
size_t dist_count = thrift_topk_result.distance_array.size()/ sizeof(double);
if(id_count != dist_count) {
return Status(StatusCode::UnknownError, "illegal result");
}
int64_t* id_ptr = (int64_t*)thrift_topk_result.id_array.data();
double* dist_ptr = (double*)thrift_topk_result.distance_array.data();
for(size_t i = 0; i < id_count; i++) {
QueryResult query_result;
query_result.id = thrift_query_result.id;
query_result.distance = thrift_query_result.distance;
query_result.id = id_ptr[i];
query_result.distance = dist_ptr[i];
result.query_result_arrays.emplace_back(query_result);
}


@ -16,19 +16,19 @@ namespace server {
DBWrapper::DBWrapper() {
zilliz::milvus::engine::Options opt;
ConfigNode& config = ServerConfig::GetInstance().GetConfig(CONFIG_DB);
opt.meta.backend_uri = config.GetValue(CONFIG_DB_URL);
std::string db_path = config.GetValue(CONFIG_DB_PATH);
ConfigNode& db_config = ServerConfig::GetInstance().GetConfig(CONFIG_DB);
opt.meta.backend_uri = db_config.GetValue(CONFIG_DB_URL);
std::string db_path = db_config.GetValue(CONFIG_DB_PATH);
opt.meta.path = db_path + "/db";
std::string db_slave_path = config.GetValue(CONFIG_DB_SLAVE_PATH);
std::string db_slave_path = db_config.GetValue(CONFIG_DB_SLAVE_PATH);
StringHelpFunctions::SplitStringByDelimeter(db_slave_path, ";", opt.meta.slave_paths);
int64_t index_size = config.GetInt64Value(CONFIG_DB_INDEX_TRIGGER_SIZE);
int64_t index_size = db_config.GetInt64Value(CONFIG_DB_INDEX_TRIGGER_SIZE);
if(index_size > 0) {//ensure larger than zero, unit is MB
opt.index_trigger_size = (size_t)index_size * engine::ONE_MB;
}
int64_t insert_buffer_size = config.GetInt64Value(CONFIG_DB_INSERT_BUFFER_SIZE, 4);
int64_t insert_buffer_size = db_config.GetInt64Value(CONFIG_DB_INSERT_BUFFER_SIZE, 4);
if (insert_buffer_size >= 1) {
opt.insert_buffer_size = insert_buffer_size * engine::ONE_GB;
}
@ -37,6 +37,9 @@ DBWrapper::DBWrapper() {
kill(0, SIGUSR1);
}
ConfigNode& cache_config = ServerConfig::GetInstance().GetConfig(CONFIG_CACHE);
opt.insert_cache_immediately_ = cache_config.GetBoolValue(CONFIG_INSERT_CACHE_IMMEDIATELY, false);
ConfigNode& serverConfig = ServerConfig::GetInstance().GetConfig(CONFIG_SERVER);
std::string mode = serverConfig.GetValue(CONFIG_CLUSTER_MODE, "single");
if (mode == "single") {
@ -55,8 +58,8 @@ DBWrapper::DBWrapper() {
//set archive config
engine::ArchiveConf::CriteriaT criterial;
int64_t disk = config.GetInt64Value(CONFIG_DB_ARCHIVE_DISK, 0);
int64_t days = config.GetInt64Value(CONFIG_DB_ARCHIVE_DAYS, 0);
int64_t disk = db_config.GetInt64Value(CONFIG_DB_ARCHIVE_DISK, 0);
int64_t days = db_config.GetInt64Value(CONFIG_DB_ARCHIVE_DAYS, 0);
if(disk > 0) {
criterial[engine::ARCHIVE_CONF_DISK] = disk;
}


@ -76,7 +76,7 @@ MilvusServer::StartService() {
return;
}
stdcxx::shared_ptr<ThreadManager> threadManager(ThreadManager::newSimpleThreadManager());
stdcxx::shared_ptr<ThreadManager> threadManager(ThreadManager::newSimpleThreadManager(16));
stdcxx::shared_ptr<PosixThreadFactory> threadFactory(new PosixThreadFactory());
threadManager->threadFactory(threadFactory);
threadManager->start();


@ -60,11 +60,22 @@ RequestHandler::SearchVector(std::vector<thrift::TopKQueryResult> &_return,
const std::vector<thrift::Range> &query_range_array,
const int64_t topk) {
// SERVER_LOG_DEBUG << "Entering RequestHandler::SearchVector";
BaseTaskPtr task_ptr = SearchVectorTask::Create(table_name, std::vector<std::string>(), query_record_array,
BaseTaskPtr task_ptr = SearchVectorTask1::Create(table_name, std::vector<std::string>(), query_record_array,
query_range_array, topk, _return);
RequestScheduler::ExecTask(task_ptr);
}
void
RequestHandler::SearchVector2(std::vector<thrift::TopKQueryBinResult> & _return,
const std::string& table_name,
const std::vector<thrift::RowRecord> & query_record_array,
const std::vector<thrift::Range> & query_range_array,
const int64_t topk) {
BaseTaskPtr task_ptr = SearchVectorTask2::Create(table_name, std::vector<std::string>(), query_record_array,
query_range_array, topk, _return);
RequestScheduler::ExecTask(task_ptr);
}
void
RequestHandler::SearchVectorInFiles(std::vector<::milvus::thrift::TopKQueryResult> &_return,
const std::string& table_name,
@ -73,7 +84,7 @@ RequestHandler::SearchVectorInFiles(std::vector<::milvus::thrift::TopKQueryResul
const std::vector<::milvus::thrift::Range> &query_range_array,
const int64_t topk) {
// SERVER_LOG_DEBUG << "Entering RequestHandler::SearchVectorInFiles. file_id_array size = " << std::to_string(file_id_array.size());
BaseTaskPtr task_ptr = SearchVectorTask::Create(table_name, file_id_array, query_record_array,
BaseTaskPtr task_ptr = SearchVectorTask1::Create(table_name, file_id_array, query_record_array,
query_range_array, topk, _return);
RequestScheduler::ExecTask(task_ptr);
}


@ -106,6 +106,29 @@ public:
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t topk);
/**
* @brief Query vector
*
* This method is used to query vectors in a table.
*
* @param table_name, name of the table to query.
* @param query_record_array, vectors to be queried.
* @param query_range_array, optional ranges for conditional search. If not specified, search the whole table.
* @param topk, how many of the most similar vectors to return.
*
* @return query binary result array.
*/
void SearchVector2(std::vector<::milvus::thrift::TopKQueryBinResult> & _return,
const std::string& table_name,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t topk);
/**
* @brief Internal use query interface
*


@ -12,6 +12,10 @@
#include "DBWrapper.h"
#include "version.h"
#ifdef MILVUS_ENABLE_PROFILING
#include "gperftools/profiler.h"
#endif
namespace zilliz {
namespace milvus {
namespace server {
@ -127,6 +131,18 @@ namespace {
}
}
}
std::string
GetCurrTimeStr() {
char tm_buf[20] = {0};
time_t tt;
time(&tt);
tt = tt + 8 * 60 * 60;
tm* t = gmtime(&tt);
sprintf(tm_buf, "%4d%02d%02d_%02d%02d%02d", (t->tm_year+1900), (t->tm_mon+1), (t->tm_mday),
(t->tm_hour), (t->tm_min), (t->tm_sec));
return tm_buf;
}
}
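Note that GetCurrTimeStr hardcodes a UTC+8 offset before calling gmtime, so profiling filenames are mislabeled on hosts in other timezones. A hedged portable sketch with the same output format (GetCurrTimeStrLocal is hypothetical; localtime_r is POSIX):

#include <cstdio>
#include <ctime>
#include <string>

// Hypothetical portable variant: let libc apply the host's timezone.
std::string GetCurrTimeStrLocal() {
    char tm_buf[20] = {0};
    time_t tt = time(nullptr);
    tm t;
    localtime_r(&tt, &t);  // thread-safe and honors TZ
    snprintf(tm_buf, sizeof(tm_buf), "%4d%02d%02d_%02d%02d%02d",
             t.tm_year + 1900, t.tm_mon + 1, t.tm_mday,
             t.tm_hour, t.tm_min, t.tm_sec);
    return tm_buf;
}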
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@ -178,7 +194,7 @@ ServerError CreateTableTask::OnExecute() {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
}
rc.Record("done");
rc.ElapseFromBegin("totally cost");
return SERVER_SUCCESS;
}
@ -223,7 +239,7 @@ ServerError DescribeTableTask::OnExecute() {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
}
rc.Record("done");
rc.ElapseFromBegin("totally cost");
return SERVER_SUCCESS;
}
@ -261,7 +277,7 @@ ServerError BuildIndexTask::OnExecute() {
return SetError(SERVER_BUILD_INDEX_ERROR, "Engine failed: " + stat.ToString());
}
rc.Elapse("totally cost");
rc.ElapseFromBegin("totally cost");
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
}
@ -298,7 +314,7 @@ ServerError HasTableTask::OnExecute() {
return SetError(DB_META_TRANSACTION_FAILED, "Engine failed: " + stat.ToString());
}
rc.Elapse("totally cost");
rc.ElapseFromBegin("totally cost");
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
}
@ -340,8 +356,6 @@ ServerError DeleteTableTask::OnExecute() {
}
}
rc.Record("check validation");
//step 3: delete table
std::vector<DB_DATE> dates;
stat = DBWrapper::DB()->DeleteTable(table_name_, dates);
@ -349,8 +363,7 @@ ServerError DeleteTableTask::OnExecute() {
return SetError(DB_META_TRANSACTION_FAILED, "Engine failed: " + stat.ToString());
}
rc.Record("deleta table");
rc.Elapse("total cost");
rc.ElapseFromBegin("totally cost");
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
}
@ -428,7 +441,13 @@ ServerError AddVectorTask::OnExecute() {
}
}
rc.Record("check validation");
rc.RecordSection("check validation");
#ifdef MILVUS_ENABLE_PROFILING
std::string fname = "/tmp/insert_" + std::to_string(this->record_array_.size()) +
"_" + GetCurrTimeStr() + ".profiling";
ProfilerStart(fname.c_str());
#endif
//step 3: prepare float data
std::vector<float> vec_f;
@ -439,12 +458,11 @@ ServerError AddVectorTask::OnExecute() {
return SetError(error_code, error_msg);
}
rc.Record("prepare vectors data");
rc.RecordSection("prepare vectors data");
//step 4: insert vectors
uint64_t vec_count = (uint64_t)record_array_.size();
stat = DBWrapper::DB()->InsertVectors(table_name_, vec_count, vec_f.data(), record_ids_);
rc.Record("add vectors to engine");
if(!stat.ok()) {
return SetError(SERVER_CACHE_ERROR, "Cache error: " + stat.ToString());
}
@ -455,8 +473,12 @@ ServerError AddVectorTask::OnExecute() {
return SetError(SERVER_ILLEGAL_VECTOR_ID, msg);
}
rc.Record("do insert");
rc.Elapse("total cost");
#ifdef MILVUS_ENABLE_PROFILING
ProfilerStop();
#endif
rc.RecordSection("add vectors to engine");
rc.ElapseFromBegin("totally cost");
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
@ -466,33 +488,21 @@ ServerError AddVectorTask::OnExecute() {
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
SearchVectorTask::SearchVectorTask(const std::string &table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> &query_record_array,
const std::vector<thrift::Range> &query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryResult> &result_array)
SearchVectorTaskBase::SearchVectorTaskBase(const std::string &table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> &query_record_array,
const std::vector<thrift::Range> &query_range_array,
const int64_t top_k)
: BaseTask(DQL_TASK_GROUP),
table_name_(table_name),
file_id_array_(file_id_array),
record_array_(query_record_array),
range_array_(query_range_array),
top_k_(top_k),
result_array_(result_array) {
top_k_(top_k) {
}
BaseTaskPtr SearchVectorTask::Create(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> & query_record_array,
const std::vector<thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryResult>& result_array) {
return std::shared_ptr<BaseTask>(new SearchVectorTask(table_name, file_id_array,
query_record_array, query_range_array, top_k, result_array));
}
ServerError SearchVectorTask::OnExecute() {
ServerError SearchVectorTaskBase::OnExecute() {
try {
TimeRecorder rc("SearchVectorTask");
@ -503,7 +513,7 @@ ServerError SearchVectorTask::OnExecute() {
return SetError(res, "Invalid table name: " + table_name_);
}
if(top_k_ <= 0) {
if(top_k_ <= 0 || top_k_ > 1024) {
return SetError(SERVER_INVALID_TOPK, "Invalid topk: " + std::to_string(top_k_));
}
if(record_array_.empty()) {
@ -531,7 +541,14 @@ ServerError SearchVectorTask::OnExecute() {
return SetError(error_code, error_msg);
}
rc.Record("check validation");
double span_check = rc.RecordSection("check validation");
#ifdef MILVUS_ENABLE_PROFILING
std::string fname = "/tmp/search_nq_" + std::to_string(this->record_array_.size()) +
"_top_" + std::to_string(this->top_k_) + "_" +
GetCurrTimeStr() + ".profiling";
ProfilerStart(fname.c_str());
#endif
//step 3: prepare float data
std::vector<float> vec_f;
@ -540,7 +557,7 @@ ServerError SearchVectorTask::OnExecute() {
return SetError(error_code, error_msg);
}
rc.Record("prepare vector data");
double span_prepare = rc.RecordSection("prepare vector data");
//step 4: search vectors
engine::QueryResults results;
@ -552,7 +569,7 @@ ServerError SearchVectorTask::OnExecute() {
stat = DBWrapper::DB()->Query(table_name_, file_id_array_, (size_t) top_k_, record_count, vec_f.data(), dates, results);
}
rc.Record("search vectors from engine");
double span_search = rc.RecordSection("search vectors from engine");
if(!stat.ok()) {
return SetError(DB_META_TRANSACTION_FAILED, "Engine failed: " + stat.ToString());
}
@ -567,26 +584,22 @@ ServerError SearchVectorTask::OnExecute() {
return SetError(SERVER_ILLEGAL_SEARCH_RESULT, msg);
}
rc.Record("do search");
//step 5: construct result array
for(uint64_t i = 0; i < record_count; i++) {
auto& result = results[i];
const auto& record = record_array_[i];
ConstructResult(results);
thrift::TopKQueryResult thrift_topk_result;
for(auto& pair : result) {
thrift::QueryResult thrift_result;
thrift_result.__set_id(pair.first);
thrift_result.__set_distance(pair.second);
#ifdef MILVUS_ENABLE_PROFILING
ProfilerStop();
#endif
thrift_topk_result.query_result_arrays.emplace_back(thrift_result);
}
double span_result = rc.RecordSection("construct result");
rc.ElapseFromBegin("totally cost");
result_array_.emplace_back(thrift_topk_result);
}
rc.Record("construct result");
rc.Elapse("total cost");
//step 6: print time cost percent
double total_cost = span_check + span_prepare + span_search + span_result;
SERVER_LOG_DEBUG << "SearchVectorTask: " << "check validation(" << (span_check/total_cost)*100.0 << "%)"
<< " prepare data(" << (span_prepare/total_cost)*100.0 << "%)"
<< " search(" << (span_search/total_cost)*100.0 << "%)"
<< " construct result(" << (span_result/total_cost)*100.0 << "%)";
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());
@ -595,6 +608,100 @@ ServerError SearchVectorTask::OnExecute() {
return SERVER_SUCCESS;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
SearchVectorTask1::SearchVectorTask1(const std::string &table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> &query_record_array,
const std::vector<thrift::Range> &query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryResult> &result_array)
: SearchVectorTaskBase(table_name, file_id_array, query_record_array, query_range_array, top_k),
result_array_(result_array) {
}
BaseTaskPtr SearchVectorTask1::Create(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> & query_record_array,
const std::vector<thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryResult>& result_array) {
return std::shared_ptr<BaseTask>(new SearchVectorTask1(table_name, file_id_array,
query_record_array, query_range_array, top_k, result_array));
}
ServerError SearchVectorTask1::ConstructResult(engine::QueryResults& results) {
for(uint64_t i = 0; i < results.size(); i++) {
auto& result = results[i];
const auto& record = record_array_[i];
thrift::TopKQueryResult thrift_topk_result;
for(auto& pair : result) {
thrift::QueryResult thrift_result;
thrift_result.__set_id(pair.first);
thrift_result.__set_distance(pair.second);
thrift_topk_result.query_result_arrays.emplace_back(thrift_result);
}
result_array_.emplace_back(thrift_topk_result);
}
return SERVER_SUCCESS;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
SearchVectorTask2::SearchVectorTask2(const std::string &table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> &query_record_array,
const std::vector<thrift::Range> &query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryBinResult> &result_array)
: SearchVectorTaskBase(table_name, file_id_array, query_record_array, query_range_array, top_k),
result_array_(result_array) {
}
BaseTaskPtr SearchVectorTask2::Create(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<thrift::RowRecord> & query_record_array,
const std::vector<thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<thrift::TopKQueryBinResult>& result_array) {
return std::shared_ptr<BaseTask>(new SearchVectorTask2(table_name, file_id_array,
query_record_array, query_range_array, top_k, result_array));
}
ServerError SearchVectorTask2::ConstructResult(engine::QueryResults& results) {
for(size_t i = 0; i < results.size(); i++) {
auto& result = results[i];
thrift::TopKQueryBinResult thrift_topk_result;
if(result.empty()) {
result_array_.emplace_back(thrift_topk_result);
continue;
}
std::string str_ids, str_distances;
str_ids.resize(sizeof(engine::IDNumber)*result.size());
str_distances.resize(sizeof(double)*result.size());
engine::IDNumber* ids_ptr = (engine::IDNumber*)str_ids.data();
double* distance_ptr = (double*)str_distances.data();
for(size_t k = 0; k < result.size(); k++) {
auto& pair = result[k];
ids_ptr[k] = pair.first;
distance_ptr[k] = pair.second;
}
thrift_topk_result.__set_id_array(str_ids);
thrift_topk_result.__set_distance_array(str_distances);
result_array_.emplace_back(thrift_topk_result);
}
return SERVER_SUCCESS;
}
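One caveat in ConstructResult: it writes through pointers cast from std::string::data(), which returns const char* before C++17. Writing through &str[0] (guaranteed writable since C++11) or memcpy is the strictly conforming equivalent; a hedged sketch for the id half (PackIds is hypothetical):

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Hypothetical conforming packer producing the same byte layout as ConstructResult.
std::string PackIds(const std::vector<int64_t>& ids) {
    std::string blob(ids.size() * sizeof(int64_t), '\0');
    if (!blob.empty()) {
        std::memcpy(&blob[0], ids.data(), blob.size());  // &blob[0] is writable in C++11
    }
    return blob;
}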
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
GetTableRowCountTask::GetTableRowCountTask(const std::string& table_name, int64_t& row_count)
: BaseTask(DDL_DML_TASK_GROUP),
@ -627,7 +734,7 @@ ServerError GetTableRowCountTask::OnExecute() {
row_count_ = (int64_t) row_count;
rc.Elapse("total cost");
rc.ElapseFromBegin("totally cost");
} catch (std::exception& ex) {
return SetError(SERVER_UNEXPECTED_ERROR, ex.what());


@ -129,7 +129,28 @@ private:
};
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class SearchVectorTask : public BaseTask {
class SearchVectorTaskBase : public BaseTask {
protected:
SearchVectorTaskBase(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t top_k);
ServerError OnExecute() override;
virtual ServerError ConstructResult(engine::QueryResults& results) = 0;
protected:
std::string table_name_;
std::vector<std::string> file_id_array_;
int64_t top_k_;
const std::vector<::milvus::thrift::RowRecord>& record_array_;
const std::vector<::milvus::thrift::Range>& range_array_;
};
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class SearchVectorTask1 : public SearchVectorTaskBase {
public:
static BaseTaskPtr Create(const std::string& table_name,
const std::vector<std::string>& file_id_array,
@ -139,24 +160,43 @@ public:
std::vector<::milvus::thrift::TopKQueryResult>& result_array);
protected:
SearchVectorTask(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t top_k,
SearchVectorTask1(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<::milvus::thrift::TopKQueryResult>& result_array);
ServerError OnExecute() override;
ServerError ConstructResult(engine::QueryResults& results) override;
private:
std::string table_name_;
std::vector<std::string> file_id_array_;
int64_t top_k_;
const std::vector<::milvus::thrift::RowRecord>& record_array_;
const std::vector<::milvus::thrift::Range>& range_array_;
std::vector<::milvus::thrift::TopKQueryResult>& result_array_;
};
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class SearchVectorTask2 : public SearchVectorTaskBase {
public:
static BaseTaskPtr Create(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<::milvus::thrift::TopKQueryBinResult>& result_array);
protected:
SearchVectorTask2(const std::string& table_name,
const std::vector<std::string>& file_id_array,
const std::vector<::milvus::thrift::RowRecord> & query_record_array,
const std::vector<::milvus::thrift::Range> & query_range_array,
const int64_t top_k,
std::vector<::milvus::thrift::TopKQueryBinResult>& result_array);
ServerError ConstructResult(engine::QueryResults& results) override;
private:
std::vector<::milvus::thrift::TopKQueryBinResult>& result_array_;
};
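The refactor is a textbook template method: SearchVectorTaskBase owns the shared pipeline (validation, data preparation, engine query) and defers only result serialization to ConstructResult. A hedged sketch of what a hypothetical third wire format would require:

// Hypothetical: a new result format plugs into the same pipeline with one override.
class SearchVectorTask3 : public SearchVectorTaskBase {
public:
    static BaseTaskPtr Create(/* same arguments, with the new result container */);
protected:
    ServerError ConstructResult(engine::QueryResults& results) override;
};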
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class GetTableRowCountTask : public BaseTask {
public:


@ -34,6 +34,8 @@ static const std::string CONFIG_LOG = "log_config";
static const std::string CONFIG_CACHE = "cache_config";
static const std::string CONFIG_CPU_CACHE_CAPACITY = "cpu_cache_capacity";
static const std::string CONFIG_GPU_CACHE_CAPACITY = "gpu_cache_capacity";
static const std::string CACHE_FREE_PERCENT = "cache_free_percent";
static const std::string CONFIG_INSERT_CACHE_IMMEDIATELY = "insert_cache_immediately";
static const std::string CONFIG_LICENSE = "license_config";
static const std::string CONFIG_LICENSE_PATH = "license_path";

File diff suppressed because it is too large


@ -104,6 +104,25 @@ class MilvusServiceIf {
*/
virtual void SearchVector(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk) = 0;
/**
* @brief Query vector
*
* This method is used to query vectors in a table.
*
* @param table_name, name of the table to query.
* @param query_record_array, vectors to be queried.
* @param query_range_array, optional ranges for conditional search. If not specified, search the whole table.
* @param topk, how many of the most similar vectors to return.
*
* @return query binary result array.
*/
virtual void SearchVector2(std::vector<TopKQueryBinResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk) = 0;
/**
* @brief Internal use query interface
*
@ -218,6 +237,9 @@ class MilvusServiceNull : virtual public MilvusServiceIf {
void SearchVector(std::vector<TopKQueryResult> & /* _return */, const std::string& /* table_name */, const std::vector<RowRecord> & /* query_record_array */, const std::vector<Range> & /* query_range_array */, const int64_t /* topk */) {
return;
}
void SearchVector2(std::vector<TopKQueryBinResult> & /* _return */, const std::string& /* table_name */, const std::vector<RowRecord> & /* query_record_array */, const std::vector<Range> & /* query_range_array */, const int64_t /* topk */) {
return;
}
void SearchVectorInFiles(std::vector<TopKQueryResult> & /* _return */, const std::string& /* table_name */, const std::vector<std::string> & /* file_id_array */, const std::vector<RowRecord> & /* query_record_array */, const std::vector<Range> & /* query_range_array */, const int64_t /* topk */) {
return;
}
@ -912,6 +934,139 @@ class MilvusService_SearchVector_presult {
};
typedef struct _MilvusService_SearchVector2_args__isset {
_MilvusService_SearchVector2_args__isset() : table_name(false), query_record_array(false), query_range_array(false), topk(false) {}
bool table_name :1;
bool query_record_array :1;
bool query_range_array :1;
bool topk :1;
} _MilvusService_SearchVector2_args__isset;
class MilvusService_SearchVector2_args {
public:
MilvusService_SearchVector2_args(const MilvusService_SearchVector2_args&);
MilvusService_SearchVector2_args& operator=(const MilvusService_SearchVector2_args&);
MilvusService_SearchVector2_args() : table_name(), topk(0) {
}
virtual ~MilvusService_SearchVector2_args() throw();
std::string table_name;
std::vector<RowRecord> query_record_array;
std::vector<Range> query_range_array;
int64_t topk;
_MilvusService_SearchVector2_args__isset __isset;
void __set_table_name(const std::string& val);
void __set_query_record_array(const std::vector<RowRecord> & val);
void __set_query_range_array(const std::vector<Range> & val);
void __set_topk(const int64_t val);
bool operator == (const MilvusService_SearchVector2_args & rhs) const
{
if (!(table_name == rhs.table_name))
return false;
if (!(query_record_array == rhs.query_record_array))
return false;
if (!(query_range_array == rhs.query_range_array))
return false;
if (!(topk == rhs.topk))
return false;
return true;
}
bool operator != (const MilvusService_SearchVector2_args &rhs) const {
return !(*this == rhs);
}
bool operator < (const MilvusService_SearchVector2_args & ) const;
uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
};
class MilvusService_SearchVector2_pargs {
public:
virtual ~MilvusService_SearchVector2_pargs() throw();
const std::string* table_name;
const std::vector<RowRecord> * query_record_array;
const std::vector<Range> * query_range_array;
const int64_t* topk;
uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
};
typedef struct _MilvusService_SearchVector2_result__isset {
_MilvusService_SearchVector2_result__isset() : success(false), e(false) {}
bool success :1;
bool e :1;
} _MilvusService_SearchVector2_result__isset;
class MilvusService_SearchVector2_result {
public:
MilvusService_SearchVector2_result(const MilvusService_SearchVector2_result&);
MilvusService_SearchVector2_result& operator=(const MilvusService_SearchVector2_result&);
MilvusService_SearchVector2_result() {
}
virtual ~MilvusService_SearchVector2_result() throw();
std::vector<TopKQueryBinResult> success;
Exception e;
_MilvusService_SearchVector2_result__isset __isset;
void __set_success(const std::vector<TopKQueryBinResult> & val);
void __set_e(const Exception& val);
bool operator == (const MilvusService_SearchVector2_result & rhs) const
{
if (!(success == rhs.success))
return false;
if (!(e == rhs.e))
return false;
return true;
}
bool operator != (const MilvusService_SearchVector2_result &rhs) const {
return !(*this == rhs);
}
bool operator < (const MilvusService_SearchVector2_result & ) const;
uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
};
typedef struct _MilvusService_SearchVector2_presult__isset {
_MilvusService_SearchVector2_presult__isset() : success(false), e(false) {}
bool success :1;
bool e :1;
} _MilvusService_SearchVector2_presult__isset;
class MilvusService_SearchVector2_presult {
public:
virtual ~MilvusService_SearchVector2_presult() throw();
std::vector<TopKQueryBinResult> * success;
Exception e;
_MilvusService_SearchVector2_presult__isset __isset;
uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
};
typedef struct _MilvusService_SearchVectorInFiles_args__isset {
_MilvusService_SearchVectorInFiles_args__isset() : table_name(false), file_id_array(false), query_record_array(false), query_range_array(false), topk(false) {}
bool table_name :1;
@ -1531,6 +1686,9 @@ class MilvusServiceClient : virtual public MilvusServiceIf {
void SearchVector(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void send_SearchVector(const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVector(std::vector<TopKQueryResult> & _return);
void SearchVector2(std::vector<TopKQueryBinResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void send_SearchVector2(const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVector2(std::vector<TopKQueryBinResult> & _return);
void SearchVectorInFiles(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<std::string> & file_id_array, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void send_SearchVectorInFiles(const std::string& table_name, const std::vector<std::string> & file_id_array, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVectorInFiles(std::vector<TopKQueryResult> & _return);
@ -1567,6 +1725,7 @@ class MilvusServiceProcessor : public ::apache::thrift::TDispatchProcessor {
void process_BuildIndex(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_AddVector(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_SearchVector(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_SearchVector2(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_SearchVectorInFiles(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_DescribeTable(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
void process_GetTableRowCount(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
@ -1581,6 +1740,7 @@ class MilvusServiceProcessor : public ::apache::thrift::TDispatchProcessor {
processMap_["BuildIndex"] = &MilvusServiceProcessor::process_BuildIndex;
processMap_["AddVector"] = &MilvusServiceProcessor::process_AddVector;
processMap_["SearchVector"] = &MilvusServiceProcessor::process_SearchVector;
processMap_["SearchVector2"] = &MilvusServiceProcessor::process_SearchVector2;
processMap_["SearchVectorInFiles"] = &MilvusServiceProcessor::process_SearchVectorInFiles;
processMap_["DescribeTable"] = &MilvusServiceProcessor::process_DescribeTable;
processMap_["GetTableRowCount"] = &MilvusServiceProcessor::process_GetTableRowCount;
@ -1670,6 +1830,16 @@ class MilvusServiceMultiface : virtual public MilvusServiceIf {
return;
}
void SearchVector2(std::vector<TopKQueryBinResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk) {
size_t sz = ifaces_.size();
size_t i = 0;
for (; i < (sz - 1); ++i) {
ifaces_[i]->SearchVector2(_return, table_name, query_record_array, query_range_array, topk);
}
ifaces_[i]->SearchVector2(_return, table_name, query_record_array, query_range_array, topk);
return;
}
void SearchVectorInFiles(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<std::string> & file_id_array, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk) {
size_t sz = ifaces_.size();
size_t i = 0;
@ -1767,6 +1937,9 @@ class MilvusServiceConcurrentClient : virtual public MilvusServiceIf {
void SearchVector(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
int32_t send_SearchVector(const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVector(std::vector<TopKQueryResult> & _return, const int32_t seqid);
void SearchVector2(std::vector<TopKQueryBinResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
int32_t send_SearchVector2(const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVector2(std::vector<TopKQueryBinResult> & _return, const int32_t seqid);
void SearchVectorInFiles(std::vector<TopKQueryResult> & _return, const std::string& table_name, const std::vector<std::string> & file_id_array, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
int32_t send_SearchVectorInFiles(const std::string& table_name, const std::vector<std::string> & file_id_array, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk);
void recv_SearchVectorInFiles(std::vector<TopKQueryResult> & _return, const int32_t seqid);


@ -120,6 +120,28 @@ class MilvusServiceHandler : virtual public MilvusServiceIf {
printf("SearchVector\n");
}
/**
* @brief Query vector
*
* This method is used to query vectors in a table.
*
* @param table_name, name of the table to query.
* @param query_record_array, vectors to be queried.
* @param query_range_array, optional ranges for conditional search. If not specified, search the whole table.
* @param topk, how many of the most similar vectors to return.
*
* @return query binary result array.
*/
void SearchVector2(std::vector<TopKQueryBinResult> & _return, const std::string& table_name, const std::vector<RowRecord> & query_record_array, const std::vector<Range> & query_range_array, const int64_t topk) {
// Your implementation goes here
printf("SearchVector2\n");
}
/**
* @brief Internal use query interface
*


@ -781,4 +781,119 @@ void TopKQueryResult::printTo(std::ostream& out) const {
out << ")";
}
TopKQueryBinResult::~TopKQueryBinResult() throw() {
}
void TopKQueryBinResult::__set_id_array(const std::string& val) {
this->id_array = val;
}
void TopKQueryBinResult::__set_distance_array(const std::string& val) {
this->distance_array = val;
}
std::ostream& operator<<(std::ostream& out, const TopKQueryBinResult& obj)
{
obj.printTo(out);
return out;
}
uint32_t TopKQueryBinResult::read(::apache::thrift::protocol::TProtocol* iprot) {
::apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
uint32_t xfer = 0;
std::string fname;
::apache::thrift::protocol::TType ftype;
int16_t fid;
xfer += iprot->readStructBegin(fname);
using ::apache::thrift::protocol::TProtocolException;
bool isset_id_array = false;
bool isset_distance_array = false;
while (true)
{
xfer += iprot->readFieldBegin(fname, ftype, fid);
if (ftype == ::apache::thrift::protocol::T_STOP) {
break;
}
switch (fid)
{
case 1:
if (ftype == ::apache::thrift::protocol::T_STRING) {
xfer += iprot->readBinary(this->id_array);
isset_id_array = true;
} else {
xfer += iprot->skip(ftype);
}
break;
case 2:
if (ftype == ::apache::thrift::protocol::T_STRING) {
xfer += iprot->readBinary(this->distance_array);
isset_distance_array = true;
} else {
xfer += iprot->skip(ftype);
}
break;
default:
xfer += iprot->skip(ftype);
break;
}
xfer += iprot->readFieldEnd();
}
xfer += iprot->readStructEnd();
if (!isset_id_array)
throw TProtocolException(TProtocolException::INVALID_DATA);
if (!isset_distance_array)
throw TProtocolException(TProtocolException::INVALID_DATA);
return xfer;
}
uint32_t TopKQueryBinResult::write(::apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
::apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot);
xfer += oprot->writeStructBegin("TopKQueryBinResult");
xfer += oprot->writeFieldBegin("id_array", ::apache::thrift::protocol::T_STRING, 1);
xfer += oprot->writeBinary(this->id_array);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldBegin("distance_array", ::apache::thrift::protocol::T_STRING, 2);
xfer += oprot->writeBinary(this->distance_array);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
}
void swap(TopKQueryBinResult &a, TopKQueryBinResult &b) {
using ::std::swap;
swap(a.id_array, b.id_array);
swap(a.distance_array, b.distance_array);
}
TopKQueryBinResult::TopKQueryBinResult(const TopKQueryBinResult& other19) {
id_array = other19.id_array;
distance_array = other19.distance_array;
}
TopKQueryBinResult& TopKQueryBinResult::operator=(const TopKQueryBinResult& other20) {
id_array = other20.id_array;
distance_array = other20.distance_array;
return *this;
}
void TopKQueryBinResult::printTo(std::ostream& out) const {
using ::apache::thrift::to_string;
out << "TopKQueryBinResult(";
out << "id_array=" << to_string(id_array);
out << ", " << "distance_array=" << to_string(distance_array);
out << ")";
}
}} // namespace


@ -63,6 +63,8 @@ class QueryResult;
class TopKQueryResult;
class TopKQueryBinResult;
typedef struct _Exception__isset {
_Exception__isset() : code(false), reason(false) {}
bool code :1;
@ -346,6 +348,47 @@ void swap(TopKQueryResult &a, TopKQueryResult &b);
std::ostream& operator<<(std::ostream& out, const TopKQueryResult& obj);
class TopKQueryBinResult : public virtual ::apache::thrift::TBase {
public:
TopKQueryBinResult(const TopKQueryBinResult&);
TopKQueryBinResult& operator=(const TopKQueryBinResult&);
TopKQueryBinResult() : id_array(), distance_array() {
}
virtual ~TopKQueryBinResult() throw();
std::string id_array;
std::string distance_array;
void __set_id_array(const std::string& val);
void __set_distance_array(const std::string& val);
bool operator == (const TopKQueryBinResult & rhs) const
{
if (!(id_array == rhs.id_array))
return false;
if (!(distance_array == rhs.distance_array))
return false;
return true;
}
bool operator != (const TopKQueryBinResult &rhs) const {
return !(*this == rhs);
}
bool operator < (const TopKQueryBinResult & ) const;
uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
virtual void printTo(std::ostream& out) const;
};
void swap(TopKQueryBinResult &a, TopKQueryBinResult &b);
std::ostream& operator<<(std::ostream& out, const TopKQueryBinResult& obj);
}} // namespace
#endif


@ -84,6 +84,14 @@ struct TopKQueryResult {
1: list<QueryResult> query_result_arrays; ///< TopK query result
}
/**
* @brief TopK query binary result
*/
struct TopKQueryBinResult {
1: required binary id_array; ///< id array, integer array
2: required binary distance_array; ///< distance array, double array
}
service MilvusService {
/**
* @brief Create table method
@ -158,6 +166,23 @@ service MilvusService {
4: list<Range> query_range_array,
5: i64 topk) throws(1: Exception e);
/**
* @brief Query vector
*
* This method is used to query vectors in a table.
*
* @param table_name, name of the table to query.
* @param query_record_array, vectors to be queried.
* @param query_range_array, optional ranges for conditional search. If not specified, search the whole table.
* @param topk, how many of the most similar vectors to return.
*
* @return query binary result array.
*/
list<TopKQueryBinResult> SearchVector2(2: string table_name,
3: list<RowRecord> query_record_array,
4: list<Range> query_range_array,
5: i64 topk) throws(1: Exception e);
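A hedged client-side sketch of the new call, using the generated MilvusServiceClient declared earlier (host, port, and table name are placeholders; thrift 0.11-era stdcxx pointers assumed, matching the server code in this diff):

#include <thrift/protocol/TBinaryProtocol.h>
#include <thrift/transport/TBufferTransports.h>
#include <thrift/transport/TSocket.h>
#include "MilvusService.h"  // generated header

using namespace apache::thrift;
using namespace apache::thrift::protocol;
using namespace apache::thrift::transport;

int main() {
    stdcxx::shared_ptr<TTransport> socket(new TSocket("127.0.0.1", 19530));  // port assumed
    stdcxx::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
    stdcxx::shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));
    milvus::thrift::MilvusServiceClient client(protocol);

    transport->open();
    std::vector<milvus::thrift::RowRecord> records;  // fill with query vectors
    std::vector<milvus::thrift::Range> ranges;       // empty: search the whole table
    std::vector<milvus::thrift::TopKQueryBinResult> results;
    client.SearchVector2(results, "example_table", records, ranges, 10);
    transport->close();
    return 0;
}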
/**
* @brief Internal use query interface
*


@ -12,131 +12,81 @@ namespace milvus {
namespace server {
TimeRecorder::TimeRecorder(const std::string &header,
TimeRecorder::TimeDisplayUnit unit,
int64_t log_level) :
header_(header),
time_unit_(unit),
log_level_(log_level) {
header_(header),
log_level_(log_level) {
start_ = last_ = stdclock::now();
span_ = 0.0;
}
TimeRecorder::~TimeRecorder() {
}
std::string
TimeRecorder::GetTimeSpanStr(TimeRecorder::TimeDisplayUnit &unit, double span) const {
std::string spanStr;
std::string unitStr;
TimeRecorder::GetTimeSpanStr(double span) {
std::string str_sec = std::to_string(span * 0.000001) + ((span > 1000000) ? " seconds" : " second");
std::string str_ms = std::to_string(span * 0.001) + " ms";
switch (unit) {
case TimeRecorder::eTimeAutoUnit: {
if (span >= 1000000) {
int64_t t = (int64_t) span;
int64_t hour, minute;
double second;
hour = t / 1000000 / 3600;
t -= hour * 3600 * 1000000;
minute = t / 1000000 / 60;
t -= minute * 60 * 1000000;
second = t * 0.000001;
spanStr += (hour < 10 ? "0" : "") + std::to_string(hour) + ":";
spanStr += (minute < 10 ? "0" : "") + std::to_string(minute) + ":";
spanStr += (second < 10 ? "0" : "") + std::to_string(second);
unitStr = "";
} else if (span >= 1000) {
spanStr = std::to_string(span * 0.001);
unitStr = " ms";
} else {
spanStr = std::to_string(span);
unitStr = " us";
}
}
break;
case TimeRecorder::eTimeHourUnit:
spanStr = std::to_string((span * 0.000001) / 3600);
unitStr = " hour";
break;
case TimeRecorder::eTimeMinuteUnit:
spanStr = std::to_string((span * 0.000001) / 60);
unitStr = " min";
break;
case TimeRecorder::eTimeSecondUnit:
spanStr = std::to_string(span * 0.000001);
unitStr = " sec";
break;
case TimeRecorder::eTimeMilliSecUnit:
spanStr = std::to_string(span * 0.001);
unitStr = " ms";
break;
case TimeRecorder::eTimeMicroSecUnit:
default:
spanStr = std::to_string(span);
unitStr = " us";
break;
}
return spanStr + unitStr;
return str_sec + " [" + str_ms + "]";
}
void
TimeRecorder::PrintTimeRecord(const std::string &msg, double span) {
std::string strLog;
if (!header_.empty()) strLog += header_ + ": ";
strLog += msg;
strLog += " (";
strLog += GetTimeSpanStr(time_unit_, span);
strLog += ")";
std::string str_log;
if (!header_.empty()) str_log += header_ + ": ";
str_log += msg;
str_log += " (";
str_log += TimeRecorder::GetTimeSpanStr(span);
str_log += ")";
switch (log_level_) {
case 0: {
SERVER_LOG_TRACE << strLog;
SERVER_LOG_TRACE << str_log;
break;
}
case 1: {
SERVER_LOG_DEBUG << strLog;
SERVER_LOG_DEBUG << str_log;
break;
}
case 2: {
SERVER_LOG_INFO << strLog;
SERVER_LOG_INFO << str_log;
break;
}
case 3: {
SERVER_LOG_WARNING << strLog;
SERVER_LOG_WARNING << str_log;
break;
}
case 4: {
SERVER_LOG_ERROR << strLog;
SERVER_LOG_ERROR << str_log;
break;
}
case 5: {
SERVER_LOG_FATAL << strLog;
SERVER_LOG_FATAL << str_log;
break;
}
default: {
SERVER_LOG_INFO << strLog;
SERVER_LOG_INFO << str_log;
break;
}
}
}
void
TimeRecorder::Record(const std::string &msg) {
double
TimeRecorder::RecordSection(const std::string &msg) {
stdclock::time_point curr = stdclock::now();
span_ = (std::chrono::duration<double, std::micro>(curr - last_)).count();
double span = (std::chrono::duration<double, std::micro>(curr - last_)).count();
last_ = curr;
PrintTimeRecord(msg, span_);
}
void
TimeRecorder::Elapse(const std::string &msg) {
stdclock::time_point curr = stdclock::now();
span_ = (std::chrono::duration<double, std::micro>(curr - start_)).count();
PrintTimeRecord(msg, span_);
PrintTimeRecord(msg, span);
return span;
}
double
TimeRecorder::Span() {
return span_;
TimeRecorder::ElapseFromBegin(const std::string &msg) {
stdclock::time_point curr = stdclock::now();
double span = (std::chrono::duration<double, std::micro>(curr - start_)).count();
PrintTimeRecord(msg, span);
return span;
}
}
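The reworked recorder exposes two explicit operations: RecordSection measures from the previous mark, ElapseFromBegin from construction; both print and return the span in microseconds. A hedged usage sketch (include path and workload are placeholders):

#include <vector>
#include "TimeRecorder.h"  // assumed include path

void TimedPipeline() {
    zilliz::milvus::server::TimeRecorder rc("TimedPipeline");  // log_level defaults to debug
    std::vector<float> data(1 << 20, 1.0f);                    // stand-in workload
    rc.RecordSection("allocate buffer");   // span since the previous mark
    double sum = 0.0;
    for (float v : data) sum += v;
    double section_us = rc.RecordSection("sum buffer");
    (void)sum; (void)section_us;
    rc.ElapseFromBegin("totally cost");    // span since construction
}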


@ -17,36 +17,24 @@ class TimeRecorder {
    using stdclock = std::chrono::high_resolution_clock;

public:
-    enum TimeDisplayUnit {
-        eTimeAutoUnit = 0,
-        eTimeHourUnit,
-        eTimeMinuteUnit,
-        eTimeSecondUnit,
-        eTimeMilliSecUnit,
-        eTimeMicroSecUnit,
-    };
-
    TimeRecorder(const std::string &header,
-                 TimeRecorder::TimeDisplayUnit unit = TimeRecorder::eTimeAutoUnit,
-                 int64_t log_level = 1); //trace = 0, debug = 1, info = 2, warn = 3, error = 4, critical = 5
+                 int64_t log_level = 1);

-    void Record(const std::string &msg);
+    ~TimeRecorder(); //trace = 0, debug = 1, info = 2, warn = 3, error = 4, critical = 5

-    void Elapse(const std::string &msg);
+    double RecordSection(const std::string &msg);

-    double Span();
+    double ElapseFromBegin(const std::string &msg);

+    static std::string GetTimeSpanStr(double span);

private:
-    std::string GetTimeSpanStr(TimeRecorder::TimeDisplayUnit &unit, double span) const;
    void PrintTimeRecord(const std::string &msg, double span);

private:
    std::string header_;
-    TimeRecorder::TimeDisplayUnit time_unit_;
    stdclock::time_point start_;
    stdclock::time_point last_;
-    double span_;
    int64_t log_level_;
};

View File

@ -61,7 +61,6 @@ ValidateTableIndexType(int32_t index_type) {
return SERVER_INVALID_INDEX_TYPE;
}
SERVER_LOG_DEBUG << "Index type: " << index_type;
return SERVER_SUCCESS;
}

View File

@ -2,7 +2,8 @@ ARROW_VERSION=zilliz
BOOST_VERSION=1.70.0
BZIP2_VERSION=1.0.6
EASYLOGGINGPP_VERSION=v9.96.7
-FAISS_VERSION=7b07685
+FAISS_VERSION=v1.5.3
+MKL_VERSION=2019.4.243
GTEST_VERSION=1.8.1
JSONCONS_VERSION=0.126.0
LAPACK_VERSION=v3.8.0
@ -19,5 +20,7 @@ YAMLCPP_VERSION=0.6.2
ZLIB_VERSION=v1.2.11
ZSTD_VERSION=v1.4.0
AWS_VERSION=1.7.125
+LIBUNWIND_VERSION=1.3.1
+GPERFTOOLS_VERSION=2.7
# vim: set filetype=sh:
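The trailing vim modeline marks this pin file as shell syntax, which matches the usual consumption pattern: a build script sources it and reads the variables. A hypothetical sketch (the script and repository URL are assumptions, not taken from this change; FAISS_VERSION=v1.5.3 is a real tag of facebookresearch/faiss):

# source the pinned versions, then fetch a dependency at its pin
. ./versions
git clone --depth 1 --branch "${FAISS_VERSION}" https://github.com/facebookresearch/faiss.git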

View File

@ -28,7 +28,6 @@ set(unittest_libs
easyloggingpp
pthread
metrics
-    openblas
gfortran
prometheus-cpp-pull
prometheus-cpp-push

View File

@ -23,10 +23,7 @@ link_directories("/usr/local/cuda/lib64")
include_directories(/usr/include/mysql)
#add_definitions(-DBOOST_ERROR_CODE_HEADER_ONLY)
set(db_test_src
-    #${unittest_srcs}
${config_files}
${cache_srcs}
${db_srcs}
@ -38,7 +35,6 @@ set(db_test_src
cuda_add_executable(db_test ${db_test_src})
set(db_libs
-    libgpufaiss.a
faiss
cudart
cublas
@ -49,6 +45,11 @@ set(db_libs
mysqlpp
)
+if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
+    set(db_libs ${db_libs} ${MKL_LIBS} ${MKL_LIBS})
+endif()
target_link_libraries(db_test ${db_libs} ${unittest_libs})
install(TARGETS db_test DESTINATION bin)
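${MKL_LIBS} is presumably listed twice because a single-pass linker such as GNU ld resolves symbols left to right while the static MKL archives reference each other circularly; the second mention lets the linker pick up symbols the first pass missed. A hypothetical equivalent using ld's archive-group flags (not what this build uses) would be:

if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
    # --start-group/--end-group makes ld rescan the archives until no new
    # undefined symbols are resolved, so each library is named only once
    set(db_libs ${db_libs} -Wl,--start-group ${MKL_LIBS} -Wl,--end-group)
endif()

The same BUILD_FAISS_WITH_MKL guard recurs in the wrapper, metrics, and server test targets below.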

View File

@ -273,7 +273,7 @@ TEST_F(NewMemManagerTest, INSERT_TEST) {
int insert_loop = 20;
for (int i = 0; i < insert_loop; ++i) {
-        int64_t nb = 409600;
+        int64_t nb = 40960;
std::vector<float> xb;
BuildVectors(nb, xb);
engine::IDNumbers vector_ids;
@ -308,7 +308,7 @@ TEST_F(NewMemManagerTest, CONCURRENT_INSERT_SEARCH_TEST) {
engine::IDNumbers vector_ids;
engine::IDNumbers target_ids;
-    int64_t nb = 409600;
+    int64_t nb = 40960;
std::vector<float> xb;
BuildVectors(nb, xb);

View File

@ -24,7 +24,6 @@ set(wrapper_libs
stdc++
boost_system_static
boost_filesystem_static
-    libgpufaiss.a
faiss
cudart
cublas
@ -35,6 +34,10 @@ set(wrapper_libs
zstd
lz4
)
+if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
+    set(wrapper_libs ${wrapper_libs} ${MKL_LIBS} ${MKL_LIBS})
+endif()
target_link_libraries(wrapper_test ${wrapper_libs} ${unittest_libs})
set(topk_test_src

View File

@ -10,8 +10,6 @@
include_directories(../../src)
aux_source_directory(../../src/db db_srcs)
aux_source_directory(../../src/config config_files)
aux_source_directory(../../src/cache cache_srcs)
@ -33,21 +31,10 @@ include_directories(../../third_party/build/include)
link_directories(../../third_party/build/lib)
include_directories(/usr/local/cuda/include)
link_directories("/usr/local/cuda/lib64")
-#include_directories(../db/utils.h)
include_directories(../../src/metrics)
include_directories(/usr/include/mysql)
-#set(metrics_src_files
-#        ../../src/metrics/Metrics.cpp
-#        ../../src/metrics/Metrics.h
-#        ../../src/metrics/PrometheusMetrics.cpp
-#        ../../src/metrics/MetricBase.h
-#        ../../src/server/ServerConfig.cpp
-#        ../../src/utils/CommonUtil.cpp
-#        ../../src/utils/TimeRecorder.cpp
-#        )
set(count_test_src
${config_files}
${cache_srcs}
@ -62,7 +49,6 @@ set(count_test_src
add_executable(metrics_test ${count_test_src} ${require_files} )
target_link_libraries(metrics_test
-    libgpufaiss.a
faiss
cudart
cublas
@ -77,5 +63,8 @@ target_link_libraries(metrics_test
mysqlpp
${unittest_libs}
)
+if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
+    target_link_libraries(metrics_test ${MKL_LIBS} ${MKL_LIBS})
+endif()
install(TARGETS metrics_test DESTINATION bin)

View File

@ -33,7 +33,6 @@ cuda_add_executable(server_test
set(require_libs
stdc++
-    libgpufaiss.a
faiss
cudart
cublas
@ -48,6 +47,10 @@ set(require_libs
pthread
)
+if(${BUILD_FAISS_WITH_MKL} STREQUAL "ON")
+    set(require_libs ${require_libs} ${MKL_LIBS} ${MKL_LIBS})
+endif()
target_link_libraries(server_test
${require_libs}
${cuda_library}

View File

@ -19,15 +19,6 @@ namespace {
static const std::string LOG_FILE_PATH = "./milvus/conf/log_config.conf";
-using TimeUnit = server::TimeRecorder::TimeDisplayUnit;
-
-double TestTimeRecorder(TimeUnit unit, int64_t log_level, int64_t sleep_ms) {
-    server::TimeRecorder rc("test rc", unit, log_level);
-    rc.Record("begin");
-    std::this_thread::sleep_for(std::chrono::milliseconds(sleep_ms));
-    rc.Elapse("end");
-    return rc.Span();
-}
}
TEST(UtilTest, EXCEPTION_TEST) {
@ -124,23 +115,6 @@ TEST(UtilTest, STRINGFUNCTIONS_TEST) {
}
-TEST(UtilTest, TIMERECORDER_TEST) {
-    double span = TestTimeRecorder(TimeUnit::eTimeAutoUnit, 0, 1001);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeAutoUnit, 0, 101);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeHourUnit, 1, 10);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeMinuteUnit, 2, 10);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeSecondUnit, 3, 10);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeMilliSecUnit, 4, 10);
-    ASSERT_GT(span, 0.0);
-    span = TestTimeRecorder(TimeUnit::eTimeMicroSecUnit, -1, 10);
-    ASSERT_GT(span, 0.0);
-}
TEST(UtilTest, BLOCKINGQUEUE_TEST) {
server::BlockingQueue<std::string> bq;