################################ Basic configuration #################################
server.port = 9096
# Note: If you need to access the service from external machines via IP + port, do not set this to localhost; 0.0.0.0 is recommended
server.address = 0.0.0.0
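# Example (assumption: local-only deployment); binding to the loopback address makes the service reachable from this machine only
# server.address = 127.0.0.1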
# Fixes serialized dates being off by 8 hours (server time zone is GMT+8)
spring.jackson.time-zone = GMT+8
spring.jackson.date-format = yyyy-MM-dd HH:mm:ss
# datacap security management configuration
datacap.security.secret = DataCapSecretKey
datacap.security.expiration = 86400000
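# Note: 86400000 = 24 * 60 * 60 * 1000, i.e. a 24-hour expiration assuming the unit is milliseconds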
# datacap editor configuration
datacap.editor.sugs.maxSize = 1000
################################ Web configuration #################################
# Forward framework-level errors (e.g. no handler found) to custom exception handling
spring.mvc.throw-exception-if-no-handler-found = true
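# Note: spring.resources.add-mappings is the legacy (pre Spring Boot 2.4) name for spring.web.resources.add-mappings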
spring.resources.add-mappings = false
spring.web.resources.add-mappings = true
################################ Database configuration #################################
# If you enable MySQL storage, modify the following configuration
# For MySQL >= 8.x, allowPublicKeyRetrieval=true must be set (already included in the URL below)
spring.datasource.url = jdbc:mysql://mysql:3306/datacap?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&useSSL=false&useOldAliasMetadataBehavior=true&jdbcCompliantTruncation=false&allowPublicKeyRetrieval=true
spring.datasource.username = root
spring.datasource.password = 12345678
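# Example (hypothetical local instance instead of the mysql container hostname; placeholder password):
# spring.datasource.url = jdbc:mysql://localhost:3306/datacap?useUnicode=true&characterEncoding=UTF-8&useSSL=false&allowPublicKeyRetrieval=true
# spring.datasource.username = root
# spring.datasource.password = your-password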
################################ Cache configuration #################################
# Redis connection settings
spring.redis.host = localhost
spring.redis.port = 6379
spring.redis.database = 0
# If your Redis server requires authentication,
# uncomment the following configuration and enter the password
# spring.redis.password=
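# Example (hypothetical remote Redis with authentication; placeholder host and password):
# spring.redis.host = redis.internal.example.com
# spring.redis.port = 6379
# spring.redis.password = your-redis-password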
################################ Executor configuration #################################
# If this directory is not set,
# the system uses the project root directory and creates the data subdirectory under it
# ------ Apache SeaTunnel for Spark ------ #
datacap.executor.data =
datacap.executor.way = LOCAL
datacap.executor.mode = CLIENT
datacap.executor.engine = SPARK
datacap.executor.startScript = start-seatunnel-spark-connector-v2.sh
datacap.executor.seatunnel.home = /opt/lib/seatunnel
# ------ Apache SeaTunnel for Flink ------ #
# datacap.executor.data=
# datacap.executor.way=LOCAL
# datacap.executor.mode=CLIENT
# datacap.executor.engine=FLINK
# datacap.executor.startScript=start-seatunnel-flink-13-connector-v2.sh
# datacap.executor.seatunnel.home=/opt/lib/seatunnel
# ------ Apache SeaTunnel native engine ------ #
# datacap.executor.data=
## Only LOCAL is supported
# datacap.executor.way=LOCAL
# datacap.executor.mode=CLIENT
# datacap.executor.engine=SEATUNNEL
# datacap.executor.startScript=seatunnel.sh
# datacap.executor.seatunnel.home=/opt/lib/seatunnel
################################ Upload configuration #################################
datacap.config.data =
datacap.cache.data =
################################ OpenAI configuration #################################
datacap.openai.backend = https://api.openai.com
datacap.openai.token =
# Supported models:
# GPT_35_TURBO
# GPT_35_TURBO_0301
# GPT_35_TURBO_0613
# GPT_35_TURBO_16K
# GPT_35_TURBO_16K_0613
# GPT_4
# GPT_4_0314
# GPT_4_32K
# GPT_4_32K_0314
# GPT_4_0613
# GPT_4_32K_0613
# GPT_4O
datacap.openai.model = GPT_35_TURBO_0613
# Timeout for accessing the remote API, in seconds
datacap.openai.timeout = 30
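# Example (hypothetical OpenAI-compatible gateway; replace with your own endpoint):
# datacap.openai.backend = https://openai-gateway.example.com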
################################ System configuration #################################
# Whether to enable new user registration
datacap.registration.enable =
# Whether to enable the verification code (captcha); it is applied to registration & login
datacap.captcha.enable =
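# Example (assumed boolean values for the two flags left empty above):
# datacap.registration.enable = true
# datacap.captcha.enable = true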
# Maximum number of entries the cache buffers; increasing it requires expanding the JVM memory
datacap.cache.maximum = 100000
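# Rough sizing example (assuming ~1 KB per cached entry): 100000 entries take roughly 100 MB of heap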
# Cache expiration time, in minutes
datacap.cache.expiration = 5
# Whether to print the currently executed SQL
datacap.audit.sql.print = false
################################ Pipeline configuration #################################
# Maximum number of concurrently running pipelines
datacap.pipeline.maxRunning = 100
# Maximum number of pipelines waiting in the queue
datacap.pipeline.maxQueue = 200
# On service restart, pipelines left in the RUNNING state are reset to this status.
datacap.pipeline.reset = STOPPED
################################ DataSet configuration #################################
datacap.dataset.type = ClickHouse
datacap.dataset.host = clickhouse
datacap.dataset.port = 8123
datacap.dataset.username =
datacap.dataset.password =
datacap.dataset.database = datacap
datacap.dataset.tablePrefix = datacap_
datacap.dataset.tableDefaultEngine = MergeTree
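# Example (placeholder password; default is ClickHouse's built-in user):
# datacap.dataset.username = default
# datacap.dataset.password = your-clickhouse-password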
################################ Plugin configuration #################################
datacap.parser.sql.defaultEngine = Trino
################################ File System configuration #################################
# File storage type; refer to the datacap-fs-<Type> modules. Defaults to Local
# ------ Local File System ------ #
datacap.fs.type = Local
datacap.fs.access =
datacap.fs.secret =
datacap.fs.endpoint =
datacap.fs.bucket =
# ------ Qiniu File System ------ #
#datacap.fs.type=Qiniu
#datacap.fs.access=
#datacap.fs.secret=
#datacap.fs.endpoint=
#datacap.fs.bucket=
# ------ Aliyun OSS File System ------ #
#datacap.fs.type=AliOss
#datacap.fs.access=
#datacap.fs.secret=
#datacap.fs.endpoint=
#datacap.fs.bucket=
################################# Experimental features #################################
# Dynamically adds a row limit to the SQL of a query at execution time; currently this only applies to user-issued queries
# If the SQL already specifies a row limit, it is left unchanged
datacap.experimental.autoLimit = true
# Directory for data storage; {user.dir} is supported, or a relative path may be specified
datacap.experimental.data = {user.dir}/data
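# Example with a relative path (resolved per the note above):
# datacap.experimental.data = ./data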
# The path where user avatars are uploaded
# `{username}` is a fixed placeholder that is automatically replaced with the current user's name; customizing the format is not yet supported
datacap.experimental.avatarPath = {username}/avatar/
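# Example: for user admin, this resolves to admin/avatar/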