[Core] [Fs] Support Qiniu cloud storage (#694)

qianmoQ 2024-03-01 21:19:03 +08:00 committed by GitHub
commit a733d8efa1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
54 changed files with 666 additions and 71 deletions

View File

@ -0,0 +1,113 @@
################################### Basic configuration #################################
server.port=9096
server.address=localhost
# Fixes timestamps being serialized 8 hours off (GMT+8)
spring.jackson.time-zone=GMT+8
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
# datacap security management configuration
datacap.security.secret=DataCapSecretKey
datacap.security.expiration=86400000
# datacap editor configuration
datacap.editor.sugs.maxSize=1000
################################ Web configuration #################################
# Forward framework preset errors to custom handlers
spring.mvc.throw-exception-if-no-handler-found=true
spring.resources.add-mappings=false
spring.web.resources.add-mappings=true
################################ Database configuration #################################
# If you use MySQL storage, modify the following configuration
# If MySQL version >= 8.x, set allowPublicKeyRetrieval=true
spring.datasource.url=jdbc:mysql://mysql:3306/datacap?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&useSSL=false&useOldAliasMetadataBehavior=true&jdbcCompliantTruncation=false&allowPublicKeyRetrieval=true
spring.datasource.username=root
spring.datasource.password=12345678
################################ Cache configuration #################################
# Redis environment settings
spring.redis.host=localhost
spring.redis.port=6379
spring.redis.database=0
# If your Redis server requires a password,
# uncomment the following line and set it
# spring.redis.password=
################################ Executor configuration #################################
# If this directory is not set,
# the system builds a data subdirectory under the project root
datacap.executor.data=
datacap.executor.way=LOCAL
datacap.executor.mode=CLIENT
datacap.executor.seatunnel.home=/opt/lib/seatunnel
################################ Upload configuration #################################
datacap.config.data=
datacap.cache.data=
############################### OpenAI configuration #################################
datacap.openai.backend=https://api.openai.com
datacap.openai.token=
# Supported models:
# GPT_35_TURBO
# GPT_35_TURBO_0301
# GPT_35_TURBO_0613
# GPT_35_TURBO_16K
# GPT_35_TURBO_16K_0613
# GPT_4
# GPT_4_0314
# GPT_4_32K
# GPT_4_32K_0314
# GPT_4_0613
# GPT_4_32K_0613
datacap.openai.model=GPT_35_TURBO_0613
# Timeout for remote API calls, in seconds
datacap.openai.timeout=30
############################### System configuration #################################
# Whether to enable new user registration
datacap.registration.enable=
# Whether to enable captcha verification; it applies to registration & login
datacap.captcha.enable=
# Maximum number of cached entries; larger values require more JVM memory
datacap.cache.maximum=100000
# Cache expiration, in minutes
datacap.cache.expiration=5
# Whether to print the currently executed SQL
datacap.audit.sql.print=false
################################# Pipeline configuration #################################
# Maximum number of running pipelines
datacap.pipeline.maxRunning=100
# Maximum pipeline queue size
datacap.pipeline.maxQueue=200
# On service restart, pipelines left in RUNNING state are reset to this status.
datacap.pipeline.reset=STOPPED
################################# DataSet configuration #################################
datacap.dataset.type=ClickHouse
datacap.dataset.host=localhost
datacap.dataset.port=8123
datacap.dataset.username=
datacap.dataset.password=
datacap.dataset.database=datacap
datacap.dataset.tablePrefix=datacap_
datacap.dataset.tableDefaultEngine=MergeTree
################################# Plugin configuration #################################
datacap.parser.sql.defaultEngine=Trino
################################# Experimental features #################################
# Dynamically appends a row limit to query SQL; currently this only applies to user-initiated queries
# If the SQL already specifies a row limit, it is not modified
datacap.experimental.autoLimit=true
# Data storage directory; {user.dir} is supported, or a relative path may be specified
datacap.experimental.data={user.dir}/data
# Upload path for user avatars
# `{username}` is a fixed placeholder (not currently modifiable) that is automatically replaced with the current user name
datacap.experimental.avatarPath={username}/avatar/
# File storage type; see the datacap-fs-<Type> modules. Defaults to Local
datacap.experimental.fs.type=Local
datacap.experimental.fs.access=
datacap.experimental.fs.secret=
datacap.experimental.fs.endpoint=
datacap.experimental.fs.bucket=
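
As an illustration only, a Qiniu-backed setup might look like the following; the type name `Qiniu` matches the datacap-fs-qiniu plugin added in this commit, and the credential values are placeholders rather than shipped defaults:

```properties
datacap.experimental.fs.type=Qiniu
datacap.experimental.fs.access=<your-access-key>
datacap.experimental.fs.secret=<your-secret-key>
datacap.experimental.fs.endpoint=<your-bucket-domain>
datacap.experimental.fs.bucket=<your-bucket-name>
```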

View File

@ -394,6 +394,11 @@
            <artifactId>datacap-fs-local</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>io.edurt.datacap</groupId>
            <artifactId>datacap-fs-qiniu</artifactId>
            <version>${project.version}</version>
        </dependency>
    </dependencies>
<build>

docker-compose.yml Normal file
View File

@ -0,0 +1,22 @@
version: '3.8'

services:
  mysql:
    image: mysql:latest
    environment:
      MYSQL_ROOT_PASSWORD: 12345678
      MYSQL_DATABASE: datacap
    ports:
      - "3306:3306"
    volumes:
      - ./core/datacap-server/src/main/schema/datacap.sql:/docker-entrypoint-initdb.d/schema.sql

  datacap:
    image: qianmoq/datacap:latest
    restart: always
    ports:
      - "9099:9099"
    depends_on:
      - mysql
    volumes:
      - ./configure/docker/application.properties:/opt/app/datacap/configure/application.properties

View File

@ -0,0 +1,53 @@
---
title: Ad-hoc Query
---
In the dataset list, each row ends with an `Action` button, roughly as shown below
![img.png](img.png)
Clicking the `Action` button takes you to the `Ad-hoc Query` page
![img_1.png](img_1.png)
The page has two panes: the left side holds the current dataset's `Metrics` & `Dimensions`, and the right side holds the `Query` configuration
Dragging `Metrics` & `Dimensions` from the left pane shows the query results on the right
![img_2.png](img_2.png)
### Metric configuration
When the query columns include a metric, click its configure button to open the window below
![img_3.png](img_3.png)
You can configure the metric's `Expression`, `Alias`, and `Sort Order`.
!!! warning
    Different metric types support different expressions
### Dimension configuration
When the query columns include a dimension, click its configure button to open the window below
![img_4.png](img_4.png)
You can configure the dimension's `Alias`, `Sort Order`, and `Custom Function`.
### Chart configuration
Once a query succeeds, several chart types can be configured.
![img_5.png](img_5.png)
The currently supported charts can be customized to your needs.
### Publish a chart
After configuring a chart, click the `Publish` button to open the window below
![img_6.png](img_6.png)
Enter a name for the chart and click `Publish`; once published, the chart appears in the chart list.

Binary files not shown (7 images added: 327 KiB, 141 KiB, 120 KiB, 31 KiB, 32 KiB, 18 KiB, 13 KiB)

View File

@ -0,0 +1,25 @@
---
title: Clear Data
---
Datasets support clearing their data: each row in the dataset list ends with a `Clear Data` button, as shown below
![img.png](img.png)
!!! note
    This action is only available when the current dataset contains data.
    When the dataset is empty, it is shown as follows
    ![img_2.png](img_2.png)
    and the action is disabled.
Clicking the `Clear Data` button opens the `Clear Data` window
![img_1.png](img_1.png)
The window shows the dataset's `Total Rows` and `Total Size`
After confirming, click the `Clear Data` button and the system creates a background task that clears the data.

Binary files not shown (3 images added: 65 KiB, 44 KiB, 60 KiB)

View File

@ -0,0 +1,39 @@
---
title: Create & Edit
---
After running a query on the query page, the following toolbar appears above the result table
![img.png](img.png)
Click the button highlighted in red to go to the dataset creation page
![img_1.png](img_1.png)
The page shows a data preview; click the configure button at the top to open the window below
![img_2.png](img_2.png)
On this page you can configure the `Data Columns` and `Basic Information`, switching tabs for the relevant settings.
!!! note
    Dataset columns come in two kinds: `virtual columns` and `real columns`
    A `virtual column` is not materialized in the underlying storage (it is computed by a function and rendered only at query time), while a `real column` is built in the underlying storage, giving better query performance.
### Virtual columns
Click the `Add` button under each row's `Action` settings to add a virtual column
![img_3.png](img_3.png)
Some settings are unavailable for virtual columns because no physical storage operation is performed.
### Basic configuration
Click the `Data Configuration` tab at the top; the options are shown below
![img_4.png](img_4.png)
After completing the basic configuration, click the `Create Dataset` button at the top to build the dataset in the background. You will then be redirected to the dataset list.

Binary files not shown (5 images added: 196 KiB, 118 KiB, 162 KiB, 92 KiB, 91 KiB)

View File

@ -0,0 +1,19 @@
---
title: Sync History
---
Datasets let you review their synchronization history: each row in the dataset list ends with a `Sync History` button, as shown below
![img.png](img.png)
Clicking the `Sync History` button opens the `Sync History` window
![img_1.png](img_1.png)
The window lists all synchronization runs for the dataset, covering both manual and scheduled syncs.
If a run failed, its error message can be viewed by clicking the run's status button.
![img_2.png](img_2.png)
This action is only available for runs in the failed state.

Binary files not shown (3 images added: 57 KiB, 176 KiB, 108 KiB)

View File

@ -1,65 +0,0 @@
---
title: Dataset
---
After installation, click the `Dataset` menu at the top to open the dataset page.
![img.png](img.png)
### Add a dataset
---
Datasets are created from the query page: after a successful query, the following toolbar appears above the result table
![img_1.png](img_1.png)
Click the button highlighted in red to go to the dataset creation page
![img_2.png](img_2.png)
The page has two parts: a data preview at the top and the column configuration for the dataset at the bottom. When configuration is complete, click the Create Dataset button at the bottom to open the window below
![img_3.png](img_3.png)
Enter the relevant information and click Create Dataset.
### Rebuild a dataset
---
!!! note
    This action can only be triggered after a dataset build has failed.
Each record in the list has action buttons; clicking ![img_4.png](img_4.png){width="20"} opens the window below
![img_5.png](img_5.png)
Click the rebuild button at the bottom to rebuild the dataset.
### Edit a dataset
---
Each record in the list has action buttons; clicking ![img_6.png](img_6.png){width="20"} opens the edit page, which is identical to the creation page.
### View errors
---
!!! warning
    This feature is only available when the dataset build has failed.
Clicking ![img_7.png](img_7.png){width="20"} opens the window below
![img_8.png](img_8.png)
### Sync data
---
![img_9.png](img_9.png)
Click the sync data button and the system synchronizes the data in the background.

Binary files not shown (10 images removed: 162 KiB, 218 KiB, 194 KiB, 30 KiB, 2.0 KiB, 24 KiB, 1.3 KiB, 842 B, 306 KiB, 12 KiB)

View File

@ -0,0 +1,13 @@
---
title: Sync Data
---
Datasets support manually synchronizing data: each row in the dataset list ends with a `Sync Data` button, as shown below
![img.png](img.png)
Clicking the `Sync Data` button opens the `Sync Data` window
![img_1.png](img_1.png)
After confirming, click the `Sync Data` button and the system creates a background task that synchronizes the data.

Binary files not shown (2 images added: 60 KiB, 26 KiB)

View File

@ -1,5 +1,5 @@
---
title: Self-hosted Deployment
---
DataCap supports deploying the service on your own host. This document explains how to deploy DataCap on a self-managed machine.

View File

@ -1,5 +1,5 @@
---
title: Docker 部署
title: Docker 部署
---
DataCap 项目提供 [qianmoq/datacap](https://hub.docker.com/r/qianmoq/datacap) 包含 DataCap 服务器和默认配置的 Docker 映像。Docker 映像发布到 Docker Hub可以与 Docker 运行时等一起使用。

View File

@ -0,0 +1,68 @@
---
title: Docker Compose Deployment
---
The DataCap project can be deployed with Docker Compose: download the [docker-compose.yml](https://github.com/devlive-community/datacap/blob/dev/docker-compose.yml) file, or use the following definition to deploy the services.
```yaml
version: '3.8'

services:
  mysql:
    image: mysql:latest
    environment:
      MYSQL_ROOT_PASSWORD: 12345678
      MYSQL_DATABASE: datacap
    ports:
      - "3306:3306"
    volumes:
      - ./core/datacap-server/src/main/schema/datacap.sql:/docker-entrypoint-initdb.d/schema.sql

  datacap:
    image: qianmoq/datacap:latest
    restart: always
    ports:
      - "9099:9099"
    depends_on:
      - mysql
    volumes:
      - ./configure/docker/application.properties:/opt/app/datacap/configure/application.properties
```
!!! warning
    You also need to download the following files:

    - [datacap.sql](https://github.com/devlive-community/datacap/blob/dev/core/datacap-server/src/main/schema/datacap.sql)
    - [application.properties](https://github.com/devlive-community/datacap/blob/dev/configure/docker/application.properties)
After downloading, place them in the expected directories, i.e. `./configure/docker/` and `./core/datacap-server/src/main/schema/`. To use custom directories, adjust the `volumes` mounts in `docker-compose.yml`. A sketch of the layout follows below.
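For example, starting from an empty deployment directory, the layout can be prepared roughly like this (a sketch; it assumes the files above have already been downloaded into the current directory):
```bash
mkdir -p configure/docker core/datacap-server/src/main/schema
# Move the downloaded files to where the volume mounts expect them
mv application.properties configure/docker/
mv datacap.sql core/datacap-server/src/main/schema/
```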
## Start the service
---
Once the above is done, start the services with the following command. **It must be run from the directory containing the docker-compose.yml file**
```bash
docker-compose up
```
To start in the background, use
```bash
docker-compose up -d
```
Once started, open http://localhost:9096/ in a browser to see the site.
## Stop the service
---
Stop the services with the following command
```bash
docker-compose down
```
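If the site does not come up, the standard Docker Compose commands are enough to inspect the deployment (nothing here is DataCap-specific):
```bash
# Check container status
docker-compose ps
# Follow the DataCap server logs
docker-compose logs -f datacap
```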

View File

@ -1,5 +1,5 @@
---
title: Rainbond Deployment
---
If you are not familiar with Kubernetes and want to install DataCap on Kubernetes, you can deploy it with Rainbond. [Rainbond](https://www.rainbond.com/) is a cloud-native application management platform built on Kubernetes that makes it easy to deploy your applications to Kubernetes.

View File

@ -28,7 +28,7 @@ hide:
<div class="grid cards" markdown>
- :octicons-project-16: __DataCap Developers__
- __DataCap Developers__
---
@ -36,7 +36,7 @@ hide:
[:octicons-arrow-right-24: Visit](https://github.com/EdurtIO/datacap)
- :octicons-project-16: __Devlive Community__
- __Devlive Community__
---

View File

@ -124,6 +124,7 @@ plugins:
            NavDeveloper: 开发
            NavDeveloperPlugin: 插件
            NavReleaseNote: 更新日志
            NavDataset: 数据集
        - locale: en
          name: English
          build: true
@ -138,6 +139,7 @@ plugins:
            NavDeveloper: Developer
            NavDeveloperPlugin: Plugin
            NavReleaseNote: Release Note
            NavDataset: Dataset
  - search
  - git-revision-date-localized:
      enable_creation_date: true
@ -154,10 +156,16 @@ nav:
  - NavDocumentation:
      - reference/getStarted/install.md
      - reference/getStarted/installContainers.md
      - reference/getStarted/installFromDockerCompose.md
      - reference/getStarted/installRainbond.md
      - reference/getStarted/query/home.md
      - reference/getStarted/dashboard/home.md
      - reference/getStarted/dataset/home.md
      - NavDataset:
          - reference/getStarted/dataset/create/home.md
          - reference/getStarted/dataset/adhoc/home.md
          - reference/getStarted/dataset/sync/home.md
          - reference/getStarted/dataset/history/home.md
          - reference/getStarted/dataset/clear/home.md
  - NavClient:
      - reference/clients/cli.md
  - NavManager:

View File

@ -0,0 +1,46 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>io.edurt.datacap</groupId>
        <artifactId>datacap</artifactId>
        <version>2024.02.1-SNAPSHOT</version>
        <relativePath>../../pom.xml</relativePath>
    </parent>

    <artifactId>datacap-fs-qiniu</artifactId>
    <description>DataCap - File system for Qiniu</description>

    <dependencies>
        <dependency>
            <groupId>io.edurt.datacap</groupId>
            <artifactId>datacap-common</artifactId>
        </dependency>
        <dependency>
            <groupId>io.edurt.datacap</groupId>
            <artifactId>datacap-fs-spi</artifactId>
            <version>${project.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.jetbrains.kotlin</groupId>
            <artifactId>kotlin-reflect</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.qiniu</groupId>
            <artifactId>qiniu-java-sdk</artifactId>
            <version>${datacap.qiniu.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.jetbrains.dokka</groupId>
                <artifactId>dokka-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

View File

@ -0,0 +1,49 @@
package io.edurt.datacap.fs.qiniu

import com.qiniu.storage.Configuration
import com.qiniu.storage.DownloadUrl
import com.qiniu.storage.Region
import com.qiniu.storage.UploadManager
import com.qiniu.storage.model.DefaultPutRet
import com.qiniu.util.Auth
import io.edurt.datacap.common.utils.JsonUtils
import io.edurt.datacap.fs.FsRequest
import java.io.BufferedInputStream
import java.io.InputStream
import java.net.URL

class IOUtils {
    companion object {
        private fun getRemoteUrlAsStream(urlString: String): InputStream {
            val url = URL(urlString)
            val connection = url.openConnection()
            connection.connectTimeout = 5000
            connection.readTimeout = 5000
            return BufferedInputStream(connection.getInputStream())
        }

        // Uploads the stream to the Qiniu bucket and returns the stored key
        @JvmStatic
        fun copy(request: FsRequest, stream: InputStream, fileName: String): String? {
            try {
                val conf = Configuration(Region.autoRegion())
                conf.resumableUploadAPIVersion = Configuration.ResumableUploadAPIVersion.V2
                val manager = UploadManager(conf)
                val auth = Auth.create(request.access, request.secret)
                val token = auth.uploadToken(request.bucket, fileName)
                val response = manager.put(stream, fileName, token, null, null)
                val putRet = JsonUtils.toObject(response.bodyString(), DefaultPutRet::class.java)
                return putRet.key
            }
            catch (e: Exception) {
                throw RuntimeException(e)
            }
        }

        // Builds a download URL from the configured endpoint and opens it as a stream
        @JvmStatic
        fun reader(request: FsRequest): InputStream {
            val downloadUrl = DownloadUrl(request.endpoint, false, request.fileName)
            return getRemoteUrlAsStream(downloadUrl.buildURL())
        }
    }
}
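
A minimal usage sketch for these helpers, assuming valid Qiniu credentials; every value below is a placeholder rather than anything shipped with DataCap:

```kotlin
import io.edurt.datacap.fs.FsRequest
import io.edurt.datacap.fs.qiniu.IOUtils
import java.io.FileInputStream

fun main() {
    val request = FsRequest()
    request.access = "<ACCESS_KEY>"      // placeholder Qiniu access key
    request.secret = "<SECRET_KEY>"      // placeholder Qiniu secret key
    request.bucket = "<BUCKET>"          // placeholder bucket name
    request.endpoint = "<BUCKET_DOMAIN>" // placeholder download domain
    request.fileName = "hello.txt"

    // Upload a local file, then stream it back through the bucket domain
    val key = IOUtils.copy(request, FileInputStream("hello.txt"), request.fileName)
    println("uploaded key: $key")
    IOUtils.reader(request).use { stream ->
        println(stream.readBytes().decodeToString())
    }
}
```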

View File

@ -0,0 +1,57 @@
package io.edurt.datacap.fs.qiniu

import io.edurt.datacap.fs.Fs
import io.edurt.datacap.fs.FsRequest
import io.edurt.datacap.fs.FsResponse
import io.edurt.datacap.fs.qiniu.IOUtils.Companion.copy
import org.slf4j.LoggerFactory.getLogger
import java.io.File
import java.lang.String.join

class QiniuFs : Fs {
    private val log = getLogger(QiniuFs::class.java)

    override fun writer(request: FsRequest?): FsResponse {
        requireNotNull(request) { "request must not be null" }
        log.info("QiniuFs writer origin path [ {} ]", request.fileName)
        val targetPath = join(File.separator, request.endpoint, request.bucket, request.fileName)
        val response = FsResponse.builder()
            .origin(request.fileName)
            .remote(targetPath)
            .successful(true)
            .build()
        log.info("QiniuFs writer target path [ {} ]", targetPath)
        try {
            // The remote key returned by Qiniu replaces the precomputed target path
            val key = copy(request, request.stream, request.fileName)
            response.remote = key
            log.info("QiniuFs writer [ {} ] successfully", key)
        }
        catch (e: Exception) {
            log.error("QiniuFs writer error", e)
            response.isSuccessful = false
            response.message = e.message
        }
        return response
    }

    override fun reader(request: FsRequest?): FsResponse {
        requireNotNull(request) { "request must not be null" }
        log.info("QiniuFs reader origin path [ {} ]", request.fileName)
        val response = FsResponse.builder()
            .remote(request.fileName)
            .successful(true)
            .build()
        try {
            response.context = IOUtils.reader(request)
            log.info("QiniuFs reader [ {} ] successfully", request.fileName)
        }
        catch (e: Exception) {
            log.error("QiniuFs reader error", e)
            response.isSuccessful = false
            response.message = e.message
        }
        return response
    }
}
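
A sketch of driving the plugin directly (the tests below resolve it through Guice instead; credentials come from system properties, as in those tests):

```kotlin
import io.edurt.datacap.fs.FsRequest
import java.io.FileInputStream

fun uploadExample() {
    val request = FsRequest()
    request.access = System.getProperty("access")
    request.secret = System.getProperty("secret")
    request.bucket = System.getProperty("bucket")
    request.endpoint = System.getProperty("endpoint")
    request.fileName = "hello.txt"
    request.stream = FileInputStream("hello.txt")

    val response = QiniuFs().writer(request)
    if (response.isSuccessful) {
        println("stored as ${response.remote}")
    }
    else {
        println("upload failed: ${response.message}")
    }
}
```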

View File

@ -0,0 +1,13 @@
package io.edurt.datacap.fs.qiniu

import com.google.inject.multibindings.Multibinder
import io.edurt.datacap.fs.Fs
import io.edurt.datacap.fs.FsModule

class QiniuModule : FsModule() {
    override fun configure() {
        // Registers QiniuFs into the injectable Set<Fs>
        Multibinder.newSetBinder(binder(), Fs::class.java)
            .addBinding()
            .to(QiniuFs::class.java)
    }
}

View File

@ -0,0 +1 @@
io.edurt.datacap.fs.qiniu.QiniuModule

View File

@ -0,0 +1,33 @@
package io.edurt.datacap.fs.qiniu

import io.edurt.datacap.fs.FsRequest
import org.junit.Assert.assertNotNull
import org.junit.Assert.assertTrue
import org.junit.Before
import org.junit.Test
import java.io.FileInputStream

class IOUtilsTest {
    private var request = FsRequest()

    @Before
    fun before() {
        // Credentials come from system properties so they never land in the repository
        request.access = System.getProperty("access")
        request.secret = System.getProperty("secret")
        request.bucket = System.getProperty("bucket")
        request.fileName = "IOUtilsTest.kt"
        request.endpoint = System.getProperty("endpoint")
    }

    @Test
    fun copy() {
        val stream = FileInputStream("src/test/kotlin/io/edurt/datacap/fs/qiniu/IOUtilsTest.kt")
        val result = IOUtils.copy(request, stream, "IOUtilsTest.kt")
        assertTrue(result != null)
    }

    @Test
    fun reader() {
        assertNotNull(IOUtils.reader(request))
    }
}

View File

@ -0,0 +1,67 @@
package io.edurt.datacap.fs.qiniu
import com.google.inject.Guice
import com.google.inject.Injector
import com.google.inject.Key
import com.google.inject.TypeLiteral
import io.edurt.datacap.fs.Fs
import io.edurt.datacap.fs.FsManager
import io.edurt.datacap.fs.FsRequest
import org.junit.Before
import org.junit.Test
import org.slf4j.LoggerFactory
import java.io.BufferedReader
import java.io.FileInputStream
import java.io.IOException
import java.io.InputStreamReader
import java.nio.charset.StandardCharsets
import kotlin.test.assertTrue
class QiniuFsTest {
private val log = LoggerFactory.getLogger(QiniuFsTest::class.java)
private val name = "Qiniu"
private var request = FsRequest()
private var injector: Injector? = null
@Before
fun before() {
request.access = System.getProperty("access")
request.secret = System.getProperty("secret")
request.bucket = System.getProperty("bucket")
request.fileName = "IOUtilsTest.kt"
request.endpoint = System.getProperty("endpoint")
injector = Guice.createInjector(FsManager())
}
@Test
fun writer() {
val plugins: Set<Fs?>? = injector?.getInstance(Key.get(object : TypeLiteral<Set<Fs?>?>() {}))
val plugin: Fs? = plugins?.first { v -> v?.name().equals(name) }
val stream = FileInputStream("src/test/kotlin/io/edurt/datacap/fs/qiniu/IOUtilsTest.kt")
request.stream = stream
val response = plugin !!.writer(request)
assertTrue(response.isSuccessful)
}
@Test
fun reader() {
val plugins: Set<Fs?>? = injector?.getInstance(Key.get(object : TypeLiteral<Set<Fs?>?>() {}))
val plugin: Fs? = plugins?.first { v -> v?.name().equals(name) }
val response = plugin !!.reader(request)
assertTrue(response.isSuccessful)
try {
BufferedReader(InputStreamReader(response.context, StandardCharsets.UTF_8)).use { reader ->
var line: String?
while ((reader.readLine().also { line = it }) != null) {
log.info(line)
}
}
}
catch (e: IOException) {
log.error("Reader error", e)
}
}
}

View File

@ -0,0 +1,27 @@
package io.edurt.datacap.fs.qiniu

import com.google.inject.Guice.createInjector
import com.google.inject.Injector
import com.google.inject.Key
import com.google.inject.TypeLiteral
import io.edurt.datacap.fs.Fs
import org.junit.Before
import org.junit.Test
import kotlin.test.assertTrue

class QiniuModuleTest {
    private val name = "Qiniu"
    private var injector: Injector? = null

    @Before
    fun before() {
        injector = createInjector(QiniuModule())
    }

    @Test
    fun test() {
        val fs: Fs? = injector?.getInstance(Key.get(object : TypeLiteral<Set<Fs?>?>() {}))
            ?.first { v -> v?.name().equals(name) }
        assertTrue(fs != null)
    }
}
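
Since these tests read their credentials from system properties, a run against a real bucket presumably looks like this (the module path comes from the root pom below; the property names match the `before()` methods above, and the values are placeholders):

```bash
mvn test -pl fs/datacap-fs-qiniu \
    -Daccess=<ACCESS_KEY> \
    -Dsecret=<SECRET_KEY> \
    -Dbucket=<BUCKET> \
    -Dendpoint=<BUCKET_DOMAIN>
```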

View File

@ -80,6 +80,7 @@
        <module>shaded/datacap-shaded-pinot</module>
        <module>fs/datacap-fs-spi</module>
        <module>fs/datacap-fs-local</module>
        <module>fs/datacap-fs-qiniu</module>
        <module>parser/datacap-parser-spi</module>
        <module>parser/datacap-parser-trino</module>
        <module>parser/datacap-parser-mysql</module>
@ -164,6 +165,7 @@
        <assembly-plugin.version>3.6.0</assembly-plugin.version>
        <!-- datacap plugin dependency -->
        <datacap.pgsql.version>42.6.0</datacap.pgsql.version>
        <datacap.qiniu.version>7.15.0</datacap.qiniu.version>
        <!-- maven plugin -->
        <plugin.maven.checkstyle.version>3.0.0</plugin.maven.checkstyle.version>
        <plugin.maven.findbugs.version>3.0.5</plugin.maven.findbugs.version>