no message

This commit is contained in:
Calvin 2024-10-31 15:19:55 +08:00
parent 21f500b92f
commit 7c0bd9d495
82 changed files with 4122 additions and 477 deletions

10
.gitignore vendored
View File

@ -48,3 +48,13 @@
2_nlp_sdks/translation/trans_nllb_sdk/models/traced_translation_gpu.pt
7_aigc/java_stable_diffusion_sdks/txt2image_sdk/models/pytorch/Download and put model here.md
7_aigc/java_stable_diffusion_sdks/txt2image_sdk/models/pytorch_cpu/Download and put model here.md
1_image_sdks/fire_smoke_sdk/target/
1_image_sdks/seg_sam2_sdk/models/sam2-hiera-large-gpu.pt
1_image_sdks/fire_smoke_sdk/models/fire_smoke.zip
4_video_sdks/camera_sdk/models/fire_smoke.zip
4_video_sdks/camera_sdk/target/
4_video_sdks/mp4_sdk/models/fire_smoke.zip
4_video_sdks/mp4_sdk/target/
archive/4_video_sdks/mp4_facemask_sdk/models/face_detection.zip
archive/4_video_sdks/mp4_facemask_sdk/models/face_mask.zip
archive/4_video_sdks/mp4_facemask_sdk/target/

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

View File

@ -25,13 +25,13 @@
<groupId>calvin</groupId>
<artifactId>fire-smoke-sdk</artifactId>
<version>0.17.0</version>
<version>0.23.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.17.0</djl.version>
<djl.version>0.23.0</djl.version>
</properties>
<build>
@ -82,14 +82,27 @@
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-engine</artifactId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<artifactId>pytorch-engine</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch GPU 配置 -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-native-cu117</artifactId>-->
<!-- <classifier>win-x86_64</classifier>-->
<!-- <version>1.13.1</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-jni</artifactId>-->
<!-- <version>1.13.1-0.23.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>

View File

@ -0,0 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>calvin</groupId>
<artifactId>fire-smoke-sdk</artifactId>
<version>0.23.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.23.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<!-- Pytorch GPU 配置 -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cu117</artifactId>
<classifier>win-x86_64</classifier>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>1.13.1-0.23.0</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.18</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

Binary file not shown.

After

Width:  |  Height:  |  Size: 113 KiB

View File

@ -0,0 +1,132 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>seg_sam2_sdk</artifactId>
<version>0.30.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.30.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch 自动配置 -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cpu</artifactId>
<classifier>win-x86_64</classifier>
<scope>runtime</scope>
<version>2.4.0</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>2.4.0-0.30.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.opencv</groupId>
<artifactId>opencv</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.8</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.18</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,132 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>seg_sam2_sdk</artifactId>
<version>0.30.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.30.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch 自动配置 -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
    <groupId>ai.djl.pytorch</groupId>
    <artifactId>pytorch-native-cu124</artifactId>
    <classifier>linux-x86_64</classifier>
    <version>2.4.0</version>
    <scope>runtime</scope>
</dependency>
<dependency>
    <groupId>ai.djl.pytorch</groupId>
    <artifactId>pytorch-jni</artifactId>
    <version>2.4.0-0.30.0</version>
    <scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.opencv</groupId>
<artifactId>opencv</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.8</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.18</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -1,5 +1,6 @@
package top.aias.seg;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
@ -28,11 +29,11 @@ import java.nio.file.Paths;
* @mail 179209347@qq.com
* @website www.aias.top
*/
public final class Sam2Example {
public final class Sam2ExampleCPU {
private static final Logger logger = LoggerFactory.getLogger(Sam2Example.class);
private static final Logger logger = LoggerFactory.getLogger(Sam2ExampleCPU.class);
private Sam2Example() {
private Sam2ExampleCPU() {
}
public static void main(String[] args) throws IOException, ModelException, TranslateException {
@ -42,7 +43,7 @@ public final class Sam2Example {
Sam2Translator.Sam2Input input =
Sam2Translator.Sam2Input.builder(image).addPoint(575, 750).addBox(425, 600, 700, 875).build();
try (Sam2Model sam2Model = new Sam2Model("models/", "sam2-hiera-tiny.pt", 1)) {
try (Sam2Model sam2Model = new Sam2Model("models/", "sam2-hiera-tiny.pt", 1, Device.cpu())) {
DetectedObjects detection = sam2Model.predict(input);
showMask(input, detection);
logger.info("{}", detection);

View File

@ -0,0 +1,66 @@
package top.aias.seg;
import ai.djl.Device;
import ai.djl.ModelException;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.opencv.OpenCVImageFactory;
import ai.djl.translate.TranslateException;
import org.opencv.core.Mat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import top.aias.seg.model.Sam2Model;
import top.aias.seg.translator.Sam2Translator;
import top.aias.seg.utils.OpenCVUtils;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * SAM2 image segmentation example — GPU variant.
 * Loads the TorchScript model {@code sam2-hiera-large-gpu.pt} on {@link Device#gpu()},
 * runs a point+box prompt against a test image, and writes the mask overlay to build/output.
 *
 * @author Calvin
 * @mail 179209347@qq.com
 * @website www.aias.top
 */
public final class Sam2ExampleGPU {

    private static final Logger logger = LoggerFactory.getLogger(Sam2ExampleGPU.class);

    private Sam2ExampleGPU() {
    }

    public static void main(String[] args) throws IOException, ModelException, TranslateException {
        Path imageFile = Paths.get("src/test/resources/truck.jpg");
        Image image = OpenCVImageFactory.getInstance().fromFile(imageFile);

        // Prompt: one foreground point plus one bounding box on the truck.
        Sam2Translator.Sam2Input input = Sam2Translator.Sam2Input.builder(image)
                .addPoint(575, 750)
                .addBox(425, 600, 700, 875)
                .build();

        // Pool size 1; model is closed automatically by try-with-resources.
        try (Sam2Model sam2Model = new Sam2Model("models/", "sam2-hiera-large-gpu.pt", 1, Device.gpu())) {
            DetectedObjects detection = sam2Model.predict(input);
            showMask(input, detection);
            logger.info("{}", detection);
        }
    }

    /**
     * Draws the predicted mask, the prompt points and boxes onto the input image
     * and saves the result as build/output/sam2.png.
     */
    private static void showMask(Sam2Translator.Sam2Input input, DetectedObjects detection) throws IOException {
        Path outputDir = Paths.get("build/output");
        Files.createDirectories(outputDir);

        Image img = input.getImage();
        img.drawBoundingBoxes(detection, 0.8f);
        img.drawMarks(input.getPoints());

        // Prompt boxes are drawn directly on the wrapped OpenCV Mat (red, thickness 6).
        for (Rectangle rect : input.getBoxes()) {
            OpenCVUtils.drawRectangle((Mat) img.getWrappedImage(), rect, 0xff0000, 6);
        }

        Path imagePath = outputDir.resolve("sam2.png");
        img.save(Files.newOutputStream(imagePath), "png");
    }
}

View File

@ -1,5 +1,6 @@
package top.aias.seg.model;
import ai.djl.Device;
import ai.djl.MalformedModelException;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
@ -26,12 +27,12 @@ public final class Sam2Model implements AutoCloseable {
private ZooModel<Sam2Translator.Sam2Input, DetectedObjects> model;
private SegPool segPool;
public Sam2Model(String modelPath, String modelName, int poolSize) throws ModelException, IOException {
init(modelPath, modelName, poolSize);
public Sam2Model(String modelPath, String modelName, int poolSize, Device device) throws ModelException, IOException {
init(modelPath, modelName, poolSize, device);
}
public void init(String modelPath, String modelName, int poolSize) throws MalformedModelException, ModelNotFoundException, IOException {
this.model = criteria(modelPath, modelName).loadModel();
public void init(String modelPath, String modelName, int poolSize, Device device) throws MalformedModelException, ModelNotFoundException, IOException {
this.model = criteria(modelPath, modelName, device).loadModel();
this.segPool = new SegPool(model, poolSize);
}
@ -47,15 +48,21 @@ public final class Sam2Model implements AutoCloseable {
this.segPool.close();
}
private Criteria<Sam2Translator.Sam2Input, DetectedObjects> criteria(String modelPath, String modelName) {
private Criteria<Sam2Translator.Sam2Input, DetectedObjects> criteria(String modelPath, String modelName, Device device) {
String encode = null;
if(!device.isGpu())
encode = "encode";
Criteria<Sam2Translator.Sam2Input, DetectedObjects> criteria =
Criteria.builder()
.setTypes(Sam2Translator.Sam2Input.class, DetectedObjects.class)
// sam2-hiera-tiny
// sam2-hiera-large
.optDevice(device)
.optModelPath(Paths.get(modelPath + modelName))
.optEngine("PyTorch")
.optTranslator(new Sam2Translator())
// .optOption("mapLocation","true")
.optTranslator(new Sam2Translator(encode))
.optProgress(new ProgressBar())
.build();
return criteria;

View File

@ -62,13 +62,13 @@ public class Sam2Translator implements NoBatchifyTranslator<Sam2Translator.Sam2I
private String encodeMethod;
/** Constructs a {@code Sam2Translator} instance. */
public Sam2Translator() {
public Sam2Translator(String encode) {
pipeline = new Pipeline();
pipeline.add(new Resize(1024, 1024));
pipeline.add(new ToTensor());
pipeline.add(new Normalize(MEAN, STD));
// this.encoderPath = builder.encoderPath;
this.encodeMethod = "encode";
this.encodeMethod = encode;
}
/** {@inheritDoc} */

View File

@ -111,20 +111,6 @@
<artifactId>pytorch-engine</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-native-cu118</artifactId>-->
<!-- <classifier>linux-x86_64</classifier>-->
<!-- <version>2.0.1</version>-->
<!-- <scope>runtime</scope>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-jni</artifactId>-->
<!-- <version>2.0.1-0.23.0</version>-->
<!-- <scope>runtime</scope>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.huggingface</groupId>

View File

@ -106,26 +106,19 @@
<version>${djl.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<!-- Pytorch GPU 配置 -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cu118</artifactId>
<classifier>linux-x86_64</classifier>
<version>2.0.1</version>
<scope>runtime</scope>
<artifactId>pytorch-native-cu117</artifactId>
<classifier>win-x86_64</classifier>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>2.0.1-0.23.0</version>
<scope>runtime</scope>
<version>1.13.1-0.22.1</version>
</dependency>
<dependency>
<groupId>ai.djl.huggingface</groupId>
<artifactId>tokenizers</artifactId>

View File

@ -0,0 +1,41 @@
## 目录:
http://aias.top/
### 下载模型放置于models目录
- 链接: https://pan.baidu.com/s/1LnKnwM2TcEi8SpP-cHbA9g?pwd=yu4g
### 烟火检测
支持烟雾-火灾2类检测.
### SDK功能
读取本地摄像头,实时(需要有显卡的台式机,否则会比较卡顿)检测烟火。
- 烟火检测,给出检测框和置信度
- 支持类别:
- fire
- smoke
#### 运行例子
- 测试图片
![fire_detect](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/sec_sdks/images/fire_detect_result.png)
### 官网:
[官网链接](http://www.aias.top/)
### Git地址
[Github链接](https://github.com/mymagicpower/AIAS)
[Gitee链接](https://gitee.com/mymagicpower/AIAS)
#### 帮助文档:
- http://aias.top/guides.html
- 1.性能优化常见问题:
- http://aias.top/AIAS/guides/performance.html
- 2.引擎配置包括CPUGPU在线自动加载及本地配置:
- http://aias.top/AIAS/guides/engine_config.html
- 3.模型加载方式(在线自动加载,及本地配置):
- http://aias.top/AIAS/guides/load_model.html
- 4.Windows环境常见问题:
- http://aias.top/AIAS/guides/windows.html

View File

@ -0,0 +1,41 @@
## 目录:
http://aias.top/
### 下载模型放置于models目录
- 链接: https://pan.baidu.com/s/1LnKnwM2TcEi8SpP-cHbA9g?pwd=yu4g
### 烟火检测
支持烟雾-火灾2类检测.
### SDK功能
读取本地摄像头,实时(需要有显卡的台式机,否则会比较卡顿)检测烟火。
- 烟火检测,给出检测框和置信度
- 支持类别:
- fire
- smoke
#### 运行例子
- 测试图片
![fire_detect](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/sec_sdks/images/fire_detect_result.png)
### 官网:
[官网链接](http://www.aias.top/)
### Git地址
[Github链接](https://github.com/mymagicpower/AIAS)
[Gitee链接](https://gitee.com/mymagicpower/AIAS)
#### 帮助文档:
- http://aias.top/guides.html
- 1.性能优化常见问题:
- http://aias.top/AIAS/guides/performance.html
- 2.引擎配置包括CPUGPU在线自动加载及本地配置:
- http://aias.top/AIAS/guides/engine_config.html
- 3.模型加载方式(在线自动加载,及本地配置):
- http://aias.top/AIAS/guides/load_model.html
- 4.Windows环境常见问题:
- http://aias.top/AIAS/guides/windows.html

View File

@ -0,0 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>camera-sdk</artifactId>
<version>0.23.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.23.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<!-- Pytorch GPU 配置 -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cu117</artifactId>
<classifier>win-x86_64</classifier>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>1.13.1-0.23.0</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,170 @@
package top.aias;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import top.aias.utils.FaceDetection;
import top.aias.utils.FaceMaskDetect;
import top.aias.utils.OpenCVImageUtil;
import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Scalar;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
 * 摄像头口罩检测
 * Camera mask detection: grabs frames from the local camera, detects faces with a
 * PaddlePaddle face-detection model, classifies each face crop as masked/unmasked,
 * and renders the annotated video in a Swing canvas.
 *
 * @author Calvin
 */
public class CameraExample {

    public static void main(String[] args) throws IOException, ModelException, TranslateException {
        faceMaskDetection();
    }

    /**
     * 口罩检测
     * Mask detection main loop. Blocks until the canvas is closed or the grabber
     * stops producing frames.
     *
     * @throws IOException        if the model files cannot be read
     * @throws ModelException     if a model fails to load
     * @throws TranslateException if inference fails on a frame
     */
    public static void faceMaskDetection() throws IOException, ModelException, TranslateException {
        float shrink = 0.5f;    // input downscale factor for the face detector
        float threshold = 0.7f; // minimum face-detection confidence
        Criteria<Image, DetectedObjects> criteria = new FaceDetection().criteria(shrink, threshold);
        Criteria<Image, Classifications> maskCriteria = new FaceMaskDetect().criteria();

        // 开启摄像头,获取图像(得到的图像为frame类型,需要转换为mat类型进行检测和识别)
        // Open camera 0; grabbed Frames must be converted to Mat for detection/recognition.
        OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);
        grabber.start();

        // Frame与Mat转换 / Frame <-> Mat converter
        OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();

        CanvasFrame canvas = new CanvasFrame("Face Detection");
        canvas.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        canvas.setVisible(true);
        canvas.setFocusable(true);
        // 窗口置顶 / keep the preview window on top when supported
        if (canvas.isAlwaysOnTopSupported()) {
            canvas.setAlwaysOnTop(true);
        }

        Frame frame = null;
        try (ZooModel model = ModelZoo.loadModel(criteria);
                Predictor<Image, DetectedObjects> predictor = model.newPredictor();
                ZooModel classifyModel = ModelZoo.loadModel(maskCriteria);
                Predictor<Image, Classifications> classifier = classifyModel.newPredictor()) {
            // 获取图像帧 / grab frames until the window closes or the stream ends
            for (; canvas.isVisible() && (frame = grabber.grab()) != null; ) {
                // 将获取的frame转化成mat数据类型
                // Convert the grabbed frame into a Mat, then into a DJL Image.
                Mat img = converter.convert(frame);
                BufferedImage buffImg = OpenCVImageUtil.mat2BufferedImage(img);
                Image image = ImageFactory.getInstance().fromImage(buffImg);
                int imageWidth = image.getWidth();
                int imageHeight = image.getHeight();

                DetectedObjects detections = predictor.predict(image);
                List<DetectedObjects.DetectedObject> items = detections.items();

                // 遍历人脸 / classify each detected face crop
                for (DetectedObjects.DetectedObject item : items) {
                    Image subImg = getSubImage(image, item.getBoundingBox());
                    Classifications classifications = classifier.predict(subImg);
                    String className = classifications.best().getClassName();

                    // Bounding box is normalized [0,1]; scale back to pixels.
                    BoundingBox box = item.getBoundingBox();
                    Rectangle rectangle = box.getBounds();
                    int x = (int) (rectangle.getX() * imageWidth);
                    int y = (int) (rectangle.getY() * imageHeight);
                    Rect face =
                            new Rect(
                                    x,
                                    y,
                                    (int) (rectangle.getWidth() * imageWidth),
                                    (int) (rectangle.getHeight() * imageHeight));

                    // 绘制人脸矩形区域,scalar色彩顺序:BGR(蓝绿红)
                    // Draw the face rectangle; Scalar channel order is BGR.
                    rectangle(img, face, new Scalar(0, 0, 255, 1));

                    int pos_x = Math.max(face.tl().x() - 10, 0);
                    int pos_y = Math.max(face.tl().y() - 10, 0);
                    // 在人脸矩形上面绘制文字 / draw the class label above the rectangle
                    putText(
                            img,
                            className,
                            new Point(pos_x, pos_y),
                            FONT_HERSHEY_COMPLEX,
                            1.0,
                            new Scalar(0, 0, 255, 2.0));
                }
                // 显示视频图像 / show the annotated frame
                canvas.showImage(frame);
            }
        } finally {
            // Release the UI and the camera even if grabbing/inference throws;
            // previously these leaked on any exception inside the loop.
            canvas.dispose();
            grabber.close();
        }
    }

    /**
     * Expands a (normalized-to-pixel) box to a centered square, optionally grown by
     * {@code percentage}. Returns {x, y, side} of the square's top-left corner and side.
     */
    private static int[] extendSquare(
            double xmin, double ymin, double width, double height, double percentage) {
        double centerx = xmin + width / 2;
        double centery = ymin + height / 2;
        double maxDist = Math.max(width / 2, height / 2) * (1 + percentage);
        return new int[] {(int) (centerx - maxDist), (int) (centery - maxDist), (int) (2 * maxDist)};
    }

    /**
     * Crops the square face region described by {@code box} out of {@code img},
     * clamping the square to the image bounds.
     */
    private static Image getSubImage(Image img, BoundingBox box) {
        Rectangle rect = box.getBounds();
        int width = img.getWidth();
        int height = img.getHeight();
        int[] squareBox =
                extendSquare(
                        rect.getX() * width,
                        rect.getY() * height,
                        rect.getWidth() * width,
                        rect.getHeight() * height,
                        0); // 0.18 would add 18% margin around the face
        // Clamp the square into the image so getSubImage never goes out of bounds.
        if (squareBox[0] < 0) squareBox[0] = 0;
        if (squareBox[1] < 0) squareBox[1] = 0;
        if (squareBox[0] > width) squareBox[0] = width;
        if (squareBox[1] > height) squareBox[1] = height;
        if ((squareBox[0] + squareBox[2]) > width) squareBox[2] = width - squareBox[0];
        if ((squareBox[1] + squareBox[2]) > height) squareBox[2] = height - squareBox[1];
        return img.getSubImage(squareBox[0], squareBox[1], squareBox[2], squareBox[2]);
    }
}

View File

@ -0,0 +1,107 @@
package top.aias.utils;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.nio.file.Paths;
/**
 * Builds a DJL {@link Criteria} for a PaddlePaddle face-detection model
 * (models/face_detection.zip) and supplies its pre/post-processing translator.
 *
 * @author Calvin
 * @email 179209347@qq.com
 **/
public final class FaceDetection {

    private static final Logger logger = LoggerFactory.getLogger(FaceDetection.class);

    public FaceDetection() {}

    /**
     * Creates load criteria for the face detector.
     *
     * @param shrink    input downscale factor applied before inference (e.g. 0.5)
     * @param threshold minimum confidence for a detection to be kept
     * @return criteria loading models/face_detection.zip on the PaddlePaddle engine
     */
    public Criteria<Image, DetectedObjects> criteria(float shrink, float threshold) {
        Criteria<Image, DetectedObjects> criteria =
                Criteria.builder()
                        .optEngine("PaddlePaddle")
                        .setTypes(Image.class, DetectedObjects.class)
                        .optModelPath(Paths.get("models/face_detection.zip"))
                        .optProgress(new ProgressBar())
                        .optTranslator(new FaceTranslator(shrink, threshold))
                        .build();
        return criteria;
    }

    /**
     * Translator: resizes/normalizes the input image and converts the raw model
     * output rows {@code [classId, score, xmin, ymin, xmax, ymax]} into
     * {@link DetectedObjects}. Static nested class: it uses no enclosing-instance
     * state, so it avoids holding a hidden reference to FaceDetection.
     */
    private static final class FaceTranslator implements Translator<Image, DetectedObjects> {

        private final float shrink;
        private final float threshold;
        private final List<String> className;

        FaceTranslator(float shrink, float threshold) {
            this.shrink = shrink;
            this.threshold = threshold;
            className = Arrays.asList("Not Face", "Face");
        }

        @Override
        public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
            return processImageOutput(list, className, threshold);
        }

        @Override
        public NDList processInput(TranslatorContext ctx, Image input) {
            return processImageInput(ctx.getNDManager(), input, shrink);
        }

        @Override
        public Batchifier getBatchifier() {
            // Model is fed one image at a time; no batching.
            return null;
        }

        NDList processImageInput(NDManager manager, Image input, float shrink) {
            NDArray array = input.toNDArray(manager);
            Shape shape = array.getShape();
            array =
                    NDImageUtils.resize(array, (int) (shape.get(1) * shrink), (int) (shape.get(0) * shrink));
            array = array.transpose(2, 0, 1).flip(0); // HWC -> CHW, BGR -> RGB
            NDArray mean = manager.create(new float[] {104f, 117f, 123f}, new Shape(3, 1, 1));
            array = array.sub(mean).mul(0.007843f); // mean-subtract then scale (~1/127.5)
            array = array.expandDims(0); // add batch dimension
            return new NDList(array);
        }

        DetectedObjects processImageOutput(NDList list, List<String> className, float threshold) {
            NDArray result = list.singletonOrThrow();
            // Column 1 holds the confidence score for each candidate row.
            float[] probabilities = result.get(":,1").toFloatArray();
            List<String> names = new ArrayList<>();
            List<Double> prob = new ArrayList<>();
            List<BoundingBox> boxes = new ArrayList<>();
            for (int i = 0; i < probabilities.length; i++) {
                if (probabilities[i] >= threshold) {
                    // Row layout: [classId, score, xmin, ymin, xmax, ymax] (normalized coords).
                    float[] array = result.get(i).toFloatArray();
                    names.add(className.get((int) array[0]));
                    prob.add((double) probabilities[i]);
                    boxes.add(new Rectangle(array[2], array[3], array[4] - array[2], array[5] - array[3]));
                }
            }
            return new DetectedObjects(names, prob, boxes);
        }
    }
}

View File

@ -0,0 +1,103 @@
package top.aias.utils;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.transform.Normalize;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.nio.file.Paths;
/**
*
* @author Calvin
*
* @email 179209347@qq.com
**/
/**
 * Face-mask detection pipeline: runs a face detector, crops each detected face to a square
 * patch, and classifies every patch as masked or unmasked with a PaddlePaddle classifier.
 *
 * @author Calvin
 * @email 179209347@qq.com
 **/
public final class FaceMaskDetect {

    private static final Logger logger = LoggerFactory.getLogger(FaceMaskDetect.class);

    public FaceMaskDetect() {}

    /**
     * Detects faces and classifies each one with the mask classifier.
     *
     * @param faceDetector predictor producing face bounding boxes
     * @param classifier predictor classifying a cropped face patch
     * @param image input image
     * @return detections whose class names come from the mask classifier while the
     *     probabilities and boxes come from the face detector
     * @throws TranslateException if either model fails during inference
     */
    public DetectedObjects predict(
            Predictor<Image, DetectedObjects> faceDetector,
            Predictor<Image, Classifications> classifier,
            Image image)
            throws TranslateException {
        DetectedObjects detections = faceDetector.predict(image);
        List<DetectedObjects.DetectedObject> faces = detections.items();
        List<String> names = new ArrayList<>();
        List<Double> prob = new ArrayList<>();
        List<BoundingBox> rect = new ArrayList<>();
        for (DetectedObjects.DetectedObject face : faces) {
            Image subImg = getSubImage(image, face.getBoundingBox());
            Classifications classifications = classifier.predict(subImg);
            names.add(classifications.best().getClassName());
            prob.add(face.getProbability());
            rect.add(face.getBoundingBox());
        }
        return new DetectedObjects(names, prob, rect);
    }

    /**
     * Creates the criteria used to load the mask-classification model from
     * {@code models/face_mask.zip}.
     *
     * @return criteria mapping an {@link Image} to {@link Classifications}
     */
    public Criteria<Image, Classifications> criteria() {
        return Criteria.builder()
                .optEngine("PaddlePaddle")
                .setTypes(Image.class, Classifications.class)
                .optTranslator(
                        ImageClassificationTranslator.builder()
                                .addTransform(new Resize(128, 128))
                                .addTransform(new ToTensor()) // HWC -> CHW, div(255)
                                .addTransform(
                                        new Normalize(
                                                new float[] {0.5f, 0.5f, 0.5f},
                                                new float[] {1.0f, 1.0f, 1.0f}))
                                .addTransform(nd -> nd.flip(0)) // RGB -> BGR
                                .build())
                .optModelPath(Paths.get("models/face_mask.zip"))
                .optProgress(new ProgressBar())
                .build();
    }

    /**
     * Expands a rectangle to a square centered on the original box, optionally enlarged by
     * {@code percentage}.
     *
     * @return {@code [x, y, side]} of the square (may lie outside the image; callers clamp)
     */
    private int[] extendSquare(
            double xmin, double ymin, double width, double height, double percentage) {
        double centerx = xmin + width / 2;
        double centery = ymin + height / 2;
        double maxDist = Math.max(width / 2, height / 2) * (1 + percentage);
        return new int[] {(int) (centerx - maxDist), (int) (centery - maxDist), (int) (2 * maxDist)};
    }

    /**
     * Crops a square face patch from the image. The square is clamped to the image bounds so
     * faces near the border never produce negative or out-of-range crop coordinates.
     */
    private Image getSubImage(Image img, BoundingBox box) {
        Rectangle rect = box.getBounds();
        int width = img.getWidth();
        int height = img.getHeight();
        int[] squareBox =
                extendSquare(
                        rect.getX() * width,
                        rect.getY() * height,
                        rect.getWidth() * width,
                        rect.getHeight() * height,
                        0); // 0.18
        // Clamp to the image so getSubImage never reads outside the frame.
        if (squareBox[0] < 0) squareBox[0] = 0;
        if (squareBox[1] < 0) squareBox[1] = 0;
        if (squareBox[0] > width) squareBox[0] = width;
        if (squareBox[1] > height) squareBox[1] = height;
        if ((squareBox[0] + squareBox[2]) > width) squareBox[2] = width - squareBox[0];
        if ((squareBox[1] + squareBox[2]) > height) squareBox[2] = height - squareBox[1];
        return img.getSubImage(squareBox[0], squareBox[1], squareBox[2], squareBox[2]);
    }
}

View File

@ -0,0 +1,180 @@
package top.aias.utils;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import java.awt.*;
import java.awt.image.*;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
/**
* 图片类型转换
* Image type conversion
*
* @author Calvin
*/
/**
 * Image type conversion helpers between {@link BufferedImage}, OpenCV {@link Mat}, and raw
 * BGR/RGB pixel buffers.
 *
 * @author Calvin
 */
public class OpenCVImageUtil {

    /**
     * Converts a BufferedImage to an OpenCV Mat via JavaCV frame converters.
     *
     * @param original source image
     */
    public static Mat bufferedImage2Mat(BufferedImage original) {
        OpenCVFrameConverter.ToMat cv = new OpenCVFrameConverter.ToMat();
        return cv.convertToMat(new Java2DFrameConverter().convert(original));
    }

    /**
     * Converts an OpenCV Mat to a BufferedImage.
     *
     * <p>Supports 1-channel (grayscale) and 3-channel (BGR) matrices; for 3 channels the
     * byte order is swapped in place before building the image. Returns {@code null} for any
     * other channel count.
     *
     * @param matrix source matrix
     */
    public static BufferedImage mat2BufferedImage(Mat matrix) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        matrix.data().get(data);
        int type = 0;
        switch (matrix.channels()) {
            case 1:
                type = BufferedImage.TYPE_BYTE_GRAY;
                break;
            case 3:
                type = BufferedImage.TYPE_3BYTE_BGR;
                // Swap the first and third byte of every pixel (channel reorder).
                byte b;
                for (int i = 0; i < data.length; i = i + 3) {
                    b = data[i];
                    data[i] = data[i + 2];
                    data[i + 2] = b;
                }
                break;
            default:
                // Unsupported channel count — callers must handle null.
                return null;
        }
        BufferedImage image = new BufferedImage(cols, rows, type);
        // The two most important components of a BufferedImage object are Raster and
        // ColorModel, which are used to store the pixel data and color data of the
        // image respectively.
        // Raster is a class that represents a pixel rectangular array. Raster encapsulates
        // the DataBuffer that stores the sample values, and the SampleModel that describes
        // how to locate a given sample value in the DataBuffer.
        // Because the Raster object is the pixel data storage object in the BufferedImage
        // object, the BufferedImage supports getting the pixel value p(x, y) of any
        // position (x, y) from the Raster object.
        image.getRaster().setDataElements(0, 0, cols, rows, data);
        return image;
    }

    /**
     * Draws a text message onto the image, then wraps its pixel data in a Mat.
     *
     * <p>NOTE(review): assumes the image raster is byte-backed (e.g. TYPE_3BYTE_BGR) — the
     * cast to DataBufferByte throws ClassCastException for int-backed image types; confirm
     * callers only pass byte-backed images.
     *
     * @param original source image (mutated: the message is drawn onto it)
     * @param matType OpenCV matrix type constant for the new Mat
     * @param msg text drawn onto the image
     * @param x text x position
     * @param y text y position
     */
    public static Mat bufferedImage2Mat(
            BufferedImage original, int matType, String msg, int x, int y) {
        Graphics2D g = original.createGraphics();
        try {
            g.setComposite(AlphaComposite.Src);
            g.drawImage(original, 0, 0, null);
            g.drawString(msg, x, y);
        } finally {
            // Always release the graphics context.
            g.dispose();
        }
        Mat mat = new Mat(original.getHeight(), original.getWidth(), matType);
        mat.data().put(((DataBufferByte) original.getRaster().getDataBuffer()).getData());
        return mat;
    }

    /**
     * Converts a 24-bit BGR byte array to a BufferedImage.
     *
     * @param src BGR-arranged 24-bit image pixel data array
     * @param width image width
     * @param height image height
     * @return image backed by a copy of the pixel data
     */
    public static BufferedImage BGR2BufferedImage(byte[] src,int width,int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
        Raster ra = image.getRaster();
        DataBuffer out = ra.getDataBuffer();
        DataBufferByte db=(DataBufferByte)out;
        // Copy the source bytes directly into the image's backing buffer.
        ByteBuffer.wrap(db.getData()).put(src,0,src.length);
        return image;
    }

    /**
     * Converts a 24-bit BGR byte buffer to a BufferedImage.
     *
     * @param src BGR-arranged 24-bit image pixel byte buffer
     * @param width image width
     * @param height image height
     * @return image backed by a copy of the pixel data
     */
    public static BufferedImage BGR2BufferedImage(ByteBuffer src,int width,int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
        Raster ra = image.getRaster();
        DataBuffer out = ra.getDataBuffer();
        DataBufferByte db=(DataBufferByte)out;
        // Drain the source buffer into the image's backing buffer.
        ByteBuffer.wrap(db.getData()).put(src);
        return image;
    }

    /**
     * Converts a 24-bit integer BGR buffer to a BufferedImage.
     *
     * @param src BGR-arranged 24-bit image pixel integer buffer (one int per pixel)
     * @param width image width
     * @param height image height
     * @return image backed by a copy of the pixel data
     */
    public static BufferedImage BGR2BufferedImage(IntBuffer src, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_BGR);
        Raster ra = image.getRaster();
        DataBuffer out = ra.getDataBuffer();
        DataBufferInt db=(DataBufferInt)out;
        IntBuffer.wrap(db.getData()).put(src);
        return image;
    }

    /**
     * Converts a 24-bit integer RGB buffer to a BufferedImage.
     *
     * @param src RGB-arranged 24-bit image pixel integer buffer (one int per pixel)
     * @param width image width
     * @param height image height
     * @return image backed by a copy of the pixel data
     */
    public static BufferedImage RGB2BufferedImage(IntBuffer src,int width,int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        Raster ra = image.getRaster();
        DataBuffer out = ra.getDataBuffer();
        DataBufferInt db=(DataBufferInt)out;
        IntBuffer.wrap(db.getData()).put(src);
        return image;
    }
}

View File

@ -1,216 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="CheckStyle-IDEA-Module">
<option name="configuration">
<map />
</option>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-slf4j-impl:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.25" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.12.1" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.apache.logging.log4j:log4j-core:2.12.1" level="project" />
<orderEntry type="library" name="Maven: com.google.code.gson:gson:2.8.5" level="project" />
<orderEntry type="library" name="Maven: ai.djl:api:0.14.0" level="project" />
<orderEntry type="library" name="Maven: net.java.dev.jna:jna:5.9.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.21" level="project" />
<orderEntry type="library" name="Maven: ai.djl:basicdataset:0.14.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.8" level="project" />
<orderEntry type="library" name="Maven: ai.djl:model-zoo:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-engine:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-model-zoo:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-native-auto:1.9.1" level="project" />
<orderEntry type="library" name="Maven: ai.djl.paddlepaddle:paddlepaddle-engine:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.paddlepaddle:paddlepaddle-native-auto:2.0.2" level="project" />
<orderEntry type="library" name="Maven: ai.djl.paddlepaddle:paddlepaddle-model-zoo:0.14.0" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv-platform:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas-platform:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp-platform:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-arm:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:ios-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:ios-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-armhf:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-ppc64le:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:macosx-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:windows-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:windows-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-arm:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:ios-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:ios-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-armhf:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-ppc64le:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:macosx-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:windows-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:windows-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv-platform:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-arm:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:ios-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:ios-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-armhf:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-ppc64le:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:macosx-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:windows-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:windows-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg-platform:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-arm:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-arm64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-armhf:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-arm64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-ppc64le:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:macosx-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:windows-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:windows-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture-platform:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-x86:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-x86_64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-armhf:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-arm64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:windows-x86:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:windows-x86_64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394-platform:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-x86:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-armhf:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-arm64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-ppc64le:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:macosx-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:windows-x86:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:windows-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect-platform:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-x86:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-armhf:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-arm64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-ppc64le:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:macosx-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:windows-x86:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:windows-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2-platform:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:linux-x86:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:linux-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:macosx-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:windows-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense-platform:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:linux-x86:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:linux-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:macosx-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:windows-x86:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:windows-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2-platform:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:linux-x86:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:linux-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:macosx-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:windows-x86:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:windows-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput-platform:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:windows-x86:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:windows-x86_64:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus-platform:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-arm:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-arm64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-armhf:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-arm64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-ppc64le:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:macosx-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:windows-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:windows-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark-platform:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-arm:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-arm64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-armhf:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-arm64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-ppc64le:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:macosx-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:windows-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:windows-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica-platform:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-arm:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-arm64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-armhf:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-arm64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-ppc64le:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:macosx-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:windows-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:windows-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract-platform:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-arm:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-arm64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-armhf:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-arm64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-ppc64le:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:macosx-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.testng:testng:6.8.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.10" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.beanshell:bsh:2.0b4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.beust:jcommander:1.27" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.yaml:snakeyaml:1.6" level="project" />
</component>
</module>

View File

@ -1,218 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="CheckStyle-IDEA-Module">
<option name="configuration">
<map />
</option>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="aais-face-lib-0.1.0" level="project" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-slf4j-impl:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.25" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.12.1" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.apache.logging.log4j:log4j-core:2.12.1" level="project" />
<orderEntry type="library" name="Maven: com.google.code.gson:gson:2.8.5" level="project" />
<orderEntry type="library" name="Maven: ai.djl:api:0.12.0" level="project" />
<orderEntry type="library" name="Maven: net.java.dev.jna:jna:5.8.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.20" level="project" />
<orderEntry type="library" name="Maven: ai.djl:basicdataset:0.12.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.8" level="project" />
<orderEntry type="library" name="Maven: ai.djl:model-zoo:0.12.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.mxnet:mxnet-model-zoo:0.12.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.mxnet:mxnet-engine:0.12.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.mxnet:mxnet-native-auto:1.8.0" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: ai.djl.paddlepaddle:paddlepaddle-engine:0.12.0" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: ai.djl.paddlepaddle:paddlepaddle-native-auto:2.0.2" level="project" />
<orderEntry type="library" name="Maven: ai.djl.paddlepaddle:paddlepaddle-model-zoo:0.12.0" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv-platform:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas-platform:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp-platform:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-arm:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:android-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:ios-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:ios-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-armhf:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-arm64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-ppc64le:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:linux-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:macosx-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:windows-x86:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacpp:windows-x86_64:1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-arm:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:android-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:ios-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:ios-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-armhf:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-arm64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:linux-ppc64le:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:macosx-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:windows-x86:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:openblas:windows-x86_64:0.3.10-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv-platform:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-arm:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:android-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:ios-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:ios-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-armhf:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-arm64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:linux-ppc64le:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:macosx-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:windows-x86:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:opencv:windows-x86_64:4.4.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg-platform:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-arm:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-arm64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:android-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-armhf:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-arm64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:linux-ppc64le:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:macosx-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:windows-x86:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:ffmpeg:windows-x86_64:4.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture-platform:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-x86:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-x86_64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-armhf:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:linux-arm64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:windows-x86:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flycapture:windows-x86_64:2.13.3.31-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394-platform:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-x86:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-armhf:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-arm64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:linux-ppc64le:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:macosx-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:windows-x86:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libdc1394:windows-x86_64:2.2.6-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect-platform:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-x86:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-armhf:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-arm64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:linux-ppc64le:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:macosx-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:windows-x86:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect:windows-x86_64:0.5.7-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2-platform:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:linux-x86:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:linux-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:macosx-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:libfreenect2:windows-x86_64:0.2.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense-platform:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:linux-x86:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:linux-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:macosx-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:windows-x86:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense:windows-x86_64:1.12.4-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2-platform:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:linux-x86:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:linux-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:macosx-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:windows-x86:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:librealsense2:windows-x86_64:2.29.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput-platform:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:windows-x86:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:videoinput:windows-x86_64:0.200-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus-platform:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-arm:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-arm64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:android-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-armhf:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-arm64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:linux-ppc64le:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:macosx-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:windows-x86:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:artoolkitplus:windows-x86_64:2.3.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark-platform:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-arm:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-arm64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:android-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-armhf:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-arm64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:linux-ppc64le:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:macosx-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:windows-x86:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:flandmark:windows-x86_64:1.07-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica-platform:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-arm:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-arm64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:android-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-armhf:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-arm64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:linux-ppc64le:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:macosx-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:windows-x86:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:leptonica:windows-x86_64:1.80.0-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract-platform:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-arm:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-arm64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:android-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-armhf:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-arm64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:linux-ppc64le:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:macosx-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86:4.1.1-1.5.4" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86_64:4.1.1-1.5.4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.testng:testng:6.8.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.10" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.beanshell:bsh:2.0b4" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: com.beust:jcommander:1.27" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.yaml:snakeyaml:1.6" level="project" />
</component>
</module>

View File

@ -0,0 +1,41 @@
## 目录:
http://aias.top/
### 下载模型放置于models目录
- 链接: https://pan.baidu.com/s/1LnKnwM2TcEi8SpP-cHbA9g?pwd=yu4g
### 烟火检测
支持烟雾-火灾2类检测.
### SDK功能
读取本地摄像头,实时(需要有显卡的台式机,否则会比较卡顿)检测烟火。
- 烟火检测,给出检测框和置信度
- 支持类别:
- fire
- smoke
#### 运行例子
- 测试图片
![fire_detect](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/sec_sdks/images/fire_detect_result.png)
### 官网:
[官网链接](http://www.aias.top/)
### Git地址
[Github链接](https://github.com/mymagicpower/AIAS)
[Gitee链接](https://gitee.com/mymagicpower/AIAS)
#### 帮助文档:
- http://aias.top/guides.html
- 1.性能优化常见问题:
- http://aias.top/AIAS/guides/performance.html
- 2.引擎配置包括CPUGPU在线自动加载及本地配置:
- http://aias.top/AIAS/guides/engine_config.html
- 3.模型加载方式(在线自动加载,及本地配置):
- http://aias.top/AIAS/guides/load_model.html
- 4.Windows环境常见问题:
- http://aias.top/AIAS/guides/windows.html

View File

@ -0,0 +1,41 @@
## 目录:
http://aias.top/
### 下载模型放置于models目录
- 链接: https://pan.baidu.com/s/1LnKnwM2TcEi8SpP-cHbA9g?pwd=yu4g
### 烟火检测
支持烟雾-火灾2类检测.
### SDK功能
读取本地摄像头,实时(需要有显卡的台式机,否则会比较卡顿)检测烟火。
- 烟火检测,给出检测框和置信度
- 支持类别:
- fire
- smoke
#### 运行例子
- 测试图片
![fire_detect](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/sec_sdks/images/fire_detect_result.png)
### 官网:
[官网链接](http://www.aias.top/)
### Git地址
[Github链接](https://github.com/mymagicpower/AIAS)
[Gitee链接](https://gitee.com/mymagicpower/AIAS)
#### 帮助文档:
- http://aias.top/guides.html
- 1.性能优化常见问题:
- http://aias.top/AIAS/guides/performance.html
- 2.引擎配置包括CPUGPU在线自动加载及本地配置:
- http://aias.top/AIAS/guides/engine_config.html
- 3.模型加载方式(在线自动加载,及本地配置):
- http://aias.top/AIAS/guides/load_model.html
- 4.Windows环境常见问题:
- http://aias.top/AIAS/guides/windows.html

View File

@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>calvin</groupId>
<artifactId>mp4-sdk</artifactId>
<version>0.23.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.23.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-engine</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch GPU 配置 -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-native-cu117</artifactId>-->
<!-- <classifier>win-x86_64</classifier>-->
<!-- <version>1.13.1</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-jni</artifactId>-->
<!-- <version>1.13.1-0.23.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.18</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>calvin</groupId>
<artifactId>mp4-sdk</artifactId>
<version>0.23.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.23.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<!-- Pytorch GPU 配置 -->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cu117</artifactId>
<classifier>win-x86_64</classifier>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>1.13.1-0.23.0</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.18</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,145 @@
package me.aias.example;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import me.aias.example.utils.FireSmokeDetect;
import me.aias.example.utils.OpenCVImageUtil;
import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Scalar;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
 * 本地视频烟火检测
 * Local video fire/smoke detection.
 *
 * <p>Reads an MP4 file, runs the fire/smoke detector on every 5th frame, draws the
 * detection boxes with class name and confidence, and displays the result in a window.
 *
 * @author Calvin
 */
public class MP4Example {

  public static void main(String[] args) throws IOException, ModelException, TranslateException {
    faceMaskDetection("src/test/resources/fire.mp4");
  }

  /**
   * 烟火检测
   * Fire/smoke detection on a local video file.
   *
   * <p>NOTE(review): despite its name (kept for backward compatibility with existing
   * callers), this method performs fire/smoke detection, not face-mask detection.
   *
   * @param input 视频源 - video source (path to an MP4 file)
   * @throws IOException if the model or video source cannot be read
   * @throws ModelException if the detection model fails to load
   * @throws TranslateException if inference on a frame fails
   */
  public static void faceMaskDetection(String input)
      throws IOException, ModelException, TranslateException {
    Criteria<Image, DetectedObjects> criteria = new FireSmokeDetect().criteria();

    // Use the typed ZooModel (the original used a raw type, losing generic safety).
    try (ZooModel<Image, DetectedObjects> model = ModelZoo.loadModel(criteria);
        Predictor<Image, DetectedObjects> predictor = model.newPredictor()) {

      FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(input);
      grabber.start();

      // Frame与Mat转换
      // Frame to Mat conversion
      OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();

      CanvasFrame canvas = new CanvasFrame("Fire Detections");
      canvas.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
      canvas.setVisible(true);
      canvas.setFocusable(true);
      // 窗口置顶 - keep the preview window on top
      if (canvas.isAlwaysOnTopSupported()) {
        canvas.setAlwaysOnTop(true);
      }

      // Ensure the window and the grabber are released even if inference throws
      // (the original only closed them on the normal exit path).
      try {
        Frame frame;
        int count = 0;
        // 获取图像帧
        // Grab image frames until the window is closed or the video ends
        while (canvas.isVisible() && (frame = grabber.grabImage()) != null) {
          count++;
          // 5 帧取一帧 - only run detection on every 5th frame to keep up with playback
          if (count < 5) {
            continue;
          }
          count = 0;

          // 将获取的frame转化成mat数据类型
          // Convert the grabbed frame to an OpenCV Mat
          Mat img = converter.convert(frame);
          BufferedImage buffImg = OpenCVImageUtil.mat2BufferedImage(img);
          Image image = ImageFactory.getInstance().fromImage(buffImg);
          int imageWidth = image.getWidth();
          int imageHeight = image.getHeight();

          DetectedObjects detections = predictor.predict(image);
          List<DetectedObjects.DetectedObject> items = detections.items();

          // 遍历检测结果 - iterate over detections, skipping low-confidence ones
          for (DetectedObjects.DetectedObject item : items) {
            if (item.getProbability() < 0.5f) {
              continue;
            }
            String className = item.getClassName() + " " + item.getProbability();
            BoundingBox box = item.getBoundingBox();
            Rectangle rectangle = box.getBounds();
            // Bounding boxes are normalized [0,1]; scale to pixel coordinates.
            int x = (int) (rectangle.getX() * imageWidth);
            int y = (int) (rectangle.getY() * imageHeight);
            Rect rect =
                new Rect(
                    x,
                    y,
                    (int) (rectangle.getWidth() * imageWidth),
                    (int) (rectangle.getHeight() * imageHeight));

            // 绘制矩形区域scalar色彩顺序BGR(蓝绿红)
            // Draw the detection rectangle; Scalar channel order is BGR
            rectangle(img, rect, new Scalar(0, 0, 255, 1));

            int pos_x = Math.max(rect.tl().x() - 10, 0);
            int pos_y = Math.max(rect.tl().y() - 10, 0);
            // 在矩形上面绘制文字
            // Draw the class label above the detection rectangle
            putText(
                img,
                className,
                new Point(pos_x, pos_y),
                FONT_HERSHEY_COMPLEX,
                1.0,
                new Scalar(0, 0, 255, 2.0));
          }

          // 显示视频图像
          // Display the annotated video frame
          canvas.showImage(frame);
        }
      } finally {
        canvas.dispose();
        grabber.close();
      }
    }
  }
}

View File

@ -0,0 +1,338 @@
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package me.aias.example.utils;
import ai.djl.Model;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.transform.CenterCrop;
import ai.djl.modality.cv.transform.Normalize;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.translate.*;
import ai.djl.util.Utils;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * Built-in {@code Translator} that provides default image pre-processing.
 *
 * <p>NOTE(review): {@link #processInput} below hard-codes a 640x640 resize plus
 * divide-by-255 scaling and does NOT run the configured {@link Pipeline} (the call is
 * commented out), so most of the pipeline built by {@link BaseBuilder#configPreProcess}
 * is unused at inference time. Confirm this is intentional for the YoloV5 model.
 *
 * @param <T> the output object type
 */
public abstract class BaseImageTranslator<T> implements Translator<Image, T> {

    // ImageNet mean/std; only applied when "normalize=true" is configured on the pipeline
    private static final float[] MEAN = {0.485f, 0.456f, 0.406f};
    private static final float[] STD = {0.229f, 0.224f, 0.225f};

    // color mode used when converting the input Image to an NDArray
    private Image.Flag flag;
    // configured pre-processing pipeline (currently bypassed, see class note)
    private Pipeline pipeline;
    // how individual inputs are combined into a batch
    private Batchifier batchifier;

    /**
     * Constructs an ImageTranslator with the provided builder.
     *
     * @param builder the data to build with
     */
    public BaseImageTranslator(BaseBuilder<?> builder) {
        flag = builder.flag;
        pipeline = builder.pipeline;
        batchifier = builder.batchifier;
    }

    /** {@inheritDoc} */
    @Override
    public Batchifier getBatchifier() {
        return batchifier;
    }

    /**
     * Processes the {@link Image} input and converts it to NDList.
     *
     * <p>Fixed steps: convert to NDArray, resize to 640x640, HWC -&gt; CHW, scale to [0,1].
     *
     * @param ctx the toolkit that helps create the input NDArray
     * @param input the {@link Image} input
     * @return a {@link NDList}
     */
    @Override
    public NDList processInput(TranslatorContext ctx, Image input) {
        NDArray array = input.toNDArray(ctx.getNDManager(), flag);
        // fixed YoloV5 input size; ignores the builder's width/height settings
        array = NDImageUtils.resize(array, 640, 640);
        array = array.transpose(2, 0, 1); // HWC -> CHW (no RGB->BGR swap actually happens here)
        // array = array.expandDims(0);
        array = array.div(255f); // scale pixel values to [0, 1]
        return new NDList(array);
        // return pipeline.transform(new NDList(array));
    }

    /** Reads {@code key} from {@code arguments} as a String, falling back to {@code def}. */
    protected static String getStringValue(Map<String, ?> arguments, String key, String def) {
        Object value = arguments.get(key);
        if (value == null) {
            return def;
        }
        return value.toString();
    }

    /**
     * Reads {@code key} from {@code arguments} as an int, falling back to {@code def}.
     * Parses through Double so values like "640.0" are accepted.
     */
    protected static int getIntValue(Map<String, ?> arguments, String key, int def) {
        Object value = arguments.get(key);
        if (value == null) {
            return def;
        }
        return (int) Double.parseDouble(value.toString());
    }

    /** Reads {@code key} from {@code arguments} as a float, falling back to {@code def}. */
    protected static float getFloatValue(Map<String, ?> arguments, String key, float def) {
        Object value = arguments.get(key);
        if (value == null) {
            return def;
        }
        return (float) Double.parseDouble(value.toString());
    }

    /** Reads {@code key} from {@code arguments} as a boolean, falling back to {@code def}. */
    protected static boolean getBooleanValue(Map<String, ?> arguments, String key, boolean def) {
        Object value = arguments.get(key);
        if (value == null) {
            return def;
        }
        return Boolean.parseBoolean(value.toString());
    }

    /**
     * A builder to extend for all classes extending the {@link BaseImageTranslator}.
     *
     * @param <T> the concrete builder type
     */
    @SuppressWarnings("rawtypes")
    public abstract static class BaseBuilder<T extends BaseBuilder> {

        // resize target dimensions (defaults 224x224, overridable via "width"/"height" args)
        protected int width = 224;
        protected int height = 224;
        protected Image.Flag flag = Image.Flag.COLOR;
        protected Pipeline pipeline;
        protected Batchifier batchifier = Batchifier.STACK;

        /**
         * Sets the optional {@link Image.Flag} (default is {@link
         * Image.Flag#COLOR}).
         *
         * @param flag the color mode for the images
         * @return this builder
         */
        public T optFlag(Image.Flag flag) {
            this.flag = flag;
            return self();
        }

        /**
         * Sets the {@link Pipeline} to use for pre-processing the image.
         *
         * @param pipeline the pre-processing pipeline
         * @return this builder
         */
        public T setPipeline(Pipeline pipeline) {
            this.pipeline = pipeline;
            return self();
        }

        /**
         * Adds the {@link Transform} to the {@link Pipeline} use for pre-processing the image.
         * Lazily creates the pipeline on first use.
         *
         * @param transform the {@link Transform} to be added
         * @return this builder
         */
        public T addTransform(Transform transform) {
            if (pipeline == null) {
                pipeline = new Pipeline();
            }
            pipeline.add(transform);
            return self();
        }

        /**
         * Sets the {@link Batchifier} for the {@link Translator}.
         *
         * @param batchifier the {@link Batchifier} to be set
         * @return this builder
         */
        public T optBatchifier(Batchifier batchifier) {
            this.batchifier = batchifier;
            return self();
        }

        /** Returns this builder, typed as the concrete subclass. */
        protected abstract T self();

        /** Validates that the builder is fully configured; called from build(). */
        protected void validate() {
            if (pipeline == null) {
                throw new IllegalArgumentException("pipeline is required.");
            }
        }

        /**
         * Populates the pre-processing pipeline from string-keyed arguments.
         * Recognized keys: width, height, flag, centerCrop, resize, toTensor,
         * normalize ("true"/"false" or 6 comma-separated mean/std values),
         * range ("0,1" or "-1,1"), batchifier.
         */
        protected void configPreProcess(Map<String, ?> arguments) {
            if (pipeline == null) {
                pipeline = new Pipeline();
            }
            width = getIntValue(arguments, "width", 224);
            height = getIntValue(arguments, "height", 224);
            if (arguments.containsKey("flag")) {
                flag = Image.Flag.valueOf(arguments.get("flag").toString());
            }
            if (getBooleanValue(arguments, "centerCrop", false)) {
                addTransform(new CenterCrop());
            }
            if (getBooleanValue(arguments, "resize", false)) {
                addTransform(new Resize(width, height));
            }
            // ToTensor defaults to true: converts HWC uint8 to CHW float
            if (getBooleanValue(arguments, "toTensor", true)) {
                addTransform(new ToTensor());
            }
            String normalize = getStringValue(arguments, "normalize", "false");
            if ("true".equals(normalize)) {
                addTransform(new Normalize(MEAN, STD));
            } else if (!"false".equals(normalize)) {
                // custom normalization: "m1,m2,m3,s1,s2,s3"
                String[] tokens = normalize.split("\\s*,\\s*");
                if (tokens.length != 6) {
                    throw new IllegalArgumentException("Invalid normalize value: " + normalize);
                }
                float[] mean = {
                    Float.parseFloat(tokens[0]),
                    Float.parseFloat(tokens[1]),
                    Float.parseFloat(tokens[2])
                };
                float[] std = {
                    Float.parseFloat(tokens[3]),
                    Float.parseFloat(tokens[4]),
                    Float.parseFloat(tokens[5])
                };
                addTransform(new Normalize(mean, std));
            }
            String range = (String) arguments.get("range");
            if ("0,1".equals(range)) {
                addTransform(a -> a.div(255f));
            } else if ("-1,1".equals(range)) {
                addTransform(a -> a.div(128f).sub(1));
            }
            if (arguments.containsKey("batchifier")) {
                batchifier = Batchifier.fromString((String) arguments.get("batchifier"));
            }
        }

        /** Hook for subclasses to read post-processing arguments; no-op here. */
        protected void configPostProcess(Map<String, ?> arguments) {}
    }

    /** A Builder to construct a {@code ImageClassificationTranslator}. */
    @SuppressWarnings("rawtypes")
    public abstract static class ClassificationBuilder<T extends BaseBuilder>
            extends BaseBuilder<T> {

        // source of the class-name list; resolved lazily when the model is loaded
        protected SynsetLoader synsetLoader;

        /**
         * Sets the name of the synset file listing the potential classes for an image.
         *
         * @param synsetArtifactName a file listing the potential classes for an image
         * @return the builder
         */
        public T optSynsetArtifactName(String synsetArtifactName) {
            synsetLoader = new SynsetLoader(synsetArtifactName);
            return self();
        }

        /**
         * Sets the URL of the synset file.
         *
         * @param synsetUrl the URL of the synset file
         * @return the builder
         */
        public T optSynsetUrl(String synsetUrl) {
            try {
                this.synsetLoader = new SynsetLoader(new URL(synsetUrl));
            } catch (MalformedURLException e) {
                throw new IllegalArgumentException("Invalid synsetUrl: " + synsetUrl, e);
            }
            return self();
        }

        /**
         * Sets the potential classes for an image.
         *
         * @param synset the potential classes for an image
         * @return the builder
         */
        public T optSynset(List<String> synset) {
            synsetLoader = new SynsetLoader(synset);
            return self();
        }

        /** {@inheritDoc} */
        @Override
        protected void validate() {
            super.validate();
            // fall back to the conventional synset.txt artifact if nothing was configured
            if (synsetLoader == null) {
                synsetLoader = new SynsetLoader("synset.txt");
            }
        }

        /** {@inheritDoc} */
        @Override
        protected void configPostProcess(Map<String, ?> arguments) {
            // precedence if several are given: synset list, then synsetUrl, then file name
            String synset = (String) arguments.get("synset");
            if (synset != null) {
                optSynset(Arrays.asList(synset.split(",")));
            }
            String synsetUrl = (String) arguments.get("synsetUrl");
            if (synsetUrl != null) {
                optSynsetUrl(synsetUrl);
            }
            String synsetFileName = (String) arguments.get("synsetFileName");
            if (synsetFileName != null) {
                optSynsetArtifactName(synsetFileName);
            }
        }
    }

    /**
     * Loads the class-name list (synset) from one of three sources: an in-memory list,
     * a URL, or a named artifact inside the model directory.
     */
    protected static final class SynsetLoader {

        private String synsetFileName;
        private URL synsetUrl;
        private List<String> synset;

        public SynsetLoader(List<String> synset) {
            this.synset = synset;
        }

        public SynsetLoader(URL synsetUrl) {
            this.synsetUrl = synsetUrl;
        }

        public SynsetLoader(String synsetFileName) {
            this.synsetFileName = synsetFileName;
        }

        /**
         * Resolves the class names, preferring the in-memory list, then the URL,
         * then the model artifact.
         *
         * @param model the loaded model whose artifacts may contain the synset file
         * @return the list of class names
         * @throws IOException if the URL or artifact cannot be read
         */
        public List<String> load(Model model) throws IOException {
            if (synset != null) {
                return synset;
            } else if (synsetUrl != null) {
                try (InputStream is = synsetUrl.openStream()) {
                    return Utils.readLines(is);
                }
            }
            return model.getArtifact(synsetFileName, Utils::readLines);
        }
    }
}

View File

@ -0,0 +1,54 @@
package me.aias.example.utils;
import ai.djl.Device;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Translator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Fire &amp; smoke detector configuration (YoloV5).
 *
 * <p>Model source: https://github.com/gengyanlei/fire-smoke-detect-yolov4
 *
 * @author Calvin
 * @email 179209347@qq.com
 **/
public final class FireSmokeDetect {

    private static final Logger logger = LoggerFactory.getLogger(FireSmokeDetect.class);

    public FireSmokeDetect() {}

    /**
     * Builds the DJL {@link Criteria} describing how to load and drive the model.
     *
     * @return criteria for the PyTorch YoloV5 fire/smoke model, running on CPU,
     *         loaded from {@code models/fire_smoke.zip}
     */
    public Criteria<Image, DetectedObjects> criteria() {
        // Translator configuration: resize the input to 640x640 and rescale the
        // predicted boxes back to relative coordinates.
        Map<String, Object> cfg = new ConcurrentHashMap<>();
        cfg.put("width", 640);
        cfg.put("height", 640);
        cfg.put("resize", true);
        cfg.put("rescale", true);
        // post-processing: confidence cut-off and non-max-suppression IoU threshold
        cfg.put("threshold", 0.2);
        cfg.put("nmsThreshold", 0.5);

        Translator<Image, DetectedObjects> yoloTranslator = YoloV5Translator.builder(cfg).build();

        return Criteria.builder()
                .setTypes(Image.class, DetectedObjects.class)
                .optModelPath(Paths.get("models/fire_smoke.zip"))
                .optTranslator(yoloTranslator)
                .optProgress(new ProgressBar())
                .optEngine("PyTorch")
                .optDevice(Device.cpu())
                .build();
    }
}

View File

@ -0,0 +1,216 @@
package me.aias.example.utils;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
public class ImageUtils {

    /**
     * Converts a {@link BufferedImage} to a DJL {@link Image}.
     *
     * @author Calvin
     */
    public static Image convert(BufferedImage img) {
        return ImageFactory.getInstance().fromImage(img);
    }

    /**
     * Saves a {@link BufferedImage} as a PNG file under {@code path/name}.
     *
     * <p>NOTE(review): an {@link IOException} is swallowed and only printed; callers get no
     * signal that saving failed. Consider surfacing the error.
     *
     * @author Calvin
     */
    public static void saveImage(BufferedImage img, String name, String path) {
        Image djlImg = ImageFactory.getInstance().fromImage(img); // factory adapts multiple image formats automatically
        Path outputDir = Paths.get(path);
        Path imagePath = outputDir.resolve(name);
        try {
            djlImg.save(Files.newOutputStream(imagePath), "png");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Saves a DJL {@link Image} as a PNG file under {@code path/name}.
     *
     * <p>NOTE(review): same as above — the {@link IOException} is swallowed and only printed.
     *
     * @author Calvin
     */
    public static void saveImage(Image img, String name, String path) {
        Path outputDir = Paths.get(path);
        Path imagePath = outputDir.resolve(name);
        try {
            img.save(Files.newOutputStream(imagePath), "png");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Saves an image with its detection boxes drawn on it.
     *
     * <p>Note: the boxes are drawn directly onto {@code img} (it is mutated, no copy is made),
     * then the result is written as PNG because OpenJDK can't save jpg with an alpha channel.
     *
     * @author Calvin
     */
    public static void saveBoundingBoxImage(
            Image img, DetectedObjects detection, String name, String path) throws IOException {
        img.drawBoundingBoxes(detection);
        Path outputDir = Paths.get(path);
        Files.createDirectories(outputDir);
        Path imagePath = outputDir.resolve(name);
        img.save(Files.newOutputStream(imagePath), "png");
    }

    /**
     * Draws facial landmark points (green 1x1 rectangles) onto the image.
     *
     * <p>{@code array} holds interleaved x,y pairs in box-relative coordinates;
     * {@link #getX}/{@link #getY} map them to absolute pixel positions.
     *
     * @author Calvin
     */
    public static void drawLandmark(Image img, BoundingBox box, float[] array) {
        for (int i = 0; i < array.length / 2; i++) {
            int x = getX(img, box, array[2 * i]);
            int y = getY(img, box, array[2 * i + 1]);
            Color c = new Color(0, 255, 0);
            drawImageRect((BufferedImage) img.getWrappedImage(), x, y, 1, 1, c);
        }
    }

    /**
     * Draws a bounding box in the default orange color with a 4px stroke.
     *
     * @author Calvin
     */
    public static void drawImageRect(BufferedImage image, int x, int y, int width, int height) {
        // obtain a Graphics2D for drawing; always disposed in finally
        Graphics2D g = (Graphics2D) image.getGraphics();
        try {
            g.setColor(new Color(246, 96, 0));
            // stroke: 4px wide, butt caps, miter joins (sharp corners)
            BasicStroke bStroke = new BasicStroke(4, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
            g.setStroke(bStroke);
            g.drawRect(x, y, width, height);
        } finally {
            g.dispose();
        }
    }

    /**
     * Draws a bounding box in the given color with a 1px stroke.
     *
     * @author Calvin
     */
    public static void drawImageRect(
            BufferedImage image, int x, int y, int width, int height, Color c) {
        // obtain a Graphics2D for drawing; always disposed in finally
        Graphics2D g = (Graphics2D) image.getGraphics();
        try {
            g.setColor(c);
            // stroke: 1px wide, butt caps, miter joins (sharp corners)
            BasicStroke bStroke = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER);
            g.setStroke(bStroke);
            g.drawRect(x, y, width, height);
        } finally {
            g.dispose();
        }
    }

    /**
     * Draws {@code text} near the top of the image in a large (100pt) KaiTi font,
     * roughly centered horizontally around x=fontSize.
     *
     * @author Calvin
     */
    public static void drawImageText(BufferedImage image, String text) {
        Graphics graphics = image.getGraphics();
        int fontSize = 100;
        Font font = new Font("楷体", Font.PLAIN, fontSize);
        try {
            graphics.setFont(font);
            graphics.setColor(new Color(246, 96, 0));
            int strWidth = graphics.getFontMetrics().stringWidth(text);
            graphics.drawString(text, fontSize - (strWidth / 2), fontSize + 30);
        } finally {
            graphics.dispose();
        }
    }

    /**
     * Returns a sub-image of the face box expanded outward by {@code factor}
     * (factor = 1 expands by 100%, factor = 0.2 by 20%), clamped to the image bounds.
     *
     * @author Calvin
     */
    public static Image getSubImage(Image img, BoundingBox box, float factor) {
        Rectangle rect = box.getBounds();
        // top-left corner in absolute pixels (box coords are relative [0,1])
        int x1 = (int) (rect.getX() * img.getWidth());
        int y1 = (int) (rect.getY() * img.getHeight());
        // box width and height in pixels
        int w = (int) (rect.getWidth() * img.getWidth());
        int h = (int) (rect.getHeight() * img.getHeight());
        // bottom-right corner
        int x2 = x1 + w;
        int y2 = y1 + h;
        // expand outward to avoid black borders after face alignment,
        // clamping to the image boundary
        int new_x1 = Math.max((int) (x1 + x1 * factor / 2 - x2 * factor / 2), 0);
        int new_x2 = Math.min((int) (x2 + x2 * factor / 2 - x1 * factor / 2), img.getWidth() - 1);
        int new_y1 = Math.max((int) (y1 + y1 * factor / 2 - y2 * factor / 2), 0);
        int new_y2 = Math.min((int) (y2 + y2 * factor / 2 - y1 * factor / 2), img.getHeight() - 1);
        int new_w = new_x2 - new_x1;
        int new_h = new_y2 - new_y1;
        return img.getSubImage(new_x1, new_y1, new_w, new_h);
    }

    /** Maps a box-relative x coordinate to an absolute pixel x position. */
    private static int getX(Image img, BoundingBox box, float x) {
        Rectangle rect = box.getBounds();
        // box top-left x in pixels
        int x1 = (int) (rect.getX() * img.getWidth());
        // box width in pixels
        int w = (int) (rect.getWidth() * img.getWidth());
        return (int) (x * w + x1);
    }

    /** Maps a box-relative y coordinate to an absolute pixel y position. */
    private static int getY(Image img, BoundingBox box, float y) {
        Rectangle rect = box.getBounds();
        // box top-left y in pixels
        int y1 = (int) (rect.getY() * img.getHeight());
        // box height in pixels
        int h = (int) (rect.getHeight() * img.getHeight());
        return (int) (y * h + y1);
    }
}

View File

@ -0,0 +1,121 @@
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package me.aias.example.utils;
import ai.djl.Model;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.ndarray.NDArray;
import ai.djl.translate.TranslatorContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
 * A {@link BaseImageTranslator} that post-process the {@link NDArray} into {@link DetectedObjects}
 * with boundaries.
 */
public abstract class ObjectDetectionTranslator extends BaseImageTranslator<DetectedObjects> {

    // minimum confidence a detection must reach to be kept
    protected float threshold;
    // lazily loads the class-name list from the model artifacts (see prepare())
    private SynsetLoader synsetLoader;
    // resolved class names; populated once in prepare()
    protected List<String> classes;
    // rescale target size, used by subclasses to map boxes back to relative coordinates
    protected double imageWidth;
    protected double imageHeight;

    /**
     * Creates the {@link ObjectDetectionTranslator} from the given builder.
     *
     * @param builder the builder for the translator
     */
    protected ObjectDetectionTranslator(ObjectDetectionBuilder<?> builder) {
        super(builder);
        this.threshold = builder.threshold;
        this.synsetLoader = builder.synsetLoader;
        this.imageWidth = builder.imageWidth;
        this.imageHeight = builder.imageHeight;
    }

    /** {@inheritDoc} */
    @Override
    public void prepare(TranslatorContext ctx) throws IOException {
        Model model = ctx.getModel();
        // load the synset once; subsequent calls are no-ops
        if (classes == null) {
            classes = synsetLoader.load(model);
        }
    }

    /** The base builder for the object detection translator. */
    @SuppressWarnings("rawtypes")
    public abstract static class ObjectDetectionBuilder<T extends ObjectDetectionBuilder>
            extends ClassificationBuilder<T> {

        // default confidence cut-off; overridable via the "threshold" argument
        protected float threshold = 0.2f;
        protected double imageWidth;
        protected double imageHeight;

        /**
         * Sets the threshold for prediction accuracy.
         *
         * <p>Predictions below the threshold will be dropped.
         *
         * @param threshold the threshold for the prediction accuracy
         * @return this builder
         */
        public T optThreshold(float threshold) {
            this.threshold = threshold;
            return self();
        }

        /**
         * Sets the optional rescale size.
         *
         * @param imageWidth the width to rescale images to
         * @param imageHeight the height to rescale images to
         * @return this builder
         */
        public T optRescaleSize(double imageWidth, double imageHeight) {
            this.imageWidth = imageWidth;
            this.imageHeight = imageHeight;
            return self();
        }

        /**
         * Get resized image width.
         *
         * @return image width
         */
        public double getImageWidth() {
            return imageWidth;
        }

        /**
         * Get resized image height.
         *
         * @return image height
         */
        public double getImageHeight() {
            return imageHeight;
        }

        /** {@inheritDoc} */
        @Override
        protected void configPostProcess(Map<String, ?> arguments) {
            super.configPostProcess(arguments);
            // "rescale=true" reuses the pre-processing width/height as rescale targets
            if (getBooleanValue(arguments, "rescale", false)) {
                optRescaleSize(width, height);
            }
            threshold = getFloatValue(arguments, "threshold", 0.2f);
        }
    }
}

View File

@ -0,0 +1,314 @@
/*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package me.aias.example.utils;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.types.DataType;
import ai.djl.translate.TranslatorContext;
import java.util.*;
/**
 * A translator for YoloV5 models. This was tested with ONNX exported Yolo models. For details check
 * here: https://github.com/ultralytics/yolov5
 */
public class YoloV5Translator extends ObjectDetectionTranslator {

    // which output head format the model produces (BOX, DETECT, or auto-detected)
    private YoloOutputType yoloOutputLayerType;
    // IoU threshold above which overlapping boxes of the same class are suppressed
    private float nmsThreshold;

    /**
     * Constructs an ImageTranslator with the provided builder.
     *
     * @param builder the data to build with
     */
    protected YoloV5Translator(Builder builder) {
        super(builder);
        yoloOutputLayerType = builder.outputType;
        nmsThreshold = builder.nmsThreshold;
    }

    /**
     * Creates a builder to build a {@link YoloV5Translator}.
     *
     * @return a new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Creates a builder to build a {@code YoloV5Translator} with specified arguments.
     *
     * @param arguments arguments to specify builder options
     * @return a new builder
     */
    public static Builder builder(Map<String, ?> arguments) {
        Builder builder = new Builder();
        builder.configPreProcess(arguments);
        builder.configPostProcess(arguments);
        return builder;
    }

    /**
     * Returns the intersection area of two boxes, computed via 1-D overlaps of their
     * center/extent on each axis; 0 when they do not overlap.
     */
    protected double boxIntersection(Rectangle a, Rectangle b) {
        double w =
                overlap(
                        (a.getX() * 2 + a.getWidth()) / 2,
                        a.getWidth(),
                        (b.getX() * 2 + b.getWidth()) / 2,
                        b.getWidth());
        double h =
                overlap(
                        (a.getY() * 2 + a.getHeight()) / 2,
                        a.getHeight(),
                        (b.getY() * 2 + b.getHeight()) / 2,
                        b.getHeight());
        if (w < 0 || h < 0) {
            return 0;
        }
        return w * h;
    }

    /** Returns intersection-over-union of two boxes. */
    protected double boxIou(Rectangle a, Rectangle b) {
        return boxIntersection(a, b) / boxUnion(a, b);
    }

    /** Returns the union area of two boxes (sum of areas minus intersection). */
    protected double boxUnion(Rectangle a, Rectangle b) {
        double i = boxIntersection(a, b);
        return (a.getWidth()) * (a.getHeight()) + (b.getWidth()) * (b.getHeight()) - i;
    }

    /**
     * Runs per-class non-maximum suppression over the candidate detections and converts
     * the survivors into {@link DetectedObjects} with box coordinates normalized by the
     * configured rescale size ({@code imageWidth}/{@code imageHeight}).
     */
    protected DetectedObjects nms(List<IntermediateResult> list) {
        List<String> retClasses = new ArrayList<>();
        List<Double> retProbs = new ArrayList<>();
        List<BoundingBox> retBB = new ArrayList<>();
        for (int k = 0; k < classes.size(); k++) {
            // 1.find max confidence per class
            PriorityQueue<IntermediateResult> pq =
                    new PriorityQueue<>(
                            50,
                            (lhs, rhs) -> {
                                // Intentionally reversed to put high confidence at the head of the
                                // queue.
                                return Double.compare(rhs.getConfidence(), lhs.getConfidence());
                            });
            for (IntermediateResult intermediateResult : list) {
                if (intermediateResult.getDetectedClass() == k) {
                    pq.add(intermediateResult);
                }
            }
            // 2.do non maximum suppression
            while (pq.size() > 0) {
                // insert detection with max confidence
                IntermediateResult[] a = new IntermediateResult[pq.size()];
                IntermediateResult[] detections = pq.toArray(a);
                Rectangle rec = detections[0].getLocation();
                retClasses.add(detections[0].id);
                retProbs.add(detections[0].confidence);
                // normalize pixel coordinates back to [0,1] relative coordinates
                retBB.add(
                        new Rectangle(
                                rec.getX() / super.imageWidth,
                                rec.getY() / super.imageHeight,
                                rec.getWidth() / super.imageWidth,
                                rec.getHeight() / super.imageHeight));
                pq.clear();
                // keep only candidates that do not overlap the kept box too much;
                // they compete again in the next round
                for (int j = 1; j < detections.length; j++) {
                    IntermediateResult detection = detections[j];
                    Rectangle location = detection.getLocation();
                    if (boxIou(rec, location) < nmsThreshold) {
                        pq.add(detection);
                    }
                }
            }
        }
        return new DetectedObjects(retClasses, retProbs, retBB);
    }

    /**
     * Returns the length of the 1-D overlap between two segments given by center and
     * width; negative when the segments are disjoint.
     */
    protected double overlap(double x1, double w1, double x2, double w2) {
        double l1 = x1 - w1 / 2;
        double l2 = x2 - w2 / 2;
        double left = Math.max(l1, l2);
        double r1 = x1 + w1 / 2;
        double r2 = x2 + w2 / 2;
        double right = Math.min(r1, r2);
        return right - left;
    }

    /**
     * Decodes the flat "box" output layout: rows of
     * [cx, cy, w, h, objectness, classScore...], keeping rows whose best
     * class-score * objectness exceeds the threshold, then applying NMS.
     */
    private DetectedObjects processFromBoxOutput(NDList list) {
        float[] flattened = list.get(0).toFloatArray();
        ArrayList<IntermediateResult> intermediateResults = new ArrayList<>();
        int sizeClasses = classes.size();
        int stride = 5 + sizeClasses;
        int size = flattened.length / stride;
        for (int i = 0; i < size; i++) {
            int indexBase = i * stride;
            float maxClass = 0;
            int maxIndex = 0;
            // pick the best-scoring class for this candidate box
            for (int c = 0; c < sizeClasses; c++) {
                if (flattened[indexBase + c + 5] > maxClass) {
                    maxClass = flattened[indexBase + c + 5];
                    maxIndex = c;
                }
            }
            // final confidence = class score * objectness
            float score = maxClass * flattened[indexBase + 4];
            if (score > threshold) {
                float xPos = flattened[indexBase];
                float yPos = flattened[indexBase + 1];
                float w = flattened[indexBase + 2];
                float h = flattened[indexBase + 3];
                // convert center-format to top-left, clamped at 0 (w/h are not adjusted)
                Rectangle rect = new Rectangle(Math.max(0, xPos - w / 2), Math.max(0, yPos - h / 2), w, h);
                intermediateResults.add(
                        new IntermediateResult(classes.get(maxIndex), score, maxIndex, rect));
            }
        }
        return nms(intermediateResults);
    }

    /** The "detect" output head is not supported; models must be exported in box format. */
    private DetectedObjects processFromDetectOutput() {
        throw new UnsupportedOperationException(
                "detect layer output is not supported yet, check correct YoloV5 export format");
    }

    /** {@inheritDoc} */
    @Override
    public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
        switch (yoloOutputLayerType) {
            case DETECT:
                return processFromDetectOutput();
            case AUTO:
                // >2 dims means the raw detect head; 2-D output means the flat box layout
                if (list.get(0).getShape().dimension() > 2) {
                    return processFromDetectOutput();
                } else {
                    return processFromBoxOutput(list);
                }
            case BOX:
            default:
                return processFromBoxOutput(list);
        }
    }

    /** A enum represents the Yolo output type. */
    public enum YoloOutputType {
        BOX,
        DETECT,
        AUTO
    }

    /** The builder for {@link YoloV5Translator}. */
    public static class Builder extends ObjectDetectionBuilder<Builder> {

        YoloOutputType outputType = YoloOutputType.AUTO;
        float nmsThreshold = 0.4f;

        /**
         * Sets the {@code YoloOutputType}.
         *
         * @param outputType the {@code YoloOutputType}
         * @return this builder
         */
        public Builder optOutputType(YoloOutputType outputType) {
            this.outputType = outputType;
            return this;
        }

        /**
         * Sets the NMS threshold.
         *
         * @param nmsThreshold the NMS threshold
         * @return this builder
         */
        public Builder optNmsThreshold(float nmsThreshold) {
            this.nmsThreshold = nmsThreshold;
            return this;
        }

        /** {@inheritDoc} */
        @Override
        protected Builder self() {
            return this;
        }

        /** {@inheritDoc} */
        @Override
        protected void configPostProcess(Map<String, ?> arguments) {
            super.configPostProcess(arguments);
            String type = getStringValue(arguments, "outputType", "AUTO");
            outputType = YoloOutputType.valueOf(type.toUpperCase(Locale.ENGLISH));
            nmsThreshold = getFloatValue(arguments, "nmsThreshold", 0.4f);
        }

        /**
         * Builds the translator.
         *
         * @return the new translator
         */
        public YoloV5Translator build() {
            // custom pipeline to match default YoloV5 input layer
            if (pipeline == null) {
                addTransform(array -> array.transpose(2, 0, 1).toType(DataType.FLOAT32, false).div(255));
            }
            validate();
            return new YoloV5Translator(this);
        }
    }

    /** A single candidate detection in pixel coordinates, prior to NMS. */
    private static final class IntermediateResult {

        /**
         * A sortable score for how good the recognition is relative to others. Higher should be better.
         */
        private double confidence;

        /** Index of the detected class within the synset. */
        private int detectedClass;

        /** Display name (class name) for the recognition. */
        private String id;

        /** Location within the source image of the recognized object, in pixels. */
        private Rectangle location;

        IntermediateResult(String id, double confidence, int detectedClass, Rectangle location) {
            this.confidence = confidence;
            this.id = id;
            this.detectedClass = detectedClass;
            this.location = location;
        }

        public double getConfidence() {
            return confidence;
        }

        public int getDetectedClass() {
            return detectedClass;
        }

        public String getId() {
            return id;
        }

        /** Returns a defensive copy of the location rectangle. */
        public Rectangle getLocation() {
            return new Rectangle(
                    location.getX(), location.getY(), location.getWidth(), location.getHeight());
        }
    }
}

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 393 KiB

After

Width:  |  Height:  |  Size: 423 KiB

View File

@ -0,0 +1,60 @@
#### Common Model Loading Methods
1. How to load a model online via URL?
```text
# Use optModelUrls to load a model via URL
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
2. How to load a model locally?
```text
# Use optModelPath to load a model from a zipped file
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer.zip");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
# Use optModelPath to load a model from a local directory
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer/");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
3. How to load a model packed into a JAR file?
```text
# Use optModelUrls to load a model
# Assuming the model is located in the JAR file at:
# BOOT-INF/classes/ch_ppocr_mobile_v2.0_det_infer.zip
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("jar:///ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="INFO">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout
pattern="[%-5level] - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<Root level="info" additivity="false">
<AppenderRef ref="console"/>
</Root>
<Logger name="me.calvin" level="${sys:me.calvin.logging.level:-info}" additivity="false">
<AppenderRef ref="console"/>
</Logger>
</Loggers>
</Configuration>

View File

@ -0,0 +1,60 @@
#### Common Model Loading Methods
1. How to load a model online via URL?
```text
# Use optModelUrls to load a model via URL
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
2. How to load a model locally?
```text
# Use optModelPath to load a model from a zipped file
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer.zip");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
# Use optModelPath to load a model from a local directory
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer/");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
3. How to load a model packed into a JAR file?
```text
# Use optModelUrls to load a model
# Assuming the model is located in the JAR file at:
# BOOT-INF/classes/ch_ppocr_mobile_v2.0_det_infer.zip
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("jar:///ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```

View File

@ -0,0 +1,140 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>mp4-facemask-sdk</artifactId>
<version>0.20.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.20.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cpu</artifactId>
<classifier>win-x86_64</classifier>
<scope>runtime</scope>
<version>1.13.0</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>1.13.0-0.20.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- PaddlePaddle -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.paddlepaddle</groupId>-->
<!-- <artifactId>paddlepaddle-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-native-cpu</artifactId>
<classifier>win-x86_64</classifier>
<version>2.3.2</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,140 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>mp4-facemask-sdk</artifactId>
<version>0.20.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.20.0</djl.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- 服务器端推理引擎 -->
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>basicdataset</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- Pytorch -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.pytorch</groupId>-->
<!-- <artifactId>pytorch-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-native-cpu</artifactId>
<classifier>win-x86_64</classifier>
<scope>runtime</scope>
<version>1.13.0</version>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-jni</artifactId>
<version>1.13.0-0.20.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<!-- PaddlePaddle -->
<!-- <dependency>-->
<!-- <groupId>ai.djl.paddlepaddle</groupId>-->
<!-- <artifactId>paddlepaddle-engine</artifactId>-->
<!-- <version>${djl.version}</version>-->
<!-- </dependency>-->
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-native-cpu</artifactId>
<classifier>win-x86_64</classifier>
<version>2.3.2</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>ai.djl.paddlepaddle</groupId>
<artifactId>paddlepaddle-model-zoo</artifactId>
<version>${djl.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>javacv-platform</artifactId>
<version>1.5.7</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,180 @@
package me.aias.example.utils;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import java.awt.*;
import java.awt.image.*;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
/**
 * 图片类型转换
 * Image type conversion utilities between {@link BufferedImage} and OpenCV {@link Mat}.
 *
 * @author Calvin
 */
public class OpenCVImageUtil {
    /**
     * Converts a BufferedImage to an OpenCV Mat via a JavaCV Frame.
     *
     * @param original source image
     * @return the converted Mat
     */
    public static Mat bufferedImage2Mat(BufferedImage original) {
        Java2DFrameConverter frameConverter = new Java2DFrameConverter();
        OpenCVFrameConverter.ToMat matConverter = new OpenCVFrameConverter.ToMat();
        return matConverter.convertToMat(frameConverter.convert(original));
    }

    /**
     * Converts an OpenCV Mat to a BufferedImage.
     *
     * <p>Supports single-channel (grayscale) and 3-channel matrices; any other channel
     * count yields {@code null}.
     *
     * @param matrix source matrix
     * @return the converted image, or {@code null} if the channel count is unsupported
     */
    public static BufferedImage mat2BufferedImage(Mat matrix) {
        int width = matrix.cols();
        int height = matrix.rows();
        byte[] pixels = new byte[width * height * (int) matrix.elemSize()];
        matrix.data().get(pixels);

        int imageType;
        switch (matrix.channels()) {
            case 1:
                imageType = BufferedImage.TYPE_BYTE_GRAY;
                break;
            case 3:
                imageType = BufferedImage.TYPE_3BYTE_BGR;
                // Swap the first and third byte of each pixel triplet before
                // copying into the raster (reverses the channel order).
                for (int i = 0; i < pixels.length; i += 3) {
                    byte tmp = pixels[i];
                    pixels[i] = pixels[i + 2];
                    pixels[i + 2] = tmp;
                }
                break;
            default:
                // Unsupported channel count
                return null;
        }

        // A BufferedImage stores its pixels in a Raster (a DataBuffer holding the
        // sample values plus a SampleModel describing their layout), so writing the
        // raw bytes into the raster fills the whole image in a single call.
        BufferedImage image = new BufferedImage(width, height, imageType);
        image.getRaster().setDataElements(0, 0, width, height, pixels);
        return image;
    }

    /**
     * Draws {@code msg} onto the image at (x, y) and converts the annotated image to a Mat
     * of the given OpenCV type.
     *
     * <p>NOTE(review): assumes the image raster is backed by a {@link DataBufferByte} whose
     * byte layout matches {@code matType} — confirm for image types other than
     * {@code TYPE_3BYTE_BGR}.
     *
     * @param original image to annotate and convert (modified in place by the drawing)
     * @param matType OpenCV element type of the resulting Mat
     * @param msg text to draw
     * @param x text x position
     * @param y text y position
     * @return the converted Mat
     */
    public static Mat bufferedImage2Mat(
            BufferedImage original, int matType, String msg, int x, int y) {
        Graphics2D graphics = original.createGraphics();
        try {
            graphics.setComposite(AlphaComposite.Src);
            graphics.drawImage(original, 0, 0, null);
            graphics.drawString(msg, x, y);
        } finally {
            // Always release the native graphics context
            graphics.dispose();
        }
        Mat mat = new Mat(original.getHeight(), original.getWidth(), matType);
        byte[] raw = ((DataBufferByte) original.getRaster().getDataBuffer()).getData();
        mat.data().put(raw);
        return mat;
    }

    /**
     * Wraps a 24-bit BGR pixel array into a BufferedImage.
     *
     * @param src BGR-ordered 24-bit image pixel data array
     * @param width image width
     * @param height image height
     * @return image of type {@code TYPE_3BYTE_BGR} filled with the pixels
     */
    public static BufferedImage BGR2BufferedImage(byte[] src, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
        byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        ByteBuffer.wrap(target).put(src, 0, src.length);
        return image;
    }

    /**
     * Copies a 24-bit BGR byte buffer into a BufferedImage.
     *
     * @param src BGR-ordered 24-bit image pixel byte buffer
     * @param width image width
     * @param height image height
     * @return image of type {@code TYPE_3BYTE_BGR} filled with the pixels
     */
    public static BufferedImage BGR2BufferedImage(ByteBuffer src, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
        byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        ByteBuffer.wrap(target).put(src);
        return image;
    }

    /**
     * Copies a 24-bit BGR integer buffer (one int per pixel) into a BufferedImage.
     *
     * @param src BGR-ordered 24-bit image pixel integer buffer
     * @param width image width
     * @param height image height
     * @return image of type {@code TYPE_INT_BGR} filled with the pixels
     */
    public static BufferedImage BGR2BufferedImage(IntBuffer src, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_BGR);
        int[] target = ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        IntBuffer.wrap(target).put(src);
        return image;
    }

    /**
     * Copies a 24-bit RGB integer buffer (one int per pixel) into a BufferedImage.
     *
     * @param src RGB-ordered 24-bit image pixel integer buffer
     * @param width image width
     * @param height image height
     * @return image of type {@code TYPE_INT_RGB} filled with the pixels
     */
    public static BufferedImage RGB2BufferedImage(IntBuffer src, int width, int height) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        int[] target = ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        IntBuffer.wrap(target).put(src);
        return image;
    }
}

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="INFO">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout
pattern="[%-5level] - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<Root level="info" additivity="false">
<AppenderRef ref="console"/>
</Root>
<Logger name="me.calvin" level="${sys:me.calvin.logging.level:-info}" additivity="false">
<AppenderRef ref="console"/>
</Logger>
</Loggers>
</Configuration>

View File

@ -0,0 +1,33 @@
### Download the model and put it in the models directory
- Link 1: https://github.com/mymagicpower/AIAS/releases/download/apps/face_detection.zip
- Link 2: https://github.com/mymagicpower/AIAS/releases/download/apps/face_mask.zip
### Mask detection
Mask detection is helping to fight against pneumonia, and artificial intelligence technology is being applied to epidemic prevention and control. In cutting off the transmission pathway of anti-epidemic diseases, wearing masks has become one of the most important measures. However, in actual scenarios, there are still people who do not take it seriously, do not pay attention, or have a lucky mentality and do not wear masks, especially in public places, which poses great risks to individuals and the public. The mask detection function based on artificial intelligence can perform real-time detection based on the camera video stream.
## SDK function
Masks are detected in real time from an RTSP stream (a desktop computer with a discrete graphics card is recommended; otherwise playback will lag).
- RTSP address of cameras such as Hikvision/Dahua: rtsp://user:password@192.168.16.100:554/Streaming/Channels/1
- RTSP address of video platforms such as Hikvision/Dahua: rtsp://192.168.16.88:554/openUrl/6rcShva
- Your own RTSP address
## Run the face detection example
1. First, download the sample code
```bash
git clone https://github.com/mymagicpower/AIAS.git
```
2. Import the examples project into the IDE:
```
cd rtsp_facemask_sdk
```
3. Run the example code: RtspFaceMaskDetectionExample
## The effect is as follows:
![result](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/mask_sdk/face-masks.png)

View File

@ -0,0 +1,56 @@
## 目录:
http://aias.top/
### 下载模型放置于models目录
- 链接: https://pan.baidu.com/s/1_pVwsMV6C04xUjIJmjGQsw?pwd=bpnp
### 口罩检测
口罩检测助力抗击肺炎,人工智能技术正被应用到疫情防控中来。 抗疫切断传播途径中,
佩戴口罩已经几乎成为了最重要的举措之一。但是在实际场景中,仍然有不重视、不注意、
侥幸心理的人员不戴口罩,尤其在公众场合,给个人和公众造成极大的风险隐患。
而基于人工智能的口罩检测功能可以基于摄像头视频流进行实时检测。
## SDK功能
通过rtsp取流实时需要有显卡的台式机否则会比较卡顿检测口罩。
- 海康/大华等摄像机的rtsp地址rtsp://user:password@192.168.16.100:554/Streaming/Channels/1
- 海康/大华等视频平台的rtsp地址rtsp://192.168.16.88:554/openUrl/6rcShva
- 自己的rtsp地址
## 运行人脸检测的例子
1. 首先下载例子代码
```bash
git clone https://github.com/mymagicpower/AIAS.git
```
2. 导入examples项目到IDE中
```
cd rtsp_facemask_sdk
```
3. 运行例子代码RtspFaceMaskDetectionExample
## 效果如下:
![result](https://aias-home.oss-cn-beijing.aliyuncs.com/AIAS/mask_sdk/face-masks.png)
### 帮助
引擎定制化配置,可以提升首次运行的引擎下载速度,解决外网无法访问或者带宽过低的问题。
[引擎定制化配置](http://aias.top/engine_cpu.html)
### 官网:
[官网链接](http://www.aias.top/)
### Git地址
[Github链接](https://github.com/mymagicpower/AIAS)
[Gitee链接](https://gitee.com/mymagicpower/AIAS)
#### 帮助文档:
- http://aias.top/guides.html
- 1.性能优化常见问题:
- http://aias.top/AIAS/guides/performance.html
- 2.引擎配置包括CPUGPU在线自动加载及本地配置:
- http://aias.top/AIAS/guides/engine_config.html
- 3.模型加载方式(在线自动加载,及本地配置):
- http://aias.top/AIAS/guides/load_model.html
- 4.Windows环境常见问题:
- http://aias.top/AIAS/guides/windows.html

View File

@ -0,0 +1,60 @@
#### Common Model Loading Methods
1. How to load a model online via URL?
```text
# Use optModelUrls to load a model via URL
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("https://aias-home.oss-cn-beijing.aliyuncs.com/models/ocr_models/ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
2. How to load a model locally?
```text
# Use optModelPath to load a model from a zipped file
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer.zip");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
# Use optModelPath to load a model from a local directory
Path modelPath = Paths.get("src/test/resources/ch_ppocr_mobile_v2.0_det_infer/");
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelPath(modelPath)
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```
3. How to load a model packed into a JAR file?
```text
# Use optModelUrls to load a model
# Assuming the model is located in the JAR file at:
# BOOT-INF/classes/ch_ppocr_mobile_v2.0_det_infer.zip
Criteria<Image, DetectedObjects> criteria =
Criteria.builder()
.optEngine("PaddlePaddle")
.setTypes(Image.class, DetectedObjects.class)
.optModelUrls("jar:///ch_ppocr_mobile_v2.0_det_infer.zip")
.optTranslator(new PpWordDetectionTranslator(new ConcurrentHashMap<String, String>()))
.optProgress(new ProgressBar())
.build();
```

View File

@ -24,7 +24,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>aias</groupId>
<artifactId>mp4-facemask-sdk</artifactId>
<artifactId>rtsp-facemask-sdk</artifactId>
<version>0.17.0</version>
<properties>

View File

@ -0,0 +1,197 @@
package me.aias.example;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import me.aias.example.utils.FaceDetection;
import me.aias.example.utils.FaceMaskDetect;
import me.aias.example.utils.OpenCVImageUtil;
import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Scalar;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.List;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
/**
 * Rtsp取流口罩检测
 * Face-mask detection on an RTSP video stream.
 *
 * <p>Grabs frames from an RTSP source (e.g. Hikvision/Dahua cameras), detects faces with a
 * PaddlePaddle model, classifies each face as masked/unmasked, and renders the annotated
 * frames in a preview window.
 *
 * @author Calvin
 */
public class RtspFaceMaskDetectionExample {
    public static void main(String[] args) throws IOException, ModelException, TranslateException {
        // RTSP address of Hikvision/Dahua cameras: rtsp://user:password@192.168.16.100:554/Streaming/Channels/1
        // RTSP address of Hikvision/Dahua video platforms: rtsp://192.168.16.88:554/openUrl/6rcShva
        // Replace with your own RTSP address
        String rtsp = "";
        faceMaskDetection(rtsp);
    }

    /**
     * Runs mask detection on the given video source until the preview window is closed or the
     * stream ends.
     *
     * @param input video source: an RTSP URL or a local video file path
     * @throws IOException if the stream cannot be read
     * @throws ModelException if a model fails to load
     * @throws TranslateException if inference fails on a frame
     */
    public static void faceMaskDetection(String input)
            throws IOException, ModelException, TranslateException {
        float shrink = 0.5f;
        float threshold = 0.7f;
        Criteria<Image, DetectedObjects> criteria = new FaceDetection().criteria(shrink, threshold);
        Criteria<Image, Classifications> maskCriteria = new FaceMaskDetect().criteria();

        // Read the video file or stream; each grabbed Frame is converted to a Mat for drawing.
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(input);
        if (input.contains("rtsp")) {
            grabber.setFormat("rtsp");
            // Media types to accept from the server (supported: video, audio, data);
            // restrict to video only
            grabber.setOption("allowed_media_types", "video");
            // Use TCP as the RTSP transport
            grabber.setOption("rtsp_transport", "tcp");
            /*
             * rtsp_flags: [filter_src, prefer_tcp, listen]
             * filter_src: only accept packets from the negotiated peer address and port
             * prefer_tcp: if TCP is available as RTSP RTP transport, try it first
             * listen: act as an RTSP server, waiting for incoming connections
             * Prefer TCP for RTP transport.
             */
            grabber.setOption("rtsp_flags", "prefer_tcp");
            /*
             * Maximum time to wait for an incoming connection, in microseconds
             * (default -1: wait forever). Setting this switches rtsp_flags to
             * "listen", i.e. RTSP server mode.
             */
            // grabber.setOption("timeout", "30");
            // Socket-level network timeout, in microseconds
            grabber.setOption("stimeout", "3000000");
            // Number of packets to buffer for handling reordered packets
            // grabber.setOption("reorder_queue_size", "");
            // Minimum local UDP port (default 5000)
            // grabber.setOption("min_port", "5000");
            // Maximum local UDP port (default 65000)
            // grabber.setOption("max_port", "65000");
        }
        grabber.start();

        // Frame <-> Mat conversion
        OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();

        CanvasFrame canvas = new CanvasFrame("人脸检测"); // preview window
        canvas.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        canvas.setVisible(true);
        canvas.setFocusable(true);
        // Keep the window on top
        if (canvas.isAlwaysOnTopSupported()) {
            canvas.setAlwaysOnTop(true);
        }

        try (ZooModel<Image, DetectedObjects> model = ModelZoo.loadModel(criteria);
                Predictor<Image, DetectedObjects> predictor = model.newPredictor();
                ZooModel<Image, Classifications> classifyModel = ModelZoo.loadModel(maskCriteria);
                Predictor<Image, Classifications> classifier = classifyModel.newPredictor()) {
            Frame frame;
            // Grab frames until the window is closed or the stream ends
            while (canvas.isVisible() && (frame = grabber.grabImage()) != null) {
                // Convert the grabbed frame to a Mat for OpenCV drawing
                Mat img = converter.convert(frame);
                BufferedImage buffImg = OpenCVImageUtil.mat2BufferedImage(img);
                Image image = ImageFactory.getInstance().fromImage(buffImg);
                int imageWidth = image.getWidth();
                int imageHeight = image.getHeight();

                DetectedObjects detections = predictor.predict(image);
                List<DetectedObjects.DetectedObject> items = detections.items();
                // For each detected face, classify masked/unmasked and draw the result
                for (DetectedObjects.DetectedObject item : items) {
                    Image subImg = getSubImage(image, item.getBoundingBox());
                    Classifications classifications = classifier.predict(subImg);
                    String className = classifications.best().getClassName();

                    BoundingBox box = item.getBoundingBox();
                    Rectangle rectangle = box.getBounds();
                    // Bounding boxes are fractional; scale to pixel coordinates
                    int x = (int) (rectangle.getX() * imageWidth);
                    int y = (int) (rectangle.getY() * imageHeight);
                    Rect face =
                            new Rect(
                                    x,
                                    y,
                                    (int) (rectangle.getWidth() * imageWidth),
                                    (int) (rectangle.getHeight() * imageHeight));

                    // Draw the face rectangle; Scalar channel order is BGR
                    rectangle(img, face, new Scalar(0, 0, 255, 1));
                    int pos_x = Math.max(face.tl().x() - 10, 0);
                    int pos_y = Math.max(face.tl().y() - 10, 0);
                    // Draw the class label just above the face rectangle
                    putText(
                            img,
                            className,
                            new Point(pos_x, pos_y),
                            FONT_HERSHEY_COMPLEX,
                            1.0,
                            new Scalar(0, 0, 255, 2.0));
                }
                // Show the annotated frame
                canvas.showImage(frame);
            }
        } finally {
            // Release the window and the stream even if loading or inference fails
            canvas.dispose();
            grabber.close();
        }
    }

    /**
     * Expands a box to a square centered on it, optionally enlarged by {@code percentage}.
     *
     * @return {xmin, ymin, side} of the square; values may fall outside the image, callers clamp
     */
    private static int[] extendSquare(
            double xmin, double ymin, double width, double height, double percentage) {
        double centerx = xmin + width / 2;
        double centery = ymin + height / 2;
        double maxDist = Math.max(width / 2, height / 2) * (1 + percentage);
        return new int[] {(int) (centerx - maxDist), (int) (centery - maxDist), (int) (2 * maxDist)};
    }

    /**
     * Crops the square face region referenced by {@code box} out of {@code img},
     * clamped to the image bounds.
     */
    private static Image getSubImage(Image img, BoundingBox box) {
        Rectangle rect = box.getBounds();
        int width = img.getWidth();
        int height = img.getHeight();
        int[] squareBox =
                extendSquare(
                        rect.getX() * width,
                        rect.getY() * height,
                        rect.getWidth() * width,
                        rect.getHeight() * height,
                        0); // 0.18 would enlarge the crop by 18%
        // Clamp the square to the image bounds
        if (squareBox[0] < 0) squareBox[0] = 0;
        if (squareBox[1] < 0) squareBox[1] = 0;
        if (squareBox[0] > width) squareBox[0] = width;
        if (squareBox[1] > height) squareBox[1] = height;
        if ((squareBox[0] + squareBox[2]) > width) squareBox[2] = width - squareBox[0];
        if ((squareBox[1] + squareBox[2]) > height) squareBox[2] = height - squareBox[1];
        return img.getSubImage(squareBox[0], squareBox[1], squareBox[2], squareBox[2]);
    }
}

View File

@ -0,0 +1,103 @@
package me.aias.example.utils;
import ai.djl.Device;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Builds DJL criteria for a PaddlePaddle face-detection model loaded from a local zip
 * ({@code models/face_detection.zip}).
 */
public final class FaceDetection {
    private static final Logger logger = LoggerFactory.getLogger(FaceDetection.class);

    public FaceDetection() {}

    /**
     * Creates the criteria used to load the face-detection model.
     *
     * @param shrink resize factor applied to the input image before inference (e.g. 0.5)
     * @param threshold minimum confidence for a detection to be kept
     * @return criteria bundling the model path, engine, and pre/post-processing translator
     */
    public Criteria<Image, DetectedObjects> criteria(float shrink, float threshold) {
        Criteria<Image, DetectedObjects> criteria =
                Criteria.builder()
                        .optEngine("PaddlePaddle")
                        .setTypes(Image.class, DetectedObjects.class)
                        .optModelPath(Paths.get("models/face_detection.zip"))
                        .optProgress(new ProgressBar())
                        .optTranslator(new FaceTranslator(shrink, threshold))
                        .build();
        return criteria;
    }

    /**
     * Pre/post-processing for the face-detection model.
     *
     * <p>Declared {@code static} so instances do not hold an implicit reference to the
     * enclosing {@code FaceDetection} object.
     */
    private static final class FaceTranslator implements Translator<Image, DetectedObjects> {
        private final float shrink;
        private final float threshold;
        private final List<String> className;

        FaceTranslator(float shrink, float threshold) {
            this.shrink = shrink;
            this.threshold = threshold;
            className = Arrays.asList("Not Face", "Face");
        }

        @Override
        public DetectedObjects processOutput(TranslatorContext ctx, NDList list) {
            return processImageOutput(list, className, threshold);
        }

        @Override
        public NDList processInput(TranslatorContext ctx, Image input) {
            return processImageInput(ctx.getNDManager(), input, shrink);
        }

        /** No batching: images are fed to the model one at a time. */
        @Override
        public Batchifier getBatchifier() {
            return null;
        }

        NDList processImageInput(NDManager manager, Image input, float shrink) {
            NDArray array = input.toNDArray(manager);
            Shape shape = array.getShape();
            // Shrink both spatial dimensions before inference (shape is HWC)
            array =
                    NDImageUtils.resize(array, (int) (shape.get(1) * shrink), (int) (shape.get(0) * shrink));
            array = array.transpose(2, 0, 1).flip(0); // HWC -> CHW and reverse channel order
            NDArray mean = manager.create(new float[] {104f, 117f, 123f}, new Shape(3, 1, 1));
            array = array.sub(mean).mul(0.007843f); // normalization
            array = array.expandDims(0); // make batch dimension
            return new NDList(array);
        }

        DetectedObjects processImageOutput(NDList list, List<String> className, float threshold) {
            NDArray result = list.singletonOrThrow();
            // Column 1 holds the confidence score of each candidate box
            float[] probabilities = result.get(":,1").toFloatArray();
            List<String> names = new ArrayList<>();
            List<Double> prob = new ArrayList<>();
            List<BoundingBox> boxes = new ArrayList<>();
            for (int i = 0; i < probabilities.length; i++) {
                if (probabilities[i] >= threshold) {
                    // Each row: [classId, score, xmin, ymin, xmax, ymax]
                    float[] array = result.get(i).toFloatArray();
                    names.add(className.get((int) array[0]));
                    prob.add((double) probabilities[i]);
                    boxes.add(new Rectangle(array[2], array[3], array[4] - array[2], array[5] - array[3]));
                }
            }
            return new DetectedObjects(names, prob, boxes);
        }
    }
}

View File

@ -0,0 +1,99 @@
package me.aias.example.utils;
import ai.djl.Device;
import ai.djl.inference.Predictor;
import ai.djl.modality.Classifications;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.modality.cv.transform.Normalize;
import ai.djl.modality.cv.transform.Resize;
import ai.djl.modality.cv.transform.ToTensor;
import ai.djl.modality.cv.translator.ImageClassificationTranslator;
import ai.djl.repository.zoo.Criteria;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.TranslateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Runs face detection followed by mask classification, and builds the criteria for the
 * PaddlePaddle mask-classification model loaded from {@code models/face_mask.zip}.
 */
public final class FaceMaskDetect {
    private static final Logger logger = LoggerFactory.getLogger(FaceMaskDetect.class);

    public FaceMaskDetect() {}

    /**
     * Detects faces in {@code image} and classifies each one as masked/unmasked.
     *
     * @param faceDetector predictor producing face bounding boxes
     * @param classifier predictor classifying a cropped face as masked/unmasked
     * @param image input image
     * @return detections carrying the classifier's best class name per face, with the face
     *     detector's probability and bounding box
     * @throws TranslateException if either model fails during inference
     */
    public DetectedObjects predict(
            Predictor<Image, DetectedObjects> faceDetector,
            Predictor<Image, Classifications> classifier,
            Image image)
            throws TranslateException {
        DetectedObjects detections = faceDetector.predict(image);
        List<DetectedObjects.DetectedObject> faces = detections.items();

        List<String> names = new ArrayList<>();
        List<Double> prob = new ArrayList<>();
        List<BoundingBox> rect = new ArrayList<>();
        for (DetectedObjects.DetectedObject face : faces) {
            // Classify the cropped face region as masked / unmasked
            Image subImg = getSubImage(image, face.getBoundingBox());
            Classifications classifications = classifier.predict(subImg);
            names.add(classifications.best().getClassName());
            prob.add(face.getProbability());
            rect.add(face.getBoundingBox());
        }
        return new DetectedObjects(names, prob, rect);
    }

    /**
     * Creates the criteria used to load the mask-classification model.
     *
     * @return criteria bundling the model path, engine, and preprocessing pipeline
     */
    public Criteria<Image, Classifications> criteria() {
        Criteria<Image, Classifications> criteria =
                Criteria.builder()
                        .optEngine("PaddlePaddle")
                        .setTypes(Image.class, Classifications.class)
                        .optTranslator(
                                ImageClassificationTranslator.builder()
                                        .addTransform(new Resize(128, 128))
                                        .addTransform(new ToTensor()) // HWC -> CHW div(255)
                                        .addTransform(
                                                new Normalize(
                                                        new float[] {0.5f, 0.5f, 0.5f}, new float[] {1.0f, 1.0f, 1.0f}))
                                        .addTransform(nd -> nd.flip(0)) // reverse channel order
                                        .build())
                        .optModelPath(Paths.get("models/face_mask.zip"))
                        .optProgress(new ProgressBar())
                        .build();
        return criteria;
    }

    /**
     * Expands a box to a square centered on it, optionally enlarged by {@code percentage}.
     *
     * @return {xmin, ymin, side} of the square; values may fall outside the image, callers clamp
     */
    private int[] extendSquare(
            double xmin, double ymin, double width, double height, double percentage) {
        double centerx = xmin + width / 2;
        double centery = ymin + height / 2;
        double maxDist = Math.max(width / 2, height / 2) * (1 + percentage);
        return new int[] {(int) (centerx - maxDist), (int) (centery - maxDist), (int) (2 * maxDist)};
    }

    /**
     * Crops the square face region referenced by {@code box} out of {@code img}, clamped to the
     * image bounds so the crop never reads outside the image (faces near an edge previously
     * produced an out-of-bounds sub-image request).
     */
    private Image getSubImage(Image img, BoundingBox box) {
        Rectangle rect = box.getBounds();
        int width = img.getWidth();
        int height = img.getHeight();
        int[] squareBox =
                extendSquare(
                        rect.getX() * width,
                        rect.getY() * height,
                        rect.getWidth() * width,
                        rect.getHeight() * height,
                        0); // 0.18 would enlarge the crop by 18%
        // Clamp the square to the image bounds
        if (squareBox[0] < 0) squareBox[0] = 0;
        if (squareBox[1] < 0) squareBox[1] = 0;
        if (squareBox[0] > width) squareBox[0] = width;
        if (squareBox[1] > height) squareBox[1] = height;
        if ((squareBox[0] + squareBox[2]) > width) squareBox[2] = width - squareBox[0];
        if ((squareBox[1] + squareBox[2]) > height) squareBox[2] = height - squareBox[1];
        return img.getSubImage(squareBox[0], squareBox[1], squareBox[2], squareBox[2]);
    }
}

View File

@ -0,0 +1,156 @@
package me.aias.example.utils;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.opencv_core.Mat;
import java.awt.*;
import java.awt.image.*;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
/**
 * Image type conversions between {@link BufferedImage}, OpenCV {@link Mat},
 * and raw BGR/RGB pixel buffers, using JavaCV's frame converters.
 *
 * @author Calvin
 */
public class OpenCVImageUtil {
/**
 * Converts a BufferedImage to an OpenCV Mat via JavaCV's Java2D/OpenCV
 * frame converters.
 *
 * @param original source image
 * @return the converted Mat
 */
public static Mat bufferedImage2Mat(BufferedImage original) {
OpenCVFrameConverter.ToMat cv = new OpenCVFrameConverter.ToMat();
return cv.convertToMat(new Java2DFrameConverter().convert(original));
}
/**
 * Converts a Mat to a BufferedImage by copying its raw bytes.
 *
 * <p>Supports 1-channel (grayscale) and 3-channel Mats only; any other
 * channel count returns {@code null}.
 *
 * @param matrix source Mat
 * @return a new BufferedImage, or {@code null} for unsupported channel counts
 */
public static BufferedImage mat2BufferedImage(Mat matrix) {
int cols = matrix.cols();
int rows = matrix.rows();
int elemSize = (int) matrix.elemSize();
// Copy the Mat's pixel data into a Java byte array.
byte[] data = new byte[cols * rows * elemSize];
matrix.data().get(data);
int type = 0;
switch (matrix.channels()) {
case 1:
type = BufferedImage.TYPE_BYTE_GRAY;
break;
case 3:
type = BufferedImage.TYPE_3BYTE_BGR;
// In-place swap of bytes i and i+2 for every pixel (B <-> R).
// NOTE(review): TYPE_3BYTE_BGR stores samples as B,G,R — the same
// order as OpenCV's default BGR Mats — so this swap appears to flip
// red/blue for standard Mats; confirm the expected Mat channel order.
byte b;
for (int i = 0; i < data.length; i = i + 3) {
b = data[i];
data[i] = data[i + 2];
data[i + 2] = b;
}
break;
default:
return null;
}
BufferedImage image = new BufferedImage(cols, rows, type);
// A BufferedImage is made of a Raster (pixel storage) and a ColorModel
// (color interpretation). The Raster wraps a DataBuffer holding sample
// values plus a SampleModel describing how to locate each sample, so
// pixel data can be written directly into it.
image.getRaster().setDataElements(0, 0, cols, rows, data);
return image;
}
/**
 * Draws a text message onto the image and converts it to a Mat of the
 * given OpenCV type.
 *
 * <p>NOTE(review): this mutates {@code original} (the message is drawn
 * onto it) and assumes its raster is backed by a DataBufferByte (e.g.
 * TYPE_3BYTE_BGR) matching {@code matType} — other image types will fail
 * the cast; confirm at call sites.
 *
 * @param original source image (modified in place)
 * @param matType OpenCV Mat type matching the image's pixel layout
 * @param msg text to draw
 * @param x text x position
 * @param y text y position
 * @return a Mat sharing the drawn pixel data
 */
public static Mat bufferedImage2Mat(
BufferedImage original, int matType, String msg, int x, int y) {
Graphics2D g = original.createGraphics();
try {
g.setComposite(AlphaComposite.Src);
g.drawImage(original, 0, 0, null);
g.drawString(msg, x, y);
} finally {
g.dispose();
}
Mat mat = new Mat(original.getHeight(), original.getWidth(), matType);
mat.data().put(((DataBufferByte) original.getRaster().getDataBuffer()).getData());
return mat;
}
/**
 * Wraps a 24-bit BGR byte array into a new BufferedImage.
 * @param src pixel data in BGR order, 3 bytes per pixel
 * @param width image width
 * @param height image height
 * @return a TYPE_3BYTE_BGR image whose buffer is filled from {@code src}
 */
public static BufferedImage BGR2BufferedImage(byte[] src,int width,int height) {
BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
Raster ra = image.getRaster();
DataBuffer out = ra.getDataBuffer();
DataBufferByte db=(DataBufferByte)out;
// Copy src directly into the image's backing byte buffer.
ByteBuffer.wrap(db.getData()).put(src,0,src.length);
return image;
}
/**
 * Copies a 24-bit BGR ByteBuffer into a new BufferedImage.
 * @param src pixel data in BGR order, 3 bytes per pixel
 * @param width image width
 * @param height image height
 * @return a TYPE_3BYTE_BGR image whose buffer is filled from {@code src}
 */
public static BufferedImage BGR2BufferedImage(ByteBuffer src,int width,int height) {
BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
Raster ra = image.getRaster();
DataBuffer out = ra.getDataBuffer();
DataBufferByte db=(DataBufferByte)out;
ByteBuffer.wrap(db.getData()).put(src);
return image;
}
/**
 * Copies a 24-bit BGR IntBuffer (one int per pixel) into a new BufferedImage.
 * @param src pixel data in BGR int-packed order
 * @param width image width
 * @param height image height
 * @return a TYPE_INT_BGR image whose buffer is filled from {@code src}
 */
public static BufferedImage BGR2BufferedImage(IntBuffer src, int width, int height) {
BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_BGR);
Raster ra = image.getRaster();
DataBuffer out = ra.getDataBuffer();
DataBufferInt db=(DataBufferInt)out;
IntBuffer.wrap(db.getData()).put(src);
return image;
}
/**
 * Copies a 24-bit RGB IntBuffer (one int per pixel) into a new BufferedImage.
 * @param src pixel data in RGB int-packed order
 * @param width image width
 * @param height image height
 * @return a TYPE_INT_RGB image whose buffer is filled from {@code src}
 */
public static BufferedImage RGB2BufferedImage(IntBuffer src,int width,int height) {
BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
Raster ra = image.getRaster();
DataBuffer out = ra.getDataBuffer();
DataBufferInt db=(DataBufferInt)out;
IntBuffer.wrap(db.getData()).put(src);
return image;
}
}

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log4j2 configuration: a single console appender printing "[LEVEL] - message". -->
<Configuration status="INFO">
<Appenders>
<Console name="console" target="SYSTEM_OUT">
<PatternLayout
pattern="[%-5level] - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<!-- Default level is info for everything. -->
<Root level="info" additivity="false">
<AppenderRef ref="console"/>
</Root>
<!-- Level overridable via -Dme.calvin.logging.level (defaults to info).
     NOTE(review): logger name "me.calvin" does not match the
     "me.aias.example" package used in this project — confirm intended. -->
<Logger name="me.calvin" level="${sys:me.calvin.logging.level:-info}" additivity="false">
<AppenderRef ref="console"/>
</Logger>
</Loggers>
</Configuration>