update opencv api

Calvin 2022-03-14 11:36:43 +08:00
parent b1f4f905c1
commit 76deb4789e
14 changed files with 206 additions and 261 deletions

Binary file not shown (before: 319 KiB)
Binary file not shown (before: 316 KiB)
Binary file not shown (before: 332 KiB)
Binary file not shown (before: 30 KiB)
Binary file not shown (before: 31 KiB)
Binary file not shown (before: 32 KiB)
Binary file not shown (before: 2.6 MiB)

View File

@@ -20,19 +20,19 @@
<orderEntry type="library" name="aais-face-align-lib-0.1.0" level="project" />
<orderEntry type="library" name="aais-retinaface-lib-0.1.0" level="project" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.4" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-slf4j-impl:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-slf4j-impl:2.15.0" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.25" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.12.1" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.apache.logging.log4j:log4j-core:2.12.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.logging.log4j:log4j-api:2.15.0" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: org.apache.logging.log4j:log4j-core:2.15.0" level="project" />
<orderEntry type="library" name="Maven: com.google.code.gson:gson:2.8.5" level="project" />
<orderEntry type="library" name="Maven: ai.djl:api:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl:api:0.15.0" level="project" />
<orderEntry type="library" name="Maven: net.java.dev.jna:jna:5.9.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.21" level="project" />
<orderEntry type="library" name="Maven: ai.djl:basicdataset:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl:basicdataset:0.15.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.8" level="project" />
<orderEntry type="library" name="Maven: ai.djl:model-zoo:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-engine:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-model-zoo:0.14.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl:model-zoo:0.15.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-engine:0.15.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-model-zoo:0.15.0" level="project" />
<orderEntry type="library" name="Maven: ai.djl.pytorch:pytorch-native-auto:1.9.1" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv-platform:1.5.1" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:javacv:1.5.1" level="project" />
@@ -184,6 +184,8 @@
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:macosx-x86_64:4.1.0-1.5.1" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86:4.1.0-1.5.1" level="project" />
<orderEntry type="library" name="Maven: org.bytedeco:tesseract:windows-x86_64:4.1.0-1.5.1" level="project" />
<orderEntry type="library" name="Maven: ai.djl.opencv:opencv:0.15.0" level="project" />
<orderEntry type="library" name="Maven: org.openpnp:opencv:4.5.1-2" level="project" />
<orderEntry type="library" name="Maven: gov.nist.math:jama:1.0.3" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.testng:testng:6.8.1" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.10" level="project" />

View File

@@ -31,7 +31,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<djl.version>0.14.0</djl.version>
<djl.version>0.15.0</djl.version>
</properties>
<build>
@@ -102,7 +102,11 @@
<artifactId>javacv-platform</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>ai.djl.opencv</groupId>
<artifactId>opencv</artifactId>
<version>0.15.0</version>
</dependency>
<dependency>
<groupId>gov.nist.math</groupId>
<artifactId>jama</artifactId>
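For reference, once the ai.djl.opencv:opencv artifact above is on the classpath, images can be loaded through DJL's OpenCVImageFactory instead of JavaCV, and the wrapped object is an org.opencv.core.Mat. A minimal sketch, assuming a hypothetical input path:

import ai.djl.modality.cv.Image;
import ai.djl.opencv.OpenCVImageFactory;
import org.opencv.core.Mat;
import java.nio.file.Paths;

public class OpenCVLoadSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical path; any local image file works
        Image image = new OpenCVImageFactory().fromFile(Paths.get("src/test/resources/face.jpg"));
        // Under the new factory the wrapped image is an org.opencv Mat, not a BufferedImage
        Mat mat = (Mat) image.getWrappedImage();
        System.out.println(mat.width() + "x" + mat.height());
    }
}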

View File

@@ -10,16 +10,15 @@ import ai.djl.modality.cv.output.Point;
import ai.djl.modality.cv.output.Rectangle;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.opencv.OpenCVImageFactory;
import ai.djl.repository.zoo.ModelZoo;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.translate.TranslateException;
import me.aias.util.*;
import org.bytedeco.javacv.Java2DFrameUtils;
import org.bytedeco.opencv.opencv_core.Mat;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -41,9 +40,8 @@ public class FaceAlignExample {
public static void process(String facePath)
throws IOException, ModelException, TranslateException {
Path path = Paths.get(facePath);
File file = path.toFile();
BufferedImage img = (BufferedImage) ImageIO.read(file);
Image image = ImageFactory.getInstance().fromImage(img);
ImageFactory defFactory = new OpenCVImageFactory();
Image image = defFactory.fromFile(path);
// topK value
int topK = 500;
@@ -69,12 +67,12 @@
// Crop the face region
// factor = 0.1f means enlarge the rect by 10% so the face is not partially cut off after the affine transform
Rectangle subImageRect =
FaceUtils.getSubImageRect(rectangle, img.getWidth(), img.getHeight(), 1.0f);
FaceUtils.getSubImageRect(rectangle, image.getWidth(), image.getHeight(), 1.0f);
int x = (int) (subImageRect.getX());
int y = (int) (subImageRect.getY());
int w = (int) (subImageRect.getWidth());
int h = (int) (subImageRect.getHeight());
BufferedImage subImage = img.getSubimage(x, y, w, h);
Image subImage = image.getSubImage(x, y, w, h);
// Save the cropped face image
ImageUtils.saveImage(subImage, "face_" + index + ".png", "build/output");
@@ -90,9 +88,6 @@
// Compute the new coordinates of the face landmarks within the sub-image
double[][] pointsArray = FaceUtils.pointsArray(subImageRect, points);
// BufferedImage to Mat
Mat mat = Java2DFrameUtils.toMat(subImage);
// NDArray
NDManager manager = NDManager.newBaseManager();
NDArray srcPoints = manager.create(pointsArray);
@@ -100,16 +95,12 @@
// Custom 5-point affine transform
Mat svdMat = NDArrayUtils.toOpenCVMat(manager, srcPoints, dstPoints);
mat = FaceAlignment.get5WarpAffineImg(mat, svdMat);
Mat mat = FaceAlignment.get5WarpAffineImg((Mat)subImage.getWrappedImage(), svdMat);
// Convert the Mat to a BufferedImage
BufferedImage bufferedImage = Java2DFrameUtils.toBufferedImage(mat);
int width = bufferedImage.getWidth() > 112 ? 112 : bufferedImage.getWidth();
int height = bufferedImage.getHeight() > 112 ? 112 : bufferedImage.getHeight();
bufferedImage = bufferedImage.getSubimage(0, 0, width, height);
// Save the aligned face image
ImageUtils.saveImage(bufferedImage, "face_align_" + index++ + ".png", "build/output");
int width = mat.width() > 112 ? 112 : mat.width();
int height = mat.height() > 112 ? 112 : mat.height();
Image img = OpenCVImageFactory.getInstance().fromImage(mat).getSubImage(0, 0, width, height);
ImageUtils.saveImage(img, "face_align_" + index++ + ".png", "build/output");
}
}
}
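Condensed, the migrated example stays inside DJL/OpenCV types end to end: load with OpenCVImageFactory, crop with Image.getSubImage, and pass the wrapped org.opencv Mat to the affine helper. A compilable sketch of that call pattern, with the detection and landmark steps collapsed into parameters (pointsArray/dstArray stand in for the values the example computes):

import ai.djl.modality.cv.Image;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.opencv.OpenCVImageFactory;
import me.aias.util.FaceAlignment;
import me.aias.util.ImageUtils;
import me.aias.util.NDArrayUtils;
import org.opencv.core.Mat;

public class AlignFlowSketch {
    // Structural sketch only; the face rectangle and landmark arrays come from the detector
    static void align(Image image, int x, int y, int w, int h,
                      double[][] pointsArray, double[][] dstArray) {
        Image subImage = image.getSubImage(x, y, w, h);
        try (NDManager manager = NDManager.newBaseManager()) {
            NDArray srcPoints = manager.create(pointsArray);
            NDArray dstPoints = manager.create(dstArray);
            Mat svdMat = NDArrayUtils.toOpenCVMat(manager, srcPoints, dstPoints);
            Mat aligned = FaceAlignment.get5WarpAffineImg((Mat) subImage.getWrappedImage(), svdMat);
            ImageUtils.saveImage(new OpenCVImageFactory().fromImage(aligned), "face_align.png", "build/output");
        }
    }
}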

View File

@@ -1,171 +1,16 @@
package me.aias.util;
import org.bytedeco.javacpp.indexer.DoubleRawIndexer;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.CvMat;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point2f;
import org.bytedeco.opencv.opencv_core.Point2fVector;
import java.nio.FloatBuffer;
import static org.bytedeco.opencv.global.opencv_calib3d.findHomography;
import static org.bytedeco.opencv.global.opencv_core.cvCreateMat;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
public class FaceAlignment {
public static Mat affineTransform(
Mat src, Point2f srcPoints, Point2f dstPoints) {
Mat dst = src.clone();
// https://github.com/bytedeco/javacv/issues/788
Mat warp_mat = opencv_imgproc.getAffineTransform(srcPoints.position(0), dstPoints.position(0));
opencv_imgproc.warpAffine(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat perspectiveTransform(
Mat src, Point2f srcPoints, Point2f dstPoints) {
Mat dst = src.clone();
Mat warp_mat = opencv_imgproc.getPerspectiveTransform(srcPoints.position(0), dstPoints.position(0));
opencv_imgproc.warpPerspective(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat getHomography(Mat src, double[] srcPoints, double[] dstPoints) {
Mat dst = src.clone();
Mat warp_mat = createHomography(srcPoints, dstPoints);
opencv_imgproc.warpPerspective(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat createHomography(double[] src, double[] dst) {
CvMat srcPoints;
CvMat dstPoints;
int nbPoints = src.length / 2;
Mat homography;
srcPoints = cvCreateMat(2, nbPoints, opencv_core.CV_32FC1);
dstPoints = cvCreateMat(2, nbPoints, opencv_core.CV_32FC1);
Mat newSvdMat = new Mat(2, 3, opencv_core.CV_32FC1);
// homography = cvCreateMat(3, 3, opencv_core.CV_32FC1);
for (int i = 0; i < nbPoints; i++) {
srcPoints.put(i, src[2 * i]);
srcPoints.put(i + nbPoints, src[2 * i + 1]);
dstPoints.put(i, dst[2 * i]);
dstPoints.put(i + nbPoints, dst[2 * i + 1]);
}
homography = findHomography(returnMat(srcPoints), returnMat(dstPoints));
return homography;
}
public static Mat returnMat(CvMat mtx) {
double valor;
final int rows = mtx.rows();
final int cols = mtx.cols();
Mat mat = new Mat(rows, cols, opencv_core.CV_64F);
final int step = mtx.step() / 4;
FloatBuffer buf = mtx.getFloatBuffer();
DoubleRawIndexer ldIdx = mat.createIndexer();
for (int row = 0; row < rows; row++) {
buf.position(row * step);
for (int col = 0; col < cols; col++) {
valor = buf.get();
ldIdx.put(row, col, valor);
}
}
ldIdx.release();
return mat;
}
// Rotated affine transform based on the 5 target landmarks
public static Mat get5WarpAffineImg(Mat src, Mat rot_mat) {
Mat oral = new Mat();
src.copyTo(oral);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
return rot;
}
// Rotated affine transform around the midpoint between the eyes
// Point2fVector pv = FaceUtils.point2fVector(subImageRect, points);
// mat = FaceAlignment.get5WarpAffineImg(mat, pv);
public static Mat get5WarpAffineImg(Mat src, Point2fVector landmarks) {
Mat oral = new Mat();
src.copyTo(oral);
// Landmark coordinates in the image:
// 1. left_eye_x , left_eye_y
// 2. right_eye_x , right_eye_y
// 3. nose_x , nose_y
// 4. left_mouth_x , left_mouth_y
// 5. right_mouth_x , right_mouth_y
// Compute the eye-center point and rotate around it; point 1 is the left eye, point 2 is the right eye
Point2f eyesCenter = new Point2f(landmarks.get()[2].x(), landmarks.get()[2].y()); // the 3rd point lies between the eyes
// Compute the angle between the two eyes
float dy = (landmarks.get()[1].y() - landmarks.get()[0].y()); // 2 - 1
float dx = (landmarks.get()[1].x() - landmarks.get()[0].x()); // 2 - 1
double angle = Math.atan2(dy, dx) * 180.0 / opencv_core.CV_PI;
// Radians to degrees
// Build the affine matrix from eyesCenter, angle and scale; 1.0 means no scaling
// cv2.getRotationMatrix2D takes: 1. rotation center, 2. rotation angle, 3. scale factor; a positive angle rotates counter-clockwise and the rotated image may extend beyond the original bounds
Mat rot_mat = opencv_imgproc.getRotationMatrix2D(eyesCenter, angle, 1.0);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
return rot;
}
public static Mat get68WarpAffineImg(Mat src, Point2fVector landmarks) {
Mat oral = new Mat();
src.copyTo(oral);
// for (int j = 0; j < landmarks.get().length; j++) {
// opencv_imgproc.circle(oral, new Point((int) landmarks.get()[j].x(),(int)
// landmarks.get()[j].y()),2,new Scalar(255, 0, 0 ,0));
// }
// opencv_imgcodecs.imwrite("/Users/calvin/Documents/Data_Faces_0/fa_result_1.jpg",oral);
// Compute the eye-center point and rotate around it; point 40 is the left eye, point 43 is the right eye
// Point2f eyesCenter = new Point2f( (landmarks.get()[39].x() + landmarks.get()[42].x()) * 0.5f,
// (landmarks.get()[39].y() + landmarks.get()[42].y()) * 0.5f );
Point2f eyesCenter =
new Point2f(landmarks.get()[27].x(), landmarks.get()[27].y()); // the 28th point lies between the eyes
// Compute the angle between the two eyes
float dy = (landmarks.get()[42].y() - landmarks.get()[39].y()); // 43 - 40
float dx = (landmarks.get()[42].x() - landmarks.get()[39].x()); // 43 - 40
double angle = Math.atan2(dy, dx) * 180.0 / opencv_core.CV_PI;
// Radians to degrees
// Build the affine matrix from eyesCenter, angle and scale; 1.0 means no scaling
Mat rot_mat = opencv_imgproc.getRotationMatrix2D(eyesCenter, angle, 1.0);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
// PointVector marks = new PointVector();
// // Use the affine matrix to compute where each landmark lands in the transformed image
// for (int n = 0; n<landmarks.get().length; n++) {
// Point p =new Point(0, 0);
// p.x((int)(rot_mat.ptr(0).get(0)* landmarks.get()[n].x() + rot_mat.ptr(0).get(1) *
// landmarks.get()[n].y() + rot_mat.ptr(0).get(2)));
// p.y((int)(rot_mat.ptr(1).get(0)* landmarks.get()[n].x() + rot_mat.ptr(1).get(1) *
// landmarks.get()[n].y() + rot_mat.ptr(1).get(2)));
// marks.push_back(p);
// }
// Mark the landmarks
// for (int j = 0; j < landmarks.get().length; j++) {
// opencv_imgproc.circle(rot, marks.get(j), 2,new Scalar(0, 0, 255, 0));
// }
// opencv_imgcodecs.imwrite("/Users/calvin/Documents/Data_Faces_0/fa_result_2.jpg",rot);
Imgproc.warpAffine(src, rot, rot_mat, src.size());
return rot;
}
}
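With the JavaCV-based helpers removed, the surviving class appears to reduce to a thin wrapper over org.opencv's Imgproc; reconstructed from the visible lines (roughly, not the verbatim file):

package me.aias.util;

import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class FaceAlignment {
    // Rotated affine transform based on the 5 target landmarks; the output keeps the size of src
    public static Mat get5WarpAffineImg(Mat src, Mat rot_mat) {
        Mat rot = new Mat();
        Imgproc.warpAffine(src, rot, rot_mat, src.size());
        return rot;
    }
}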

View File

@@ -0,0 +1,171 @@
package me.aias.util;
import org.bytedeco.javacpp.indexer.DoubleRawIndexer;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.CvMat;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point2f;
import org.bytedeco.opencv.opencv_core.Point2fVector;
import java.nio.FloatBuffer;
import static org.bytedeco.opencv.global.opencv_calib3d.findHomography;
import static org.bytedeco.opencv.global.opencv_core.cvCreateMat;
public class FaceAlignmentOld {
public static Mat affineTransform(
Mat src, Point2f srcPoints, Point2f dstPoints) {
Mat dst = src.clone();
// https://github.com/bytedeco/javacv/issues/788
Mat warp_mat = opencv_imgproc.getAffineTransform(srcPoints.position(0), dstPoints.position(0));
opencv_imgproc.warpAffine(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat perspectiveTransform(
Mat src, Point2f srcPoints, Point2f dstPoints) {
Mat dst = src.clone();
Mat warp_mat = opencv_imgproc.getPerspectiveTransform(srcPoints.position(0), dstPoints.position(0));
opencv_imgproc.warpPerspective(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat getHomography(Mat src, double[] srcPoints, double[] dstPoints) {
Mat dst = src.clone();
Mat warp_mat = createHomography(srcPoints, dstPoints);
opencv_imgproc.warpPerspective(src, dst, warp_mat, dst.size());
return dst;
}
public static Mat createHomography(double[] src, double[] dst) {
CvMat srcPoints;
CvMat dstPoints;
int nbPoints = src.length / 2;
Mat homography;
srcPoints = cvCreateMat(2, nbPoints, opencv_core.CV_32FC1);
dstPoints = cvCreateMat(2, nbPoints, opencv_core.CV_32FC1);
Mat newSvdMat = new Mat(2, 3, opencv_core.CV_32FC1);
// homography = cvCreateMat(3, 3, opencv_core.CV_32FC1);
for (int i = 0; i < nbPoints; i++) {
srcPoints.put(i, src[2 * i]);
srcPoints.put(i + nbPoints, src[2 * i + 1]);
dstPoints.put(i, dst[2 * i]);
dstPoints.put(i + nbPoints, dst[2 * i + 1]);
}
homography = findHomography(returnMat(srcPoints), returnMat(dstPoints));
return homography;
}
public static Mat returnMat(CvMat mtx) {
double valor;
final int rows = mtx.rows();
final int cols = mtx.cols();
Mat mat = new Mat(rows, cols, opencv_core.CV_64F);
final int step = mtx.step() / 4;
FloatBuffer buf = mtx.getFloatBuffer();
DoubleRawIndexer ldIdx = mat.createIndexer();
for (int row = 0; row < rows; row++) {
buf.position(row * step);
for (int col = 0; col < cols; col++) {
valor = buf.get();
ldIdx.put(row, col, valor);
}
}
ldIdx.release();
return mat;
}
// Rotated affine transform based on the 5 target landmarks
public static Mat get5WarpAffineImg(Mat src, Mat rot_mat) {
Mat oral = new Mat();
src.copyTo(oral);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
return rot;
}
// Rotated affine transform around the midpoint between the eyes
// Point2fVector pv = FaceUtils.point2fVector(subImageRect, points);
// mat = FaceAlignment.get5WarpAffineImg(mat, pv);
public static Mat get5WarpAffineImg(Mat src, Point2fVector landmarks) {
Mat oral = new Mat();
src.copyTo(oral);
// Landmark coordinates in the image:
// 1. left_eye_x , left_eye_y
// 2. right_eye_x , right_eye_y
// 3. nose_x , nose_y
// 4. left_mouth_x , left_mouth_y
// 5. right_mouth_x , right_mouth_y
// Compute the eye-center point and rotate around it; point 1 is the left eye, point 2 is the right eye
Point2f eyesCenter = new Point2f(landmarks.get()[2].x(), landmarks.get()[2].y()); // the 3rd point lies between the eyes
// Compute the angle between the two eyes
float dy = (landmarks.get()[1].y() - landmarks.get()[0].y()); // 2 - 1
float dx = (landmarks.get()[1].x() - landmarks.get()[0].x()); // 2 - 1
double angle = Math.atan2(dy, dx) * 180.0 / opencv_core.CV_PI;
// Radians to degrees
// Build the affine matrix from eyesCenter, angle and scale; 1.0 means no scaling
// cv2.getRotationMatrix2D takes: 1. rotation center, 2. rotation angle, 3. scale factor; a positive angle rotates counter-clockwise and the rotated image may extend beyond the original bounds
Mat rot_mat = opencv_imgproc.getRotationMatrix2D(eyesCenter, angle, 1.0);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
return rot;
}
public static Mat get68WarpAffineImg(Mat src, Point2fVector landmarks) {
Mat oral = new Mat();
src.copyTo(oral);
// for (int j = 0; j < landmarks.get().length; j++) {
// opencv_imgproc.circle(oral, new Point((int) landmarks.get()[j].x(),(int)
// landmarks.get()[j].y()),2,new Scalar(255, 0, 0 ,0));
// }
// opencv_imgcodecs.imwrite("/Users/calvin/Documents/Data_Faces_0/fa_result_1.jpg",oral);
// Compute the eye-center point and rotate around it; point 40 is the left eye, point 43 is the right eye
// Point2f eyesCenter = new Point2f( (landmarks.get()[39].x() + landmarks.get()[42].x()) * 0.5f,
// (landmarks.get()[39].y() + landmarks.get()[42].y()) * 0.5f );
Point2f eyesCenter =
new Point2f(landmarks.get()[27].x(), landmarks.get()[27].y()); // the 28th point lies between the eyes
// Compute the angle between the two eyes
float dy = (landmarks.get()[42].y() - landmarks.get()[39].y()); // 43 - 40
float dx = (landmarks.get()[42].x() - landmarks.get()[39].x()); // 43 - 40
double angle = Math.atan2(dy, dx) * 180.0 / opencv_core.CV_PI;
// Radians to degrees
// Build the affine matrix from eyesCenter, angle and scale; 1.0 means no scaling
Mat rot_mat = opencv_imgproc.getRotationMatrix2D(eyesCenter, angle, 1.0);
Mat rot = new Mat();
// Apply the affine transform; the output keeps the size of src
opencv_imgproc.warpAffine(src, rot, rot_mat, src.size());
// PointVector marks = new PointVector();
// // Use the affine matrix to compute where each landmark lands in the transformed image
// for (int n = 0; n<landmarks.get().length; n++) {
// Point p =new Point(0, 0);
// p.x((int)(rot_mat.ptr(0).get(0)* landmarks.get()[n].x() + rot_mat.ptr(0).get(1) *
// landmarks.get()[n].y() + rot_mat.ptr(0).get(2)));
// p.y((int)(rot_mat.ptr(1).get(0)* landmarks.get()[n].x() + rot_mat.ptr(1).get(1) *
// landmarks.get()[n].y() + rot_mat.ptr(1).get(2)));
// marks.push_back(p);
// }
// Mark the landmarks
// for (int j = 0; j < landmarks.get().length; j++) {
// opencv_imgproc.circle(rot, marks.get(j), 2,new Scalar(0, 0, 255, 0));
// }
// opencv_imgcodecs.imwrite("/Users/calvin/Documents/Data_Faces_0/fa_result_2.jpg",rot);
return rot;
}
}
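The eye-rotation overloads above were not ported to the new FaceAlignment. If the same rotate-about-the-eye-center alignment is needed against the org.opencv API, a hedged equivalent could look like the sketch below; the class, method, and parameter names are illustrative and not part of this commit:

import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

public class EyeRotationSketch {
    // Rotate the face so the eyes are level; center/leftEye/rightEye are landmark points
    static Mat rotateAroundEyes(Mat src, Point center, Point leftEye, Point rightEye) {
        double dy = rightEye.y - leftEye.y;
        double dx = rightEye.x - leftEye.x;
        double angle = Math.toDegrees(Math.atan2(dy, dx));
        // getRotationMatrix2D: rotation center, angle (positive = counter-clockwise), scale (1.0 = no scaling)
        Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1.0);
        Mat rot = new Mat();
        Imgproc.warpAffine(src, rot, rotMat, src.size());
        return rot;
    }
}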

View File

@@ -1,7 +1,6 @@
package me.aias.util;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.BoundingBox;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.modality.cv.output.Joints;
@@ -17,23 +16,6 @@ import java.nio.file.Paths;
import java.util.Iterator;
public class ImageUtils {
public static Image bufferedImage2DJLImage(BufferedImage img) {
return ImageFactory.getInstance().fromImage(img);
}
public static void saveImage(BufferedImage img, String name, String path) {
Image newImage = ImageFactory.getInstance().fromImage(img); // supports multiple image formats, auto-detected
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
// OpenJDK cannot save the alpha channel of jpg images
try {
newImage.save(Files.newOutputStream(imagePath), "png");
} catch (IOException e) {
e.printStackTrace();
}
}
public static void saveImage(Image img, String name, String path) {
Path outputDir = Paths.get(path);
Path imagePath = outputDir.resolve(name);
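Since the BufferedImage-based overloads are removed, callers now save DJL Image objects directly. A minimal usage sketch, assuming hypothetical file names:

import ai.djl.modality.cv.Image;
import ai.djl.opencv.OpenCVImageFactory;
import me.aias.util.ImageUtils;
import java.nio.file.Paths;

public class SaveImageSketch {
    public static void main(String[] args) throws Exception {
        Image image = new OpenCVImageFactory().fromFile(Paths.get("face.jpg")); // hypothetical input
        // Saved as png; on OpenJDK a jpg would lose the alpha channel
        ImageUtils.saveImage(image, "face_copy.png", "build/output");
    }
}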

View File

@@ -2,69 +2,19 @@ package me.aias.util;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import org.bytedeco.javacpp.indexer.DoubleRawIndexer;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point2f;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
public class NDArrayUtils {
// NDArray to OpenCV Mat
public static Mat toOpenCVMat(NDArray points, int rows, int cols) {
double[] doubleArray = points.toDoubleArray();
// CV_32F = FloatRawIndexer
// CV_64F = DoubleRawIndexer
Mat mat = new Mat(rows, cols, opencv_core.CV_64F);
DoubleRawIndexer ldIdx = mat.createIndexer();
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
ldIdx.put(i, j, doubleArray[i * cols + j]);
}
}
ldIdx.release();
return mat;
}
// NDArray to OpenCV Mat
public static Mat toOpenCVMat(NDManager manager, NDArray srcPoints, NDArray dstPoints) {
NDArray svdMat = SVDUtils.transformationFromPoints(manager, srcPoints, dstPoints);
double[] doubleArray = svdMat.toDoubleArray();
Mat newSvdMat = new Mat(2, 3, opencv_core.CV_64F);
DoubleRawIndexer ldIdx = newSvdMat.createIndexer();
Mat newSvdMat = new Mat(2, 3, CvType.CV_64F);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
ldIdx.put(i, j, doubleArray[i * 3 + j]);
newSvdMat.put(i, j, doubleArray[i * 3 + j]);
}
}
ldIdx.release();
return newSvdMat;
}
// NDArray to OpenCV Point2f
public static Point2f toOpenCVPoint2f(NDArray points, int rows) {
double[] doubleArray = points.toDoubleArray();
Point2f points2f = new Point2f(rows);
for (int i = 0; i < rows; i++) {
points2f.position(i).x((float) doubleArray[i * 2]).y((float) doubleArray[i * 2 + 1]);
}
return points2f;
}
// double[] to OpenCV Point2f
public static Point2f toOpenCVPoint2f(double[] doubleArray, int rows) {
Point2f points2f = new Point2f(rows);
for (int i = 0; i < rows; i++) {
points2f.position(i).x((float) doubleArray[i * 2]).y((float) doubleArray[i * 2 + 1]);
}
return points2f;
}
}
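The same put-based copy works for any small matrix. A standalone sketch of moving an NDArray into an org.opencv Mat under the new API (the values and the native-loading call are illustrative):

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class NDArrayToMatSketch {
    // Copy a rows x cols NDArray of doubles into an org.opencv Mat of type CV_64F
    static Mat toMat(NDArray array, int rows, int cols) {
        double[] data = array.toDoubleArray();
        Mat mat = new Mat(rows, cols, CvType.CV_64F);
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                mat.put(i, j, data[i * cols + j]);
            }
        }
        return mat;
    }

    public static void main(String[] args) {
        nu.pattern.OpenCV.loadLocally(); // load the bundled OpenPnP natives before touching Mat
        try (NDManager manager = NDManager.newBaseManager()) {
            NDArray affine = manager.create(new double[][] {{1, 0, 5}, {0, 1, 10}}); // toy 2x3 affine matrix
            System.out.println(toMat(affine, 2, 3).dump());
        }
    }
}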