using System;
using System.Collections;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using DlibFaceLandmarkDetector;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using UnityEngine;
using UnityEngine.Networking;
using UnityEngine.UI;
public class BlendMgr : Singleton<BlendMgr>
{
[Header("输入输出")]
public Texture2D srcFace; // source face image to process
Texture2D copyTexture; // working copy of the source face image (output buffer)
// NOTE(review): the "drug"/"drugFace" pairs are presumably overlay textures and the
// face images they were authored against (fed to GetBlendFace) — naming is opaque; confirm.
public Texture2D drugFace; // face image the overlay texture is aligned to
public Texture2D drug; // overlay face texture
public Texture2D drugFace1; // face image the overlay texture is aligned to
public Texture2D drug1; // overlay face texture
public Texture2D drugFace2; // face image the overlay texture is aligned to
public Texture2D drug2; // overlay face texture
public Texture2D drugFace3; // face image the overlay texture is aligned to
public Texture2D drug3; // overlay face texture
public Texture2D oldFace; // aged face image
public Texture2D old; // aged-face overlay texture
[Range(0, 1)]
public double fill; // soft-light "fill" factor passed to SoftLightBlend
[Range(0, 1)]
public float thinFactor = 0.5f; // strength of the face-thinning warp (ThinFace)
// Face landmark detectors — all three are loaded from the same shape-predictor file in Start()
FaceLandmarkDetector mainDetector;
FaceLandmarkDetector minorDetector;
FaceLandmarkDetector minorDetector1;
string dlibShapePredictorFileName = "sp_human_face_68.dat";
string dlibShapePredictorFilePath;
// Image matrices (native OpenCV resources; disposed in OnDestroy)
Mat srcMat;
Mat effectMat;
Mat copyMat;
Mat targetMat;
void Start()
{
    // Resolve the 68-point dlib shape-predictor data file. The editor helper can
    // locate it directly; in a player build it lives under StreamingAssets.
#if UNITY_EDITOR
    string predictorPath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(
        dlibShapePredictorFileName
    );
#else
    string predictorPath = System.IO.Path.Combine(
        Application.streamingAssetsPath,
        dlibShapePredictorFileName
    );
#endif
    dlibShapePredictorFilePath = predictorPath;

    // Three independent detector instances, all backed by the same predictor file.
    mainDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
    minorDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);
    minorDetector1 = new FaceLandmarkDetector(dlibShapePredictorFilePath);
}
// Auto-detects face regions and returns them as the integer Rect list the Tencent API expects.
List<UnityEngine.Rect> GetFaceRectsForTencentAPI(Texture2D tex)
{
    var rects = new List<UnityEngine.Rect>();
    if (mainDetector == null)
    {
        Debug.LogWarning("faceLandmarkDetector未初始化");
        return rects;
    }

    mainDetector.SetImage(tex);
    foreach (var face in mainDetector.Detect())
    {
        // The Tencent API requires integer rect coordinates, so truncate each component.
        rects.Add(
            new UnityEngine.Rect((int)face.x, (int)face.y, (int)face.width, (int)face.height)
        );
    }
    return rects;
}
/// <summary>
/// Photoshop-style soft-light blend of a single channel value.
/// </summary>
/// <param name="_a">Blend-layer channel value in [0, 255].</param>
/// <param name="_b">Base-layer channel value in [0, 255].</param>
/// <param name="_opacity">Opacity in [0, 255] (the blend layer's alpha at this pixel).</param>
/// <param name="_fill">Fill factor in [0, 1]; how much of the blend result is kept.</param>
/// <returns>Blended channel value in [0, 255].</returns>
double SoftLightBlend(double _a, double _b, double _opacity, double _fill)
{
    // Normalize to [0, 1].
    _a /= 255;
    _b /= 255;
    _opacity /= 255;
    // Standard soft-light formula: the blend layer (_a) selects the branch.
    double res;
    if (_a > 0.5)
    {
        // Fix: use System.Math.Sqrt to stay in double precision — the original
        // Mathf.Sqrt((float)_b) round-tripped through float and lost precision.
        res = 2 * _b * (1 - _a) + (2 * _a - 1) * Math.Sqrt(_b);
    }
    else
    {
        res = 2 * _a * _b + _b * _b * (1 - 2 * _a);
    }
    // Apply fill, then opacity, each mixing the result back toward the base layer.
    res = _fill * res + (1 - _fill) * _b;
    res = _opacity * res + (1 - _opacity) * _b;
    // Map back to [0, 255].
    res *= 255;
    return res;
}
// Warps effectMat so the face in effectFace lines up with the face in srcFace.
// Returns the warped texture matrix, or null when no face is found in either image.
Mat GetWarpedTexture(Texture2D srcFace, Mat srcMat, Texture2D effectFace, Mat effectMat) //返回变换后的人脸纹理矩阵
{
    // Detect the 68 landmarks of the first face in the source image.
    mainDetector.SetImage(srcFace);
    List<UnityEngine.Rect> srcFaces = mainDetector.Detect();
    if (srcFaces.Count == 0)
    {
        Debug.Log("no faces detected in srcFace");
        return null;
    }
    List<Point> srcPoints = new List<Point>();
    List<Vector2> facePoints = mainDetector.DetectLandmark(srcFaces[0]);
    for (int i = 0; i < facePoints.Count; i++)
    {
        srcPoints.Add(new Point(facePoints[i].x, facePoints[i].y));
    }
    // Detect the 68 landmarks of the first face in the effect texture.
    minorDetector.SetImage(effectFace);
    List<UnityEngine.Rect> effectFaces = minorDetector.Detect();
    if (effectFaces.Count == 0)
    {
        Debug.Log("no faces found in effectFace");
        return null;
    }
    List<Point> effectPoints = new List<Point>();
    List<Vector2> points = minorDetector.DetectLandmark(effectFaces[0]);
    for (int i = 0; i < points.Count; i++)
    {
        effectPoints.Add(new Point(points[i].x, points[i].y));
    }
    // Fit the optimal homography from the 68 landmark correspondences.
    // Fix: the original allocated a throwaway `new Mat()` here that findHomography's
    // return value immediately replaced, leaking native memory — assign directly.
    Mat warpMat = Calib3d.findHomography(
        new MatOfPoint2f(effectPoints.ToArray()),
        new MatOfPoint2f(srcPoints.ToArray())
    );
    // Perspective-warp the effect texture into the source image's frame.
    Mat targetMat = new Mat();
    Imgproc.warpPerspective(effectMat, targetMat, warpMat, srcMat.size());
    // Release the homography's native memory now that the warp is done.
    warpMat.Dispose();
    return targetMat;
}
// Local-translation warp: maps output pixel _x back to the source sample position,
// pulling pixels inside the circle (_C_point, _radius) toward _M_point with strength _a.
Vector2 GetWarpPoint(Vector2 _x, Vector2 _C_point, Vector2 _M_point, float _radius, float _a)
{
    Vector2 pull = _M_point - _C_point;
    // Falloff term: large near the circle centre, zero at the boundary.
    float falloff = _radius * _radius - (_x - _C_point).sqrMagnitude;
    float ratio = falloff / (falloff + _a * pull.sqrMagnitude);
    return _x - Mathf.Pow(ratio, 2f) * pull;
}
/// <summary>
/// Soft-light blends the effect texture (warped to match the face in srcFace)
/// onto srcFace and returns the blended image, or null when no face is detected.
/// </summary>
public Texture2D GetBlendFace(Texture2D srcFace, Texture2D effectFace, Texture2D effectMask)
{
    // Fix: dispose mats left over from a previous call before reassigning the
    // fields — otherwise every call leaks native memory (OnDestroy only frees
    // the final set).
    if (srcMat != null)
        srcMat.Dispose();
    srcMat = new Mat(srcFace.height, srcFace.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(srcFace, srcMat);
    if (effectMat != null)
        effectMat.Dispose();
    effectMat = new Mat(effectMask.height, effectMask.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(effectMask, effectMat);
    // Work on a copy of the source face so srcMat keeps the original pixels.
    copyTexture = new Texture2D(srcFace.width, srcFace.height, TextureFormat.RGBA32, false);
    copyTexture.SetPixels32(srcFace.GetPixels32());
    copyTexture.Apply();
    if (copyMat != null)
        copyMat.Dispose();
    copyMat = new Mat(copyTexture.height, copyTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(copyTexture, copyMat);
    // Warp the effect texture onto the source face. (Fix: the original pre-allocated
    // a Mat here that this return value immediately replaced — another leak.)
    if (targetMat != null)
        targetMat.Dispose();
    targetMat = GetWarpedTexture(srcFace, srcMat, effectFace, effectMat);
    if (targetMat == null)
        return null;
    // Pull the pixel data out once, then blend row-by-row in parallel.
    int rows = srcMat.rows();
    int cols = srcMat.cols();
    int channels = srcMat.channels();
    byte[] srcData = new byte[rows * cols * channels];
    srcMat.get(0, 0, srcData);
    byte[] targetData = new byte[rows * cols * channels];
    targetMat.get(0, 0, targetData);
    byte[] dstData = new byte[rows * cols * channels];
    copyMat.get(0, 0, dstData);
    var options = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
    Parallel.For(
        0,
        rows,
        options,
        i =>
        {
            int rowOffset = i * cols * channels;
            for (int j = 0; j < cols; j++)
            {
                int idx = rowOffset + j * channels;
                // Only blend where the warped texture is non-transparent; its
                // alpha also serves as the per-pixel blend opacity.
                double a = targetData[idx + 3];
                if (a > 0)
                {
                    double r = SoftLightBlend(targetData[idx], srcData[idx], a, fill);
                    double g = SoftLightBlend(targetData[idx + 1], srcData[idx + 1], a, fill);
                    double b = SoftLightBlend(targetData[idx + 2], srcData[idx + 2], a, fill);
                    dstData[idx] = (byte)Mathf.Clamp((float)r, 0, 255);
                    dstData[idx + 1] = (byte)Mathf.Clamp((float)g, 0, 255);
                    dstData[idx + 2] = (byte)Mathf.Clamp((float)b, 0, 255);
                    dstData[idx + 3] = 255;
                }
            }
        }
    );
    copyMat.put(0, 0, dstData);
    // Write the blended pixels back into the output texture.
    OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(copyMat, copyTexture);
    return copyTexture;
}
// Thins the cheeks of the detected face with a local-translation warp: two circular
// regions anchored at jawline landmarks are pulled toward the nose tip (landmark 31).
// Returns the warped copy of srcFace, or null when no face/landmarks are found.
public Texture2D ThinFace(Texture2D srcFace)
{
srcMat = new Mat(srcFace.height, srcFace.width, CvType.CV_8UC4);
OpenCVForUnity.UnityUtils.Utils.texture2DToMat(srcFace, srcMat);
// Copy the source face image to serve as the output buffer.
Texture2D copyTexture = new Texture2D(
srcFace.width,
srcFace.height,
TextureFormat.RGBA32,
false
);
copyTexture.SetPixels32(srcFace.GetPixels32());
copyTexture.Apply();
copyMat = new Mat(copyTexture.height, copyTexture.width, CvType.CV_8UC4);
OpenCVForUnity.UnityUtils.Utils.texture2DToMat(copyTexture, copyMat);
List<Vector2> facePoints = new List<Vector2>();
// Detect the face in the source image.
minorDetector1.SetImage(srcFace);
List<UnityEngine.Rect> srcFaces = minorDetector1.Detect();
List<Point> srcPoints = new List<Point>();
if (srcFaces.Count == 0)
{
Debug.Log("no faces detected in srcFace");
return null;
}
else
{
facePoints = minorDetector1.DetectLandmark(srcFaces[0]);
for (int i = 0; i < facePoints.Count; i++)
{
srcPoints.Add(new Point(facePoints[i].x, facePoints[i].y));
}
}
if (facePoints.Count <= 0)
return null;
Vector2 five = Vector2.zero;
Vector2 thirteen = Vector2.zero;
Vector2 eight = Vector2.zero;
Vector2 thirty_one = Vector2.zero;
// Mat thinMat = copyMat.clone(); // clone kept for possible later operations (unused)
// Mat thinCopyMat = copyMat.clone(); // clone kept for possible later operations (unused)
// Grab the key landmarks. Note: array index = landmark number - 1.
five = new Vector2(facePoints[4].x, facePoints[4].y);
thirteen = new Vector2(facePoints[12].x, facePoints[12].y);
eight = new Vector2(facePoints[7].x, facePoints[7].y);
thirty_one = new Vector2(facePoints[30].x, facePoints[30].y);
// Build the mask: two filled circles centred on the cheek landmarks, oriented
// toward landmark 31 (nose tip).
// NOTE(review): the original comment spoke of points 4 and 14 but the code uses
// landmarks 5 and 13 (indices 4 and 12) — confirm which was intended.
Mat mask = Mat.zeros(copyMat.rows(), copyMat.cols(), copyMat.type()); // start all-black
// float radius1 = (four - six).magnitude; // right-cheek circle radius (old version)
// float radius2 = (four - six).magnitude; // left-cheek circle radius (old version)
float radius1 = (five - eight).magnitude; // right-cheek circle radius
float radius2 = (five - eight).magnitude; // left-cheek circle radius
// NOTE(review): radius2 also uses (five - eight); presumably it should be based on
// the left-cheek landmarks (e.g. thirteen) — TODO confirm, may be intentional symmetry.
Imgproc.circle(
mask,
new Point(five.x, five.y),
(int)radius1,
new Scalar(255, 255, 255),
-1
); // right-cheek circle drawn white (255,255,255)
Imgproc.circle(
mask,
new Point(thirteen.x, thirteen.y),
(int)radius2,
new Scalar(255, 255, 0),
-1
); // left-cheek circle drawn with third channel 0 (255,255,0)
// Local translation warp — performance-optimized version.
#region 局部平移 - 优化性能版本
// Copy the mask pixels out of native memory.
byte[] maskData = new byte[mask.rows() * mask.cols() * 4];
mask.get(0, 0, maskData);
// Copy the source image pixels.
byte[] srcData = new byte[srcMat.rows() * srcMat.cols() * 4];
srcMat.get(0, 0, srcData);
// Copy the destination image pixels.
byte[] dstData = new byte[copyMat.rows() * copyMat.cols() * 4];
copyMat.get(0, 0, dstData);
var options = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
// Process rows in parallel; each row writes a disjoint slice of dstData.
Parallel.For(
0,
copyMat.rows(),
options,
i =>
{
for (int j = 0; j < copyMat.cols(); j++)
{
int index = (i * copyMat.cols() + j) * 4;
// Classify the pixel by mask color.
// NOTE(review): locals are named b/g/r but the comparisons only need to match
// the Scalar channel order used above ((255,255,255) vs (255,255,0)), which they do.
byte b = maskData[index];
byte g = maskData[index + 1];
byte r = maskData[index + 2];
bool isRightFace = (b == 255 && g == 255 && r == 255); // right-cheek circle
bool isLeftFace = (b == 255 && g == 255 && r == 0); // left-cheek circle
if (isRightFace || isLeftFace)
{
Vector2 center = isRightFace ? five : thirteen;
float radius = isRightFace ? radius1 : radius2;
// Inverse-map this output pixel to its source sample position.
// NOTE(review): the `- 1 + 0.5f` offset is unusual (pixel centres are normally
// j + 0.5) — confirm whether the extra -1 shift is intentional.
Vector2 U_point = GetWarpPoint(
new Vector2(j - 1 + 0.5f, i - 1 + 0.5f),
center,
thirty_one,
radius,
thinFactor
);
// If the sample point is out of bounds, fall back to the original pixel.
int x = (int)U_point.x;
int y = (int)U_point.y;
bool inBounds = (
x >= 0 && x < copyMat.cols() - 1 && y >= 0 && y < copyMat.rows() - 1
);
for (int k = 0; k < 4; k++)
{
if (!inBounds)
{
dstData[index + k] = srcData[index + k];
}
else
{
// Sample the warped position with bilinear interpolation of the
// four neighbouring source pixels.
float fx = Mathf.Clamp(U_point.x, 0, copyMat.cols() - 2);
float fy = Mathf.Clamp(U_point.y, 0, copyMat.rows() - 2);
int x0 = (int)fx;
int y0 = (int)fy;
int x1 = Mathf.Min(x0 + 1, copyMat.cols() - 1);
int y1 = Mathf.Min(y0 + 1, copyMat.rows() - 1);
float dx = fx - x0;
float dy = fy - y0;
int idx00 = (y0 * copyMat.cols() + x0) * 4;
int idx01 = (y0 * copyMat.cols() + x1) * 4;
int idx10 = (y1 * copyMat.cols() + x0) * 4;
int idx11 = (y1 * copyMat.cols() + x1) * 4;
float v00 = srcData[idx00 + k];
float v01 = srcData[idx01 + k];
float v10 = srcData[idx10 + k];
float v11 = srcData[idx11 + k];
float v0 = Mathf.Lerp(v00, v01, dx);
float v1 = Mathf.Lerp(v10, v11, dx);
float v = Mathf.Lerp(v0, v1, dy);
dstData[index + k] = (byte)Mathf.Clamp(v, 0, 255);
}
}
}
else
{
// Outside the warp circles: copy the original pixel through unchanged.
dstData[index + 0] = srcData[index + 0];
dstData[index + 1] = srcData[index + 1];
dstData[index + 2] = srcData[index + 2];
dstData[index + 3] = srcData[index + 3];
}
}
}
);
// Write the processed pixels back into the destination matrix.
copyMat.put(0, 0, dstData);
#endregion
// Output the processed image as a texture.
OpenCVForUnity.UnityUtils.Utils.matToTexture2DRaw(copyMat, copyTexture);
return copyTexture;
}
// Releases the working texture, the native OpenCV mats and the dlib detectors
// on top of the base singleton teardown.
public override void OnDestroy()
{
    base.OnDestroy();

    // Unity objects go through Destroy; native resources through Dispose.
    if (copyTexture != null)
    {
        Destroy(copyTexture);
    }

    // Release the native OpenCV matrices (same order as before).
    foreach (var mat in new[] { srcMat, effectMat, targetMat, copyMat })
    {
        if (mat != null)
        {
            mat.Dispose();
        }
    }

    // Release the dlib landmark detectors.
    foreach (var detector in new[] { mainDetector, minorDetector, minorDetector1 })
    {
        if (detector != null)
        {
            detector.Dispose();
        }
    }
}
}
// (review note: removed stray web-paste artifact "最新发布" after the class — it was not valid C# and broke compilation)