Commit c683a1ef by 何阳

增加功能,只判断上半脸

parent 5d8bde7a
......@@ -25,8 +25,8 @@ namespace FaceServer
rollingInterval: RollingInterval.Day,
fileSizeLimitBytes: 1_000_000,
rollOnFileSizeLimit: true,
outputTemplate: "[{Timestamp:HH:mm:ss.fff}] [{Level}] [{Thread}] [{SourceContext}] {Message:lj}{NewLine}{Code} {Exception}") // 文件日志,每天滚动
.WriteTo.Trace(outputTemplate: "[{Timestamp:HH:mm:ss.fff}] [{Level}] [{Thread}] [{SourceContext}] {Message:lj}{NewLine}{Code} {Exception}")
outputTemplate: "[{Timestamp:HH:mm:ss.fff}] [{Level}] [{ThreadId}] [{SourceContext}] {Message:lj}{NewLine}{Code} {Exception}") // 文件日志,每天滚动
.WriteTo.Trace(outputTemplate: "[{Timestamp:HH:mm:ss.fff}] [{Level}] [{ThreadId}] [{SourceContext}] {Message:lj}{NewLine}{Code} {Exception}")
.WriteTo.Sink(new TextBoxSink(textBox, "[{Timestamp:HH:mm:ss.fff}] [{Level}] [{ThreadId}] [{SourceContext}] {Message:lj}{NewLine}{Exception}")) // 自定义 TextBox Sink
.CreateLogger();
_logger = Log.Logger;
......
......@@ -213,7 +213,7 @@ namespace FaceServer
{
// 序列化要发送的数据
byte[] send = ConvertEx.StructToBytes(item);
byte[] send = ConvertEx.SerializeFaceData(item);
// 发送数据
await pipeServer.WriteAsync(send);
}
......
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
......@@ -8,7 +9,30 @@ using System.Threading.Tasks;
public static class ConvertEx
{
/// <summary>
/// Serializes a <see cref="FaceData"/> into a fixed-layout byte array for pipe transmission:
/// face rectangle (X, Y, Width, Height), a 50-byte zero-padded UTF-8 name field,
/// then the identification flag. The layout must stay in sync with the client-side reader.
/// </summary>
/// <param name="faceData">The face data to serialize.</param>
/// <returns>The serialized record as a byte array.</returns>
/// <exception cref="ArgumentException">Thrown when the UTF-8 encoded name exceeds the fixed name field size.</exception>
public static byte[] SerializeFaceData(FaceData faceData)
{
    const int NameFieldLength = 50; // fixed-width name slot; keep in sync with the deserializer

    using (MemoryStream stream = new MemoryStream())
    using (BinaryWriter writer = new BinaryWriter(stream))
    {
        writer.Write(faceData.FaceRect.X);
        writer.Write(faceData.FaceRect.Y);
        writer.Write(faceData.FaceRect.Width);
        writer.Write(faceData.FaceRect.Height);

        // Treat a null name as empty so serialization never throws NullReferenceException/ArgumentNullException.
        byte[] nameBytes = Encoding.UTF8.GetBytes(faceData.Name ?? string.Empty);
        if (nameBytes.Length > NameFieldLength)
        {
            throw new ArgumentException("Name is too long to serialize", nameof(faceData));
        }

        // Copy into a fixed-length, zero-padded field so every record has an identical size.
        byte[] paddedNameBytes = new byte[NameFieldLength];
        Array.Copy(nameBytes, paddedNameBytes, nameBytes.Length);
        writer.Write(paddedNameBytes);

        writer.Write(faceData.IsIdentified);
        return stream.ToArray();
    }
}
public static Byte[] StructToBytes( Object structure)
{
......
......@@ -9,6 +9,8 @@ using System.Drawing;
using System.Windows;
using System.Threading.Tasks;
using System.Windows.Media.Imaging;
using System.Text;
using Serilog;
public class FaceRecognitionServer
{
......@@ -62,29 +64,61 @@ public class FaceRecognitionServer
bitmap.Dispose();
// Detect faces
var faceLocations = FaceRecognition.FaceLocations(image);
if (faceLocations.Count() == 0)
if (!faceLocations.Any())
{
image.Dispose();
return new List<FaceData>();
}
var faceEncodings = FaceRecognition.FaceEncodings(image, faceLocations);
List<FaceData> faces = new List<FaceData>();
for (int i = 0; i < faceEncodings.Count(); i++)
var faceLandmarks = FaceRecognition.FaceLandmark(image, faceLocations).ToList();
List<FaceData> faces = new List<FaceData>();
for (int i = 0; i < faceLocations.Count(); i++)
{
var landmarks = faceLandmarks[i];
var upperFacePoints = new List<FacePoint>();
upperFacePoints.AddRange(landmarks[FacePart.LeftEyebrow]);
upperFacePoints.AddRange(landmarks[FacePart.RightEyebrow]);
upperFacePoints.AddRange(landmarks[FacePart.LeftEye]);
upperFacePoints.AddRange(landmarks[FacePart.RightEye]);
upperFacePoints.AddRange(landmarks[FacePart.NoseBridge]);
// 计算上半脸特征点的边界
var minX = upperFacePoints.Min(point => point.Point.X) + Roi.X;
var minY = upperFacePoints.Min(point => point.Point.Y) + Roi.Y;
var maxX = upperFacePoints.Max(point => point.Point.X) + Roi.X;
var maxY = upperFacePoints.Max(point => point.Point.Y) + Roi.Y;
// 创建一个新的面部区域Location,只包括上半脸的特征点
var upperFaceLocation = new Location(minX, minY, maxX, maxY);
// 确保宽度和高度是正数
var width = Math.Max(0, upperFaceLocation.Right - upperFaceLocation.Left);
var height = Math.Max(0, upperFaceLocation.Bottom - upperFaceLocation.Top);
// Get the encodings based on the adjusted location
var faceEncodings = FaceRecognition.FaceEncodings(image, new[] { upperFaceLocation });
var encoding = faceEncodings.FirstOrDefault();
if (encoding == null)
{
Log.Warning($"人脸编码计算失败!");
continue; // Skip if no encoding is found
}
var encoding = faceEncodings.ElementAt(i);
// Find match for known faces
var bestMatch = FindBestMatch(encoding, KnownFaceEncodings);
var location = faceLocations.ElementAt(i);
// 映射回整个图像的坐标空间
// 创建一个新的RectangleSerializable对象
var faceRect = new RectangleSerializable
(
location.Left + Roi.X, // 加上ROI的X偏移
location.Top + Roi.Y, // 加上ROI的Y偏移
location.Right - location.Left,
location.Bottom - location.Top
upperFaceLocation.Left, // 上半脸位置的左边界
upperFaceLocation.Top, // 上半脸位置的上边界
width, // 宽度
height // 高度
);
faces.Add(new FaceData
{
......@@ -92,7 +126,6 @@ public class FaceRecognitionServer
Name = bestMatch,
IsIdentified = bestMatch != "Unknown"
});
}
return faces;
......@@ -106,11 +139,43 @@ public class FaceRecognitionServer
foreach (var imagePath in Directory.GetFiles(faceDataPath, "*.jpg"))
{
var name = Path.GetFileNameWithoutExtension(imagePath);
var image = FaceRecognition.LoadImageFile(imagePath);
var faceEncoding = faceRecognition.FaceEncodings(image).FirstOrDefault();
if (faceEncoding != null)
using (var image = FaceRecognition.LoadImageFile(imagePath))
{
knownFaceEncodings.Add(name, faceEncoding);
// Detect the face location and face landmarks for upper face parts only
var faceLocations = faceRecognition.FaceLocations(image).FirstOrDefault();
if (faceLocations == null)
{
Log.Warning($"{faceDataPath} 人脸模型加载失败!");
continue;
} // Skip if no face is detected
var faceLandmarks = faceRecognition.FaceLandmark(image,new[] { faceLocations }).FirstOrDefault();
if (faceLandmarks == null)
{
Log.Warning($"{faceDataPath} 人脸模型加载失败!");
continue;
} // Skip if no face is detected
var upperFaceLandmarks = new List<FacePoint>();
upperFaceLandmarks.AddRange(faceLandmarks[FacePart.LeftEyebrow]);
upperFaceLandmarks.AddRange(faceLandmarks[FacePart.RightEyebrow]);
upperFaceLandmarks.AddRange(faceLandmarks[FacePart.LeftEye]);
upperFaceLandmarks.AddRange(faceLandmarks[FacePart.RightEye]);
// Calculate the bounding box for the upper face landmarks
var minX = upperFaceLandmarks.Min(point => point.Point.X);
var minY = upperFaceLandmarks.Min(point => point.Point.Y);
var maxX = upperFaceLandmarks.Max(point => point.Point.X);
var maxY = upperFaceLandmarks.Max(point => point.Point.Y);
// Create a Location object representing the upper face region
var upperFaceLocation = new Location(minX, minY, maxX - minX, maxY - minY);
// Get the face encoding based on the upper face landmarks
var faceEncoding = faceRecognition.FaceEncodings(image, new[] { upperFaceLocation }).FirstOrDefault();
if (faceEncoding != null)
{
knownFaceEncodings.Add(name, faceEncoding);
}
}
}
return knownFaceEncodings;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment