Example Introduction
【Example Brief】
A WinForms multi-face detection and recognition demo built on Emgu CV: faces are detected with a Haar cascade, recognized with an eigenface recognizer, labelled with their (Chinese) names over the live video, and greeted through speech synthesis.
【Example Screenshot】
【Core Code】
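The listing below is the main form (FrmPrincipal) of the project. It appears to target Emgu CV 2.x (HaarCascade, MCvAvgComp, EigenObjectRecognizer) and assumes that haarcascade_frontalface_default.xml and a TrainedFaces folder sit next to the executable; the WinForms designer files are not included here.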
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Windows.Forms;
using System.Speech.Synthesis;
using System.Threading;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.CvEnum;
using System.IO;
using Emgu.CV.UI;
namespace MultiFaceRec
{
public partial class FrmPrincipal : Form
{
//Declaration of all variables, vectors and Haar cascades
Image<Bgr, Byte> currentFrame;
Capture grabber;
HaarCascade face;
HaarCascade eye;
MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);
Image<Gray, byte> result, TrainedFace = null;
Image<Gray, byte> gray = null;
List<Image<Gray, byte>> trainingImages = new List<Image<Gray, byte>>();
List<string> labels = new List<string>();
List<string> NamePersons = new List<string>();
int ContTrain, NumLabels, t;
string name, namess = null, names = null;
Dictionary<string, Rectangle> foundPeople = new Dictionary<string, Rectangle>();
float xfactor;
float yfactor;
public FrmPrincipal()
{
InitializeComponent();
try
{
//Initialize the capture device
grabber = new Capture();
grabber.QueryFrame();
//Subscribe the FrameGrabber handler to the Application.Idle event
Application.Idle += new EventHandler(FrameGrabber);
if (grabber != null)
grabber.FlipHorizontal = !grabber.FlipHorizontal;
button1.Enabled = false;
}
catch (Exception)
{
MessageBox.Show("没有摄像头!");
}
//Load haarcascades for face detection
face = new HaarCascade("haarcascade_frontalface_default.xml");
//eye = new HaarCascade("haarcascade_eye.xml");
try
{
//Load previously trained faces and the label for each image
string Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
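//TrainedLabels.txt is "%"-delimited: the first token is the number of faces,
//followed by one label per trained face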
string[] Labels = Labelsinfo.Split('%');
NumLabels = Convert.ToInt16(Labels[0]);
ContTrain = NumLabels;
string LoadFaces;
for (int tf = 1; tf < NumLabels + 1; tf++)
{
LoadFaces = "face" + tf + ".bmp";
trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/TrainedFaces/" + LoadFaces));
labels.Add(Labels[tf]);
}
}
catch (Exception e)
{
//MessageBox.Show(e.ToString());
MessageBox.Show("Nothing in binary database, please add at least a face", "Triained faces load", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
}
}
private void button1_Click(object sender, EventArgs e)
{
try
{
Application.Idle += new EventHandler(FrameGrabber);
button1.Enabled = false;
}
catch (Exception)
{
}
}
private void button3_Click(object sender, EventArgs e)
{
try
{
Application.Idle -= new EventHandler(FrameGrabber);
button1.Enabled = true;
}
catch (Exception)
{
}
}
private void button2_Click(object sender, System.EventArgs e)
{
try
{
//Trained face counter
ContTrain = ContTrain + 1;
//Get a gray frame from capture device
gray = grabber.QueryGrayFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Face Detector
MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
face,
1.2,
10,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size(20, 20));
//Action for each element detected
foreach (MCvAvgComp f in facesDetected[0])
{
TrainedFace = currentFrame.Copy(f.rect).Convert<Gray, byte>();
break;
}
//Resize the detected face so it has the same size as the test images,
//using cubic interpolation
TrainedFace = result.Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
trainingImages.Add(TrainedFace);
labels.Add(textBox1.Text);
//Show face added in gray scale
imageBox1.Image = TrainedFace;
//Write the number of trained faces to a text file for later loading
File.WriteAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", trainingImages.ToArray().Length.ToString() + "%");
//Write the labels of the trained faces to the same text file for later loading
for (int i = 1; i < trainingImages.ToArray().Length + 1; i++)
{
trainingImages.ToArray()[i - 1].Save(Application.StartupPath + "/TrainedFaces/face" + i + ".bmp");
File.AppendAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt", labels.ToArray()[i - 1] + "%");
}
MessageBox.Show(textBox1.Text + "'s face detected and added :)", "Training OK", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
catch
{
MessageBox.Show("Enable the face detection first", "Training Fail", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
}
}
/// <summary>
/// Face recognition and detection
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void FrameGrabber(object sender, EventArgs e)
{
label3.Text = "0";
//label4.Text = "";
NamePersons.Add("");
//Get the current frame from the capture device
currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Convert it to Grayscale
gray = currentFrame.Convert<Gray, Byte>();
//Face Detector
MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
face,
1.2,
10,
Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
new Size(20, 20));
foundPeople.Clear();
//Action for each element detected
foreach (MCvAvgComp f in facesDetected[0])
{
t = t + 1;
result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
//Draw a red rectangle around each detected face on the current frame
currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);
if (trainingImages.ToArray().Length != 0)
{
//Termination criteria: use the number of trained images as the max iteration count
MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
//Eigen face recognizer
EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
trainingImages.ToArray(),
labels.ToArray(),
5000,
ref termCrit);
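//Note (Emgu CV 2.x behaviour): the recognizer is rebuilt from every training
//image on each frame, and Recognize() returns the closest matching label, or
//an empty string when the eigen distance exceeds the 5000 threshold above.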
name = recognizer.Recognize(result);
foundPeople[name] = f.rect;
//Draw the label for each face detected and recognized
//currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
}
NamePersons[t - 1] = name;
NamePersons.Add("");
//Set the number of faces detected on the scene
label3.Text = facesDetected[0].Length.ToString();
}
t = 0;
//Names concatenation of persons recognized
for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
{
names = names + NamePersons[nnn] + ", ";
}
//Show the processed and recognized faces
imageBoxFrameGrabber.Image = currentFrame;
label4.Text = names;
namess = names;
names = "";
//Clear the list(vector) of names
NamePersons.Clear();
}
/// <summary>
/// Draw the recognized names (Chinese text) on the image box
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void imageBoxFrameGrabber_Paint(object sender, PaintEventArgs e)
{
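//Names are drawn here with GDI+ rather than MCvFont because the OpenCV
//Hershey fonts cannot render Chinese characters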
Font ff = new Font("宋体", 15, FontStyle.Bold);
if (foundPeople.Count > 0)
{
// Scale factors from the captured frame to the image box
xfactor = (float)imageBoxFrameGrabber.Width / (float)currentFrame.Bitmap.Width;
yfactor = (float)imageBoxFrameGrabber.Height / (float)currentFrame.Bitmap.Height;
foreach (string n in foundPeople.Keys)
{
e.Graphics.DrawString(
n,
ff,
Brushes.LightGreen,
foundPeople[n].X * xfactor,
foundPeople[n].Y * yfactor - 30);
}
}
e.Graphics.DrawString(
"识别人数:" label3.Text.ToString(),
this.Font,
Brushes.Red,
0, 0);
e.Graphics.DrawString(
foundPeople.Count.ToString(),
this.Font,
Brushes.BlanchedAlmond,
0, 20);
}
private Thread th;
private void label4_TextChanged(object sender, EventArgs e)
{
th = new Thread(new ThreadStart(SpeechSound));
th.Start();
}
/// <summary>
/// Speech announcement of recognized names
/// </summary>
private void SpeechSound()
{
if (string.IsNullOrEmpty(label4.Text))
{
return;
}
SpeechSynthesizer sp = new SpeechSynthesizer();
if (namess == label4.Text)
{
Thread.Sleep(2500);
if (foundPeople.Count == 1)
sp.SpeakAsync(label4.Text + "你好");
if (foundPeople.Count > 1)
sp.SpeakAsync(label4.Text + "你们好");
}
namess = null;
th.Abort();
}
private void FrmPrincipal_FormClosing(object sender, FormClosingEventArgs e)
{
if (th != null) th.Abort();
}
}
}
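The project stores its face database in a TrainedFaces folder next to the executable. The helper below is a minimal, hypothetical sketch (not part of the original source) that creates that layout so a first run does not fall into the "Nothing in binary database" branch; the path and the "%"-delimited label format are inferred from the load/save code above.

using System.IO;

static class TrainedFacesLayout
{
    //Hypothetical helper: TrainedLabels.txt holds "<count>%<label1>%<label2>%..."
    //and each trained face is saved next to it as face1.bmp, face2.bmp, ...
    public static void EnsureEmptyDatabase(string startupPath)
    {
        string dir = Path.Combine(startupPath, "TrainedFaces");
        Directory.CreateDirectory(dir);
        string labelsFile = Path.Combine(dir, "TrainedLabels.txt");
        if (!File.Exists(labelsFile))
            File.WriteAllText(labelsFile, "0%");   //no trained faces yet
    }
}

Calling TrainedFacesLayout.EnsureEmptyDatabase(Application.StartupPath) once before the form is constructed would be one way to avoid the warning dialog on a fresh install.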