You are on page 1 of 17

Main Form

using
using
using
using
using
using
using
using
using
using
using
using

System;
System.Collections.Generic;
System.ComponentModel;
System.Data;
System.Drawing;
System.Linq;
System.Text;
System.Windows.Forms;
Emgu.CV.Structure;
Emgu.CV;
HandGestureRecognition.SkinDetector;
System.Runtime.InteropServices;

namespace HandGestureRecognition
{
    /// <summary>
    /// Main form: grabs webcam frames via EmguCV, segments the skin region in
    /// YCrCb space, extracts the hand contour/convex hull, counts extended
    /// fingers from convexity defects, and switches two devices through a
    /// parallel port (InpOut32 driver) based on the finger count.
    /// </summary>
    public partial class Form1 : Form
    {
        // --- PORT I/O ---------------------------------------------------
        // P/Invoke bindings for the InpOut32 parallel-port driver.
        // 32-bit entry points come from inpout32.dll; the 64-bit variants are
        // bound from inpoutx64.dll under "_x64" aliases via EntryPoint.
        // NOTE(review): in the extracted source the first [DllImport] attribute
        // was fused into a comment line; restored here.
        [DllImport("inpout32.dll")]
        private static extern UInt32 IsInpOutDriverOpen();
        [DllImport("inpout32.dll")]
        private static extern void Out32(short PortAddress, short Data);
        [DllImport("inpout32.dll")]
        private static extern char Inp32(short PortAddress);
        [DllImport("inpout32.dll")]
        private static extern void DlPortWritePortUshort(short PortAddress, ushort Data);
        [DllImport("inpout32.dll")]
        private static extern ushort DlPortReadPortUshort(short PortAddress);
        [DllImport("inpout32.dll")]
        private static extern void DlPortWritePortUlong(int PortAddress, uint Data);
        [DllImport("inpout32.dll")]
        private static extern uint DlPortReadPortUlong(int PortAddress);
        [DllImport("inpoutx64.dll")]
        private static extern bool GetPhysLong(ref int PortAddress, ref uint Data);
        [DllImport("inpoutx64.dll")]
        private static extern bool SetPhysLong(ref int PortAddress, ref uint Data);
        [DllImport("inpoutx64.dll", EntryPoint = "IsInpOutDriverOpen")]
        private static extern UInt32 IsInpOutDriverOpen_x64();
        [DllImport("inpoutx64.dll", EntryPoint = "Out32")]
        private static extern void Out32_x64(short PortAddress, short Data);
        [DllImport("inpoutx64.dll", EntryPoint = "Inp32")]
        private static extern char Inp32_x64(short PortAddress);
        [DllImport("inpoutx64.dll", EntryPoint = "DlPortWritePortUshort")]
        private static extern void DlPortWritePortUshort_x64(short PortAddress, ushort Data);
        [DllImport("inpoutx64.dll", EntryPoint = "DlPortReadPortUshort")]
        private static extern ushort DlPortReadPortUshort_x64(short PortAddress);
        [DllImport("inpoutx64.dll", EntryPoint = "DlPortWritePortUlong")]
        private static extern void DlPortWritePortUlong_x64(int PortAddress, uint Data);
        [DllImport("inpoutx64.dll", EntryPoint = "DlPortReadPortUlong")]
        private static extern uint DlPortReadPortUlong_x64(int PortAddress);
        [DllImport("inpoutx64.dll", EntryPoint = "GetPhysLong")]
        private static extern bool GetPhysLong_x64(ref int PortAddress, ref uint Data);
        [DllImport("inpoutx64.dll", EntryPoint = "SetPhysLong")]
        private static extern bool SetPhysLong_x64(ref int PortAddress, ref uint Data);

        // On/off state flags for the two controlled devices.
        // NOTE(review): flag_dev1_off / flag_dev2_off are set in the
        // constructor but never read afterwards in this file.
        bool flag_dev1_on, flag_dev2_on, flag_dev1_off, flag_dev2_off;
        // Bit mask last written to the parallel port (bit 0 = device 1,
        // bit 1 = device 2, as used by the switch cases below).
        int value;

        // ----------------------------------------------------------------
        // NOTE(review): this field declaration was fused into a comment line
        // in the extracted source; restored so FrameGrabber compiles.
        IColorSkinDetector skinDetector;
        Image<Bgr, Byte> currentFrame;
        Image<Bgr, Byte> currentFrameCopy;
        Capture grabber;                  // camera/video frame source
        AdaptiveSkinDetector detector;    // OpenCV adaptive detector (unused path, kept for the commented-out branch)
        int frameWidth;
        int frameHeight;
        Hsv hsv_min;                      // HSV skin-colour bounds (for HsvSkinDetector)
        Hsv hsv_max;
        Ycc YCrCb_min;                    // YCrCb skin-colour bounds (for YCrCbSkinDetector)
        Ycc YCrCb_max;
        Seq<Point> hull;                  // convex hull of the biggest contour
        Seq<Point> filteredHull;          // hull with near-duplicate points removed
        Seq<MCvConvexityDefect> defects;  // convexity defects of the hand contour
        MCvConvexityDefect[] defectArray;
        Rectangle handRect;
        MCvBox2D box;                     // min-area rectangle around the hand
        Ellipse ellip;

        /// <summary>
        /// Main form constructor: initialises state, opens the capture
        /// device, sets skin-colour thresholds and hooks frame grabbing
        /// onto Application.Idle.
        /// </summary>
        public Form1()
        {
            InitializeComponent(); // Initialize the Components
            value = 0;
            flag_dev1_on = false;
            flag_dev2_on = false;
            flag_dev1_off = true;
            flag_dev2_off = true;

            //grabber = new Emgu.CV.Capture(@".\..\..\..\M2U00253.MPG"); // Use for Videos
            grabber = new Emgu.CV.Capture();      // Object to capture the frame (default camera)
            grabber.QueryFrame();                 // prime the capture so Width/Height are valid
            frameWidth = grabber.Width;
            frameHeight = grabber.Height;
            detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
            hsv_min = new Hsv(0, 45, 0);          // HSV color space bounds
            hsv_max = new Hsv(20, 255, 255);
            YCrCb_min = new Ycc(0, 131, 80);      // YCrCb color space bounds
            YCrCb_max = new Ycc(255, 185, 135);
            box = new MCvBox2D();
            ellip = new Ellipse();
            Application.Idle += new EventHandler(FrameGrabber);
        }

        /// <summary>
        /// Grabs one frame from the camera, runs skin detection, contour/hull
        /// extraction and finger counting, and updates both image boxes.
        /// Runs on the UI thread (Application.Idle).
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            currentFrame = grabber.QueryFrame(); // Capture current frame
            if (currentFrame != null)
            {
                currentFrameCopy = currentFrame.Copy();
                // Uncomment if using the OpenCV adaptive skin detector:
                //Image<Gray,Byte> skin = new Image<Gray,byte>(currentFrameCopy.Width, currentFrameCopy.Height);
                //detector.Process(currentFrameCopy, skin);
                skinDetector = new YCrCbSkinDetector(); // Skin colour detector
                // Skin region after erosion & dilation
                Image<Gray, Byte> skin = skinDetector.DetectSkin(currentFrameCopy, YCrCb_min, YCrCb_max);
                ExtractContourAndHull(skin); // Contour extraction
                DrawAndComputeFingersNum();

                imageBoxSkin.Image = skin;
                imageBoxFrameGrabber.Image = currentFrame;
                // NOTE(review): this sleep blocks the UI thread for a full
                // second per frame; kept to preserve the original pacing.
                int pauseTime = 1000;
                System.Threading.Thread.Sleep(pauseTime);
            }
        }

        /// <summary>
        /// Finds the largest skin contour, approximates it, and computes its
        /// convex hull, min-area box, enclosing circle, filtered hull and
        /// convexity defects (stored in fields for DrawAndComputeFingersNum).
        /// </summary>
        private void ExtractContourAndHull(Image<Gray, byte> skin)
        {
            using (MemStorage storage = new MemStorage())
            {
                Contour<Point> contours = skin.FindContours(
                    Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                    Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, storage);
                Contour<Point> biggestContour = null;
                Double Result1 = 0;
                Double Result2 = 0;
                // Walk the contour list keeping the one with the largest area.
                while (contours != null)
                {
                    Result1 = contours.Area;
                    if (Result1 > Result2)
                    {
                        Result2 = Result1;
                        biggestContour = contours;
                    }
                    contours = contours.HNext;
                }
                if (biggestContour != null)
                {
                    //currentFrame.Draw(biggestContour, new Bgr(Color.DarkViolet), 2);
                    // Polygonal approximation smooths the contour before hull/defect analysis.
                    Contour<Point> currentContour =
                        biggestContour.ApproxPoly(biggestContour.Perimeter * 0.0025, storage);
                    //currentFrame.Draw(currentContour, new Bgr(Color.LimeGreen), 2);
                    biggestContour = currentContour;
                    hull = biggestContour.GetConvexHull(Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE);
                    box = biggestContour.GetMinAreaRect();
                    PointF[] points = box.GetVertices();
                    //handRect = box.MinAreaRect();
                    //currentFrame.Draw(handRect, new Bgr(200, 0, 0), 1);
                    Point[] ps = new Point[points.Length];
                    for (int i = 0; i < points.Length; i++)
                        ps[i] = new Point((int)points[i].X, (int)points[i].Y);
                    //currentFrame.DrawPolyline(hull.ToArray(), true, new Bgr(200, 125, 75), 2);
                    //currentFrame.Draw(new CircleF(new PointF(box.center.X, box.center.Y), 3), new Bgr(200, 125, 75), 2);
                    PointF center;
                    float radius;
                    // NOTE(review): this call was scrambled in the extracted
                    // source; restored to the two-out-parameter form.
                    CvInvoke.cvMinEnclosingCircle(biggestContour.Ptr, out center, out radius);
                    currentFrame.Draw(new CircleF(center, radius), new Bgr(Color.Gold), 2);
                    currentFrame.Draw(new CircleF(new Point((int)center.X, (int)center.Y), 2), new Bgr(Color.Gold), 2);

                    // Keep only hull points far enough from their successor
                    // (at least a tenth of the box width apart).
                    filteredHull = new Seq<Point>(storage);
                    for (int i = 0; i < hull.Total; i++)
                    {
                        // Wrap to the first point on the last iteration to
                        // avoid an out-of-range access at hull[hull.Total].
                        Point next = hull[(i + 1) % hull.Total];
                        if (Math.Sqrt(Math.Pow(hull[i].X - next.X, 2) +
                                      Math.Pow(hull[i].Y - next.Y, 2)) > box.size.Width / 10)
                        {
                            filteredHull.Push(hull[i]);
                        }
                    }
                    // (Spelling "Defacts" is the actual EmguCV 2.x API name.)
                    defects = biggestContour.GetConvexityDefacts(storage,
                        Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE);
                    defectArray = defects.ToArray();
                }
            }
        }

        /// <summary>
        /// Counts extended fingers from the convexity defects, draws the
        /// defect lines and the count onto the frame, then drives the two
        /// parallel-port devices according to the finger count (1..5).
        /// </summary>
        private void DrawAndComputeFingersNum()
        {
            int fingerNum = 0;
            #region hull drawing
            //for (int i = 0; i < filteredHull.Total; i++)
            //{
            //    PointF hullPoint = new PointF((float)filteredHull[i].X, (float)filteredHull[i].Y);
            //    CircleF hullCircle = new CircleF(hullPoint, 4);
            //    currentFrame.Draw(hullCircle, new Bgr(Color.Aquamarine), 2);
            //}
            #endregion
            #region defects drawing
            if (defects != null)
            {
                for (int i = 0; i < defects.Total; i++)
                {
                    PointF startPoint = new PointF((float)defectArray[i].StartPoint.X,
                                                   (float)defectArray[i].StartPoint.Y);
                    PointF depthPoint = new PointF((float)defectArray[i].DepthPoint.X,
                                                   (float)defectArray[i].DepthPoint.Y);
                    PointF endPoint = new PointF((float)defectArray[i].EndPoint.X,
                                                 (float)defectArray[i].EndPoint.Y);
                    LineSegment2D startDepthLine = new LineSegment2D(defectArray[i].StartPoint,
                                                                     defectArray[i].DepthPoint);
                    LineSegment2D depthEndLine = new LineSegment2D(defectArray[i].DepthPoint,
                                                                   defectArray[i].EndPoint);
                    CircleF startCircle = new CircleF(startPoint, 5f);
                    CircleF depthCircle = new CircleF(depthPoint, 5f);
                    CircleF endCircle = new CircleF(endPoint, 5f);
                    // A defect counts as a finger when its start (tip) lies
                    // above the box centre and far enough from the valley.
                    // NOTE(review): the '-' in the Y term was lost in the
                    // extracted source; restored.
                    if ((startCircle.Center.Y < box.center.Y || depthCircle.Center.Y < box.center.Y)
                        && (startCircle.Center.Y < depthCircle.Center.Y)
                        && (Math.Sqrt(Math.Pow(startCircle.Center.X - depthCircle.Center.X, 2)
                                    + Math.Pow(startCircle.Center.Y - depthCircle.Center.Y, 2))
                            > box.size.Height / 6.5))
                    {
                        fingerNum++;
                        currentFrame.Draw(startDepthLine, new Bgr(Color.Green), 2);
                        currentFrame.Draw(depthEndLine, new Bgr(Color.Magenta), 2);
                    }
                    //currentFrame.Draw(startCircle, new Bgr(Color.Red), 2);
                    //currentFrame.Draw(depthCircle, new Bgr(Color.Red), 5);
                    //currentFrame.Draw(endCircle, new Bgr(Color.DarkBlue), 4);
                }
            }
            #endregion
            MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_DUPLEX, 5d, 5d);
            currentFrame.Draw(fingerNum.ToString(), ref font, new Point(50, 150), new Bgr(Color.Red));

            // Map the finger count to parallel-port writes at port 0x378 (888):
            // bit 0 drives device 1, bit 1 drives device 2.
            if (fingerNum >= 1 && fingerNum <= 5)
            {
                //fingerNum=1;
                switch (fingerNum)
                {
                    case 1: // Turn ON device 1
                        {
                            if (flag_dev1_on == false && flag_dev2_on == false)
                            {
                                //value = (int)Math.Pow(2, 1);
                                value = 1;
                                PortAccess.Output(888, value);
                                flag_dev1_on = true;
                            }
                            else if (flag_dev1_on == false && flag_dev2_on == true)
                            {
                                value = 3;
                                PortAccess.Output(888, value);
                                flag_dev1_on = true;
                            }
                            break;
                        }
                    case 2: // Turn OFF device 1
                        {
                            if (flag_dev1_on == true && flag_dev2_on == false)
                            {
                                //value = (int)Math.Pow(2, 1);
                                value = 0;
                                PortAccess.Output(888, value);
                                flag_dev1_on = false;
                            }
                            else if (flag_dev1_on == true && flag_dev2_on == true)
                            {
                                value = 2;
                                PortAccess.Output(888, value);
                                flag_dev1_on = false;
                            }
                            break;
                        }
                    case 3: // Turn ON device 2
                        {
                            if (flag_dev2_on == false && flag_dev1_on == false)
                            {
                                //value = (int)Math.Pow(2, 1);
                                value = 2;
                                PortAccess.Output(888, value);
                                flag_dev2_on = true;
                            }
                            else if (flag_dev2_on == false && flag_dev1_on == true)
                            {
                                value = 3;
                                PortAccess.Output(888, value);
                                flag_dev2_on = true;
                            }
                            break;
                        }
                    case 4: // Turn OFF device 2
                        {
                            if (flag_dev2_on == true && flag_dev1_on == false)
                            {
                                //value = (int)Math.Pow(2, 1);
                                value = 0;
                                PortAccess.Output(888, value);
                                flag_dev2_on = false;
                            }
                            else if (flag_dev2_on == true && flag_dev1_on == true)
                            {
                                value = 1;
                                PortAccess.Output(888, value);
                                flag_dev2_on = false;
                            }
                            break;
                        }
                    case 5: // Reset all devices
                        {
                            value = 0;
                            PortAccess.Output(888, value);
                            break;
                        }
                }
            }
        }

        private void Form1_Load(object sender, EventArgs e)
        {
        }
    }
}

FILE 2 (skin-detector class, not a Form):
CustomYCrCbSkinDetector
using
using
using
using
using
using

System;
System.Collections.Generic;
System.Linq;
System.Text;
Emgu.CV;
Emgu.CV.Structure;

namespace HandGestureRecognition.SkinDetector
{
    /// <summary>
    /// Skin detector that classifies pixels with a hard-coded elliptical
    /// boundary in the Cr/Cb plane (tightened for dark pixels), then cleans
    /// the mask with erosion and dilation.
    /// Code adapted from
    /// http://blog.csdn.net/scyscyao/archive/2010/04/09/5468577.aspx
    /// (reference paper, in Chinese:
    /// http://www.chinamca.com/UploadFile/200642991948257.pdf).
    /// </summary>
    class CustomYCrCbSkinDetector : IColorSkinDetector
    {
        /// <summary>
        /// Returns a binary mask (255 = skin, 0 = background) for Img.
        /// The min/max parameters are unused here: the thresholds are the
        /// hard-coded ellipse constants below.
        /// </summary>
        public override Image<Gray, byte> DetectSkin(Image<Bgr, byte> Img, IColor min, IColor max)
        {
            Image<Ycc, Byte> currentYCrCbFrame = Img.Convert<Ycc, Byte>();
            Image<Gray, Byte> skin = new Image<Gray, Byte>(Img.Width, Img.Height);
            int y, cr, cb, x1, y1, value; // (unused local 'l' from the original removed)
            int rows = Img.Rows;
            int cols = Img.Cols;
            Byte[, ,] YCrCbData = currentYCrCbFrame.Data;
            Byte[, ,] skinData = skin.Data;
            for (int i = 0; i < rows; i++)
                for (int j = 0; j < cols; j++)
                {
                    y = YCrCbData[i, j, 0];
                    cr = YCrCbData[i, j, 1];
                    cb = YCrCbData[i, j, 2];
                    // Centre Cr/Cb on the skin-cluster centroid, then rotate
                    // and scale into the ellipse's axis-aligned frame
                    // (fixed-point integer arithmetic from the reference code).
                    // NOTE(review): these statements were interleaved across
                    // two columns in the extracted source; reconstructed.
                    cb -= 109;
                    cr -= 152;
                    x1 = (819 * cr - 614 * cb) / 32 + 51;
                    y1 = (819 * cr + 614 * cb) / 32 + 77;
                    x1 = x1 * 41 / 1024;
                    y1 = y1 * 73 / 1024;
                    value = x1 * x1 + y1 * y1;
                    // Darker pixels (low luma) get a tighter ellipse.
                    if (y < 100)
                        skinData[i, j, 0] = (value < 700) ? (byte)255 : (byte)0;
                    else
                        skinData[i, j, 0] = (value < 850) ? (byte)255 : (byte)0;
                }
            // Morphological cleanup: one erosion to drop speckle, two
            // dilations to close holes in the skin blob.
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3,
                Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvErode(skin, skin, rect_6, 1);
            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            return skin;
        }
    }
}

FILE 3 (skin-detector class, not a Form):
HsvSkinDetector
using
using
using
using
using
using

System;
System.Collections.Generic;
System.Linq;
System.Text;
Emgu.CV;
Emgu.CV.Structure;

namespace HandGestureRecognition.SkinDetector
{
    /// <summary>
    /// Skin detector that thresholds the frame in HSV space: every pixel
    /// inside [min, max] becomes 255 in the returned mask, all others 0.
    /// </summary>
    public class HsvSkinDetector : IColorSkinDetector
    {
        /// <summary>
        /// Returns a binary skin mask for Img using the HSV bounds
        /// min and max (both must be Hsv values).
        /// </summary>
        public override Image<Gray, byte> DetectSkin(Image<Bgr, byte> Img, IColor min, IColor max)
        {
            Image<Hsv, Byte> currentHsvFrame = Img.Convert<Hsv, Byte>();
            // InRange allocates its own result image, so the original's
            // separate new Image<Gray, byte>(...) — immediately overwritten —
            // was a dead allocation and has been removed.
            Image<Gray, byte> skin = currentHsvFrame.InRange((Hsv)min, (Hsv)max);
            return skin;
        }
    }
}

FILE 4 (abstract base class, not a Form):
IColorSkinDetector
using
using
using
using
using
using

System;
System.Collections.Generic;
System.Linq;
System.Text;
Emgu.CV;
Emgu.CV.Structure;

namespace HandGestureRecognition.SkinDetector
{
/// <summary>
/// Abstract base for colour-space skin detectors (despite the "I" prefix,
/// this is an abstract class, not an interface).
/// </summary>
public abstract class IColorSkinDetector

{
/// <summary>
/// Returns a binary mask (255 = skin, 0 = background) for Img; min/max are
/// colour-space bounds whose concrete type is chosen by each subclass.
/// </summary>
public abstract Image<Gray, Byte> DetectSkin(Image<Bgr,
Byte> Img, IColor min, IColor max);
}
}

FILE 5 (skin-detector class, not a Form):
YCrCbSkinDetector
using
using
using
using
using
using

System;
System.Collections.Generic;
System.Linq;
System.Text;
Emgu.CV.Structure;
Emgu.CV;

namespace HandGestureRecognition.SkinDetector
{
    /// <summary>
    /// Skin detector that thresholds the frame in YCrCb space and then
    /// cleans the mask with morphological erosion and dilation.
    /// </summary>
    public class YCrCbSkinDetector : IColorSkinDetector
    {
        /// <summary>
        /// Returns a binary skin mask for Img using the YCrCb bounds
        /// min and max (both must be Ycc values).
        /// </summary>
        public override Image<Gray, byte> DetectSkin(Image<Bgr, byte> Img, IColor min, IColor max)
        {
            // Convert the frame to YCrCb.
            Image<Ycc, Byte> currentYCrCbFrame = Img.Convert<Ycc, Byte>();
            // Black & white skin region. InRange allocates the result image
            // itself, so the original's separate new Image<Gray, byte>(...) —
            // immediately overwritten — was a dead allocation and is removed.
            Image<Gray, byte> skin = currentYCrCbFrame.InRange((Ycc)min, (Ycc)max);
            // One pass of erosion with a 12x12 kernel removes small noise blobs.
            StructuringElementEx rect_12 = new StructuringElementEx(12, 12, 6, 6,
                Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvErode(skin, skin, rect_12, 1);
            // Two passes of dilation with a 6x6 kernel close holes in the blob.
            StructuringElementEx rect_6 = new StructuringElementEx(6, 6, 3, 3,
                Emgu.CV.CvEnum.CV_ELEMENT_SHAPE.CV_SHAPE_RECT);
            CvInvoke.cvDilate(skin, skin, rect_6, 2);
            return skin;
        }
    }
}

You might also like