﻿#region Header

/*
Behavioral Rating of Dancing Human Crowds based on Motion Patterns
By

Pascal Hauser 
Dipl. Ing. in Informatik, Hochschule für Technik Rapperswil, 2006
Master Thesis, Hochschule für Technik Rapperswil, 2008-2010

and

Raphael Gfeller
Dipl. Ing. in Informatik, Hochschule für Technik Rapperswil, 2006
Master Thesis, Hochschule für Technik Rapperswil, 2008-2010

*/

#endregion

#region Usings

using System;
using System.Drawing;
using System.IO;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Sebarf.Diagnostics.Interfaces;
using Tests.Basics.UI;

#endregion

namespace Tests.Basics {
	/// <summary>
	/// Tests the usage of the Emgu library which allows to use the OpenCV library under .Net.
	/// These are interactive smoke tests: most of them open modal viewers and/or
	/// require an attached camera (they log and return when none is found).
	/// <see cref="http://www.emgu.com/wiki/index.php/Tutorial"/>
	/// </summary>
	[TestClass]
	public class TestOpenCV {
		#region Test Methods

		// Threshold in pixels: motion components with a smaller area are ignored.
		// Reduce the value to detect smaller motion.
		private const double MinMotionArea = 100;

		/// <summary>
		/// Loads the sample image, inverts it and shows the result in a modal
		/// dialog for visual inspection.
		/// </summary>
		[TestMethod]
		[DeploymentItem("Images\\ManyPeople.JPG", "Images")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestLoadImageAndInvertImage() {
			try {
				var img1 = new Image<Bgr, Byte>("Images\\ManyPeople.jpg");
				img1 = img1.Not(); // Not() returns a new, inverted copy

				var imageBox = new ImageBox();
				imageBox.Width = img1.Width;
				imageBox.Height = img1.Height;
				imageBox.Image = img1;

				var form = new Form();
				form.Controls.Add(imageBox);
				form.ShowDialog();
			}
			catch (Exception ex) {
				Logger.WriteError(ex.Message);
				Assert.Fail(ex.Message);
			}
		}

		/// <summary>
		/// Continuously grabs frames from the default camera, draws a text overlay
		/// and displays them. Logged and skipped when no camera is attached.
		/// </summary>
		[TestMethod]
		[DeploymentItem("Images\\ManyPeople.JPG", "Images")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestCaptureImages() {
			try {
				var viewer = new ImageViewer(); //create an image viewer
				Capture capture = null;
				try {
					capture = new Capture(); //create a camera capture
				}
				catch (Exception) {
					Logger.WriteInformation("No camera found");
					return;
				}

				var font = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);
				Application.Idle += delegate {
					//runs until the application is closed (close button click on the image viewer)
					Image<Bgr, byte> img = capture.QueryFrame(); //the image obtained from the camera
					if (img == null) {
						return; //no frame available (camera busy or stopped)
					}
					img.Draw("Hallo Hausi", ref font, new Point(10, 50), new Bgr(255, 0, 0));
					viewer.Image = img;
				};
				viewer.ShowDialog();
			}
			catch (Exception ex) {
				Logger.WriteError(ex.Message);
				Assert.Fail(ex.Message);
			}
		}

		/// <summary>
		/// Detects faces on live camera frames with a Haar cascade and outlines
		/// them in blue. Logged and skipped when no camera is attached.
		/// </summary>
		[TestMethod]
		[DeploymentItem("HaarCascade", "HaarCascade")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestCaptureFaceDetection() {
			try {
				var viewer = new ImageViewer(); //create an image viewer
				Capture capture = null;
				try {
					capture = new Capture(); //create a camera capture
				}
				catch (Exception) {
					Logger.WriteInformation("No camera found");
					return;
				}

				//Load the Haar cascade once; it was previously re-created (and
				//leaked) on every Idle tick.
				var face = new HaarCascade("HaarCascade\\haarcascade_frontalface_alt_tree.xml");

				Application.Idle += delegate {
					//run this until application closed (close button click on image viewer)
					Image<Bgr, byte> img = capture.QueryFrame(); //the image obtained from the camera
					if (img == null) {
						return; //no frame available (camera busy or stopped)
					}
					Image<Gray, Byte> gray = img.Convert<Gray, Byte>(); //detection works on grayscale

					//Detect the faces from the gray scale image and store the locations as rectangles.
					//The first dimension is the channel,
					//the second dimension is the index of the rectangle in the specific channel.
					MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.1, 1,
																		   HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
																		   new Size(20, 20));

					foreach (MCvAvgComp f in facesDetected[0]) {
						//draw the face detected in the 0th (gray) channel with blue color
						img.Draw(f.rect, new Bgr(Color.Blue), 2);
					}
					viewer.Image = img;
				};
				viewer.ShowDialog();
			}
			catch (Exception ex) {
				Logger.WriteError(ex.Message);
				Assert.Fail(ex.Message);
			}
		}

		/// <summary>
		/// Opens the interactive motion-detection form; manual smoke test only.
		/// </summary>
		[TestMethod]
		[DeploymentItem("Images\\ManyPeople.JPG", "Images")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestMotionDetectionFromCameraUI() {
			var test = new MotionDetectionFromCamera();
			test.ShowDialog();
		}

		/// <summary>
		/// Runs motion detection on live camera frames: one viewer shows the
		/// edge-filtered camera image, the other the detected motion components.
		/// Logged and skipped when no camera is attached.
		/// </summary>
		[TestMethod]
		[DeploymentItem("Images\\ManyPeople.JPG", "Images")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestMotionDetectionFromCamera() {
			try {
				var capturedImageBox = new ImageViewer(); //create an image viewer
				var motionImageBox = new ImageViewer(); //create an image viewer
				Capture capture = null;
				try {
					capture = new Capture(); //create a camera capture
				}
				catch (Exception) {
					Logger.WriteInformation("No camera found");
					return;
				}

				MotionHistory motionHistory = CreateMotionHistory();
				Application.Idle += delegate {
					using (var storage = new MemStorage()) { //create storage for motion components
						Image<Bgr, Byte> frame = capture.QueryFrame();
						if (frame == null) {
							return; //no frame available (camera busy or stopped)
						}
						Image<Bgr, Byte> image = frame.PyrUp();
						//edge filter to reduce noise from the image
						image = image.Canny(new Bgr(140, 140, 140), new Bgr(200, 200, 200));
						capturedImageBox.Image = image;

						//update the motion history with the current frame
						motionHistory.Update(image.Convert<Gray, Byte>());

						//Display the image of the motion
						motionImageBox.Image = CreateMotionImage(motionHistory, storage);
					}
				};
				ShowViewers(capturedImageBox, motionImageBox);
			}
			catch (Exception ex) {
				Logger.WriteError(ex.Message);
				Assert.Fail(ex.Message);
			}
		}

		/// <summary>
		/// Runs motion detection over the pre-grabbed "Middle" image sequence,
		/// generating the sequence first when it does not exist yet. Frames are
		/// fed with synthetic timestamps 100 ms apart (simulated 10 fps).
		/// </summary>
		[TestMethod]
		[DeploymentItem("Images\\ManyPeople.JPG", "Images")]
		[DeploymentItem("..\\Libraries\\OpenCV")]
		public void TestMotionDetectionFromVideoMiddle() {
			try {
				var capturedImageBox = new ImageViewer(); //create an image viewer
				var motionImageBox = new ImageViewer(); //create an image viewer

				int index = 0;
				string workingDictionary = Directory.GetCurrentDirectory() + @"\..\..\..\..\..\..\099_Materials\Middle\";
				if (!File.Exists(workingDictionary + "Sample_00000.jpg")) {
					Logger.WriteDebug("Generate video images");
					new TestVideoGraber().TestGrabVideoMaterialsMiddle();
				}
				string prefix = workingDictionary + "Sample_"; //workingDictionary already ends with '\'
				DateTime current = DateTime.Now; //base time for the synthetic timestamps
				MotionHistory motionHistory = CreateMotionHistory();
				Application.Idle += delegate {
					using (var storage = new MemStorage()) { //create storage for motion components
						string file = prefix + index.ToString("00000") + ".jpg";
						if (!File.Exists(file)) {
							return; //end of the image sequence reached
						}
						index++;
						Image<Bgr, Byte> image = new Image<Bgr, Byte>(file).PyrDown();
						capturedImageBox.Image = image;

						//update the motion history; 100 ms per frame simulates 10 fps
						motionHistory.Update(image.Convert<Gray, Byte>(), current.AddMilliseconds(100 * index));

						//Display the image of the motion
						motionImageBox.Image = CreateMotionImage(motionHistory, storage);
					}
				};
				ShowViewers(capturedImageBox, motionImageBox);
			}
			catch (Exception ex) {
				Logger.WriteError(ex.Message);
				Assert.Fail(ex.Message);
			}
		}

		/// <summary>
		/// Creates the motion history buffer shared by the motion-detection tests.
		/// </summary>
		private static MotionHistory CreateMotionHistory() {
			return new MotionHistory(6, //number of images to store in buffer, adjust it to fit your camera's frame rate
									 20, //0-255, the amount of pixel intensity change to consider it as motion pixel
									 1.0, //in seconds, the duration of motion history you want to keep
									 0.05, //in seconds, parameter for cvCalcMotionGradient
									 0.5); //in seconds, parameter for cvCalcMotionGradient
		}

		/// <summary>
		/// Builds the visualization image for the current state of
		/// <paramref name="motionHistory"/>: motion pixels in blue, individual
		/// motion components in red, the overall motion direction in green.
		/// </summary>
		/// <param name="motionHistory">Motion history updated with the latest frame.</param>
		/// <param name="storage">Scratch storage for the motion components; cleared here.</param>
		/// <returns>A new image of the same size as the motion mask.</returns>
		private static Image<Bgr, Byte> CreateMotionImage(MotionHistory motionHistory, MemStorage storage) {
			//get a copy of the motion mask and enhance its color by stretching
			//the intensity range to 0-255
			Image<Gray, Byte> motionMask = motionHistory.Mask;
			double[] minValues, maxValues;
			Point[] minLoc, maxLoc;
			motionMask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
			motionMask._Mul(255.0 / maxValues[0]);

			//create the motion image; display the motion pixels in blue (first channel)
			var motionImage = new Image<Bgr, Byte>(motionMask.Size);
			motionImage[0] = motionMask;

			storage.Clear(); //clear the storage
			Seq<MCvConnectedComp> motionComponents = motionHistory.GetMotionComponents(storage);

			//iterate through each of the motion components
			foreach (MCvConnectedComp comp in motionComponents) {
				//reject the components that have a small area
				if (comp.area < MinMotionArea) {
					continue;
				}

				//find the angle and motion pixel count of the specific area
				double angle, motionPixelCount;
				motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);

				//reject the area that contains too little motion
				if (motionPixelCount < comp.area * 0.05) {
					continue;
				}

				//Draw each individual motion in red
				DrawMotion(motionImage, comp.rect, angle, new Bgr(Color.Red));
			}

			//find and draw the overall motion angle
			double overallAngle, overallMotionPixelCount;
			motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);
			DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(Color.Green));

			return motionImage;
		}

		/// <summary>Sizes both viewers to 700x700 and shows them (the second one modal).</summary>
		private static void ShowViewers(ImageViewer capturedImageBox, ImageViewer motionImageBox) {
			capturedImageBox.Width = 700;
			capturedImageBox.Height = 700;
			motionImageBox.Width = 700;
			motionImageBox.Height = 700;
			capturedImageBox.Show();
			motionImageBox.ShowDialog();
		}

		/// <summary>
		/// Draws a circle around <paramref name="motionRegion"/> and a line from
		/// its center pointing in the motion direction.
		/// </summary>
		/// <param name="image">Image to draw on.</param>
		/// <param name="motionRegion">Bounding rectangle of the motion.</param>
		/// <param name="angle">Motion direction in degrees.</param>
		/// <param name="color">Drawing color.</param>
		private static void DrawMotion(Image<Bgr, Byte> image, Rectangle motionRegion, double angle, Bgr color) {
			//radius = average of the region's half-width and half-height
			float circleRadius = (motionRegion.Width + motionRegion.Height) >> 2;
			//BUGFIX: '+' binds tighter than '>>' in C#, so the original
			//'motionRegion.X + motionRegion.Width >> 1' computed (X + Width) / 2
			//instead of the region center X + Width / 2 (same for Y). The shift
			//is now parenthesized so the circle sits on the region's center.
			var center = new Point(motionRegion.X + (motionRegion.Width >> 1),
								   motionRegion.Y + (motionRegion.Height >> 1));

			var circle = new CircleF(center, circleRadius);

			//project the angle (degrees) onto the circle; screen Y grows downwards,
			//hence the subtraction for the Y component
			var xDirection = (int)(Math.Cos(angle * (Math.PI / 180.0)) * circleRadius);
			var yDirection = (int)(Math.Sin(angle * (Math.PI / 180.0)) * circleRadius);
			var pointOnCircle = new Point(center.X + xDirection, center.Y - yDirection);
			var line = new LineSegment2D(center, pointOnCircle);

			image.Draw(circle, color, 1);
			image.Draw(line, color, 2);
		}

		#endregion
	}
}