﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Microsoft.Kinect;
using System.Drawing;
using System.Drawing.Imaging;
using System.Windows.Interop;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.CvEnum;
using System.IO.Ports;
namespace Robot
{
    // One trackable target: HSV threshold state, CamShift working images, the
    // user-drawn selection rectangle, and the last depth-mapped 3D position.
    struct Aim
    {
        public double vmin;             // value (brightness) threshold, lower slider
        public double vmax;             // value (brightness) threshold, upper slider
        public double smin;             // minimum saturation accepted by the mask
        public bool isCutMask;          // true while the user is dragging a selection rectangle
        public bool isTrack;            // true while CamShift tracking is active for this target
        public bool firstImage;         // true until the Cv working images/histogram are allocated
        public int trackObject;         // -1 = (re)compute histogram from selection; 1 = histogram ready
        public IntPtr img_hsv;          // unmanaged IplImage: frame converted to HSV (3 channels)
        public IntPtr img_hue;          // unmanaged IplImage: hue channel only (1 channel)
        public IntPtr img_mask;         // unmanaged IplImage: in-range mask from smin/vmin/vmax
        public IntPtr img_backproject;  // unmanaged IplImage: histogram back-projection
        public System.Drawing.Rectangle selection;      // user-drawn ROI, in scaled (x4) image coordinates
        public System.Drawing.Rectangle trackWindow;    // current CamShift search window
        public MCvBox2D trackBox;       // rotated box returned by cvCamShift (drawn as an ellipse)
        public MCvConnectedComp trackComp;  // connected-component result of cvCamShift
        public DenseHistogram hist;     // 64-bin hue histogram of the selected region
        public SkeletonPoint spacePoint;    // 3D point (meters) mapped from the depth frame

    }

    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        // GDI interop: releases the HBITMAP handle produced by Bitmap.GetHbitmap()
        // (Imaging.CreateBitmapSourceFromHBitmap does not own the handle).
        [DllImport("gdi32")]
        static extern int DeleteObject(IntPtr o);
        private KinectSensor sensor;        // the connected Kinect device (null if none found)
        private SerialPort sp;              // serial link to the robot, opened in button1_Click
        // NOTE(review): depthFrame/colorFrame are IDisposable but are never disposed here;
        // confirm whether the Kinect SDK frame pool tolerates this.
        public DepthImageFrame depthFrame;
        public ColorImageFrame colorFrame;
        Aim aim1,aim2,aim3,aim4;            // the four independently tracked targets
        public DepthImagePoint[] depthPoints;       // color-pixel -> depth-pixel map, refreshed on every depth frame
        public short[] depthPixelData;              // raw 16-bit depth pixels, written into depthBitMap for display
        public DepthImagePixel[] depthImagePixel;   // structured depth pixels used by the coordinate mapper
        int scale = 4;                      // displayed image is 1/4 native size; mouse coords are multiplied by this
        Int32Rect depthImageBitmapRect;     // region of depthBitMap updated each frame (whole image)
        WriteableBitmap depthBitMap;        // reusable bitmap shown in image2
        int depthImageStride;               // bytes per row of the depth image
        IntPtr tmp;                         // HBITMAP handle for the current processed color frame
        BitmapSource bitmapSource;          // WPF-displayable wrapper around tmp
        Bitmap bitmap;                      // GDI+ staging bitmap for the raw color pixels
        BitmapData bitmapData;              // lock record for copying into 'bitmap'
        /// <summary>
        /// Connects to the first Kinect sensor whose status is Connected and
        /// initializes its streams.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();
            sensor = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);//connect to the Kinect
            InitializeKinectSensor(sensor);
        }
        /// <summary>
        /// Enables the 640x480@30fps color and depth streams, wires the frame-ready
        /// handlers, allocates all pixel buffers, and starts the sensor.
        /// Does nothing when no sensor was found.
        /// </summary>
        private void InitializeKinectSensor(KinectSensor kinectSensor)
        {
            if (kinectSensor != null)
            {
                // initialize the Kinect streams
                kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                kinectSensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(kinectSensor_ColorFrameReady);


                kinectSensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                kinectSensor.DepthFrameReady += new EventHandler<DepthImageFrameReadyEventArgs>(kinectSensor_DepthFrameReady);
                depthBitMap = new WriteableBitmap(sensor.DepthStream.FrameWidth, sensor.DepthStream.FrameHeight,
                            96, 96, PixelFormats.Gray16, null);          // writable, updatable BitmapSource (WriteableBitmap is a BitmapSource subclass)
                depthImageBitmapRect = new Int32Rect(0, 0, sensor.DepthStream.FrameWidth, sensor.DepthStream.FrameHeight);        // rectangle to refresh: here the whole image
                depthImageStride = sensor.DepthStream.FrameWidth * sensor.DepthStream.FrameBytesPerPixel;
                depthPixelData = new short[sensor.DepthStream.FramePixelDataLength]; // depth pixel array storing the data captured from the Kinect depthFrame
                depthImagePixel = new DepthImagePixel[sensor.DepthStream.FramePixelDataLength];
                depthPoints = new DepthImagePoint[640 * 480];
                kinectSensor.Start();
            }
        }
        /// <summary>
        /// Per color frame: copies the frame into a GDI+ bitmap, wraps it as an Emgu
        /// image, draws selection rectangles, runs the CamShift pipeline for each
        /// active target (mask -> hue histogram -> back-projection -> cvCamShift),
        /// maps each track center to a 3D skeleton-space point via the depth frame,
        /// and finally displays the annotated image in image1.
        /// </summary>
        void kinectSensor_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            colorFrame = e.OpenColorImageFrame();
            if (colorFrame != null)
            {
                #region 显示彩色图像
                byte[] colorPixels;             // color pixel array storing the data captured from the Kinect colorFrame
                Image<Bgr, Byte> colorImage = null;
                colorPixels = new byte[sensor.ColorStream.FramePixelDataLength];// sized to the frame's pixel-data length
                colorFrame.CopyPixelDataTo(colorPixels);        // copy the pixel data into the array
                // NOTE(review): a new Bitmap is allocated per frame and never disposed — verify GDI handle pressure.
                bitmap = new Bitmap(colorFrame.Width, colorFrame.Height);    // new Bitmap with colorFrame's width and height

                // Lock the bitmap into memory, blit colorPixels into its backing store, then unlock.
                // Assumes the bitmap's pixel format is 32bpp so colorPixels.Length bytes fit exactly — TODO confirm.
                bitmapData = bitmap.LockBits(
                        new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height),
                        ImageLockMode.WriteOnly,
                        bitmap.PixelFormat);            //
                Marshal.Copy(colorPixels, 0, bitmapData.Scan0, colorPixels.Length);
                bitmap.UnlockBits(bitmapData);

                colorImage = new Image<Bgr, byte>(bitmap);

                // While a target is selected but not yet tracking, draw its selection rectangle
                // (each target has its own color) so the user can see what they dragged.
                if ( !aim1.isTrack && aim1.selection.X > 0 && aim1.selection.Width > 0)
                {
                    Emgu.CV.CvInvoke.cvRectangleR(colorImage, aim1.selection, new MCvScalar(255, 0, 255), 2, LINE_TYPE.EIGHT_CONNECTED, 2);
                }
                if (!aim2.isTrack && aim2.selection.X > 0 && aim2.selection.Width > 0)
                {
                    Emgu.CV.CvInvoke.cvRectangleR(colorImage, aim2.selection, new MCvScalar(255,0,0), 2, LINE_TYPE.EIGHT_CONNECTED, 2);
                }
                if (!aim3.isTrack && aim3.selection.X > 0 && aim3.selection.Width > 0)
                {
                    Emgu.CV.CvInvoke.cvRectangleR(colorImage, aim3.selection, new MCvScalar(255, 255, 0), 2, LINE_TYPE.EIGHT_CONNECTED, 2);
                }
                if (!aim4.isTrack && aim4.selection.X > 0 && aim4.selection.Width > 0)
                {
                    Emgu.CV.CvInvoke.cvRectangleR(colorImage, aim4.selection, new MCvScalar(55, 155, 255), 2, LINE_TYPE.EIGHT_CONNECTED, 2);
                }
                #region aim1目标跟踪
                if (aim1.isTrack)
                {
                    if (aim1.firstImage)
                    {
                        // First tracked frame: allocate the unmanaged working images and the
                        // 64-bin hue histogram. NOTE(review): these cvCreateImage buffers are
                        // never released with cvReleaseImage — confirm intended lifetime.
                        aim1.img_hsv = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 3);
                        aim1.img_hue = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim1.img_mask = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim1.img_backproject = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim1.hist = new DenseHistogram(64, new RangeF(0, 180));
                        aim1.firstImage = false;
                    }
                    // NOTE(review): the source image is Image<Bgr,..> but the conversion code is
                    // CV_RGB2HSV — hue values will be computed with R/B swapped; confirm intended.
                    CvInvoke.cvCvtColor(colorImage, aim1.img_hsv, COLOR_CONVERSION.CV_RGB2HSV);
                    aim1.vmin = slider1.Value;
                    aim1.vmax = slider2.Value;
                    aim1.smin = slider3.Value;
                    // Build the saturation/value mask. NOTE(review): upper hue bound is 256 here
                    // but 180 for aim2/aim3/aim4 — likely an inconsistency; verify.
                    CvInvoke.cvInRangeS(aim1.img_hsv, new MCvScalar(0, aim1.smin, Math.Min(aim1.vmin, aim1.vmax), 0), new MCvScalar(256, 256, Math.Max(aim1.vmin, aim1.vmax), 0), aim1.img_mask);
                    CvInvoke.cvSplit(aim1.img_hsv, aim1.img_hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                    if (aim1.trackObject == -1)
                    {
                        // Selection was (re)made: build the hue histogram from the selected ROI.
                        float max_val = 0.0F;
                        float min_val = 0.0F;
                        // Shrink the selection by 4. NOTE(review): the selection was stored in
                        // scale(x4) coordinates but this image is full 640x480 — confirm this
                        // quarter-scale mapping against the displayed image size.
                        aim1.selection.X /= 4; 
                        aim1.selection.Y /= 4;
                        aim1.selection.Width /= 4;
                        aim1.selection.Height /= 4;
                        CvInvoke.cvSetImageROI(aim1.img_hue, aim1.selection);
                        CvInvoke.cvSetImageROI(aim1.img_mask, aim1.selection);
                        CvInvoke.cvCalcHist(new IntPtr[] { aim1.img_hue }, aim1.hist, false, aim1.img_mask);

                        CvInvoke.cvGetMinMaxHistValue(aim1.hist, ref min_val, ref max_val, new int[100], new int[100]);
                        CvInvoke.cvResetImageROI(aim1.img_hue);
                        CvInvoke.cvResetImageROI(aim1.img_mask);
                        aim1.trackWindow = aim1.selection;
                        aim1.trackObject = 1;   // histogram ready; skip this branch on later frames
                    }
                    // Back-project the hue histogram, mask it, and let CamShift move the window.
                    CvInvoke.cvCalcBackProject(new IntPtr[] { aim1.img_hue }, aim1.img_backproject, aim1.hist);
                    
                    CvInvoke.cvAnd(aim1.img_backproject, aim1.img_mask, aim1.img_backproject, IntPtr.Zero);
                    CvInvoke.cvCamShift(aim1.img_backproject, aim1.trackWindow, new MCvTermCriteria(20, 10), out aim1.trackComp, out aim1.trackBox);
                    //CvInvoke.cvShowImage("2.jpg", aim1.img_mask);
                    //CvInvoke.cvShowImage("1.jpg", aim1.img_backproject);
                    aim1.trackWindow = aim1.trackComp.rect;
                    aim1.trackBox.angle = aim1.trackBox.angle + 90;     // rotate so the drawn ellipse matches the target orientation
                    //CvInvoke.cvEllipseBox(colorImage, aim1.trackBox, new MCvScalar(255, 0, 255), 3, LINE_TYPE.CV_AA, 0);
                    
                    if (depthFrame != null)
                    {
                        // Map the track-window center through the color->depth map to a 3D point.
                        int x = (int)(aim1.trackWindow.X + aim1.trackWindow.Width / 2);
                        int y = (int)(aim1.trackWindow.Y + aim1.trackWindow.Height / 2);
                        // NOTE(review): index assumes (x, y) are in 640x480 space, but the track
                        // window was derived from the /4-shrunken selection — verify.
                        int s = x + y * 640;
                        textBox1.Text = x.ToString();
                        textBox2.Text = y.ToString();
                        aim1.spacePoint = sensor.CoordinateMapper.MapDepthPointToSkeletonPoint(sensor.DepthStream.Format, depthPoints[s]);
                        textBox3.Text = aim1.spacePoint.Z.ToString();
                        textBox13.Text = aim1.spacePoint.X.ToString();
                        textBox14.Text = aim1.spacePoint.Y.ToString();                                                                      
                    }
                }   
                    
                #endregion aim1目标跟踪
                #region aim2目标跟踪
                // Same pipeline as aim1, using sliders 4-6 and textBoxes 4/5/6/15/16.
                if (aim2.isTrack)
                {
                    if (aim2.firstImage)
                    {

                        aim2.img_hsv = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 3);
                        aim2.img_hue = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim2.img_mask = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim2.img_backproject = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim2.hist = new DenseHistogram(64, new RangeF(0, 180));
                        aim2.firstImage = false;
                    }
                    CvInvoke.cvCvtColor(colorImage, aim2.img_hsv, COLOR_CONVERSION.CV_RGB2HSV);
                    aim2.vmin = slider4.Value;
                    aim2.vmax = slider5.Value;
                    aim2.smin = slider6.Value;
                    CvInvoke.cvInRangeS(aim2.img_hsv, new MCvScalar(0, aim2.smin, Math.Min(aim2.vmin, aim2.vmax), 0), new MCvScalar(180, 256, Math.Max(aim2.vmin, aim2.vmax), 0), aim2.img_mask);
                    CvInvoke.cvSplit(aim2.img_hsv, aim2.img_hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                    if (aim2.trackObject == -1)
                    {

                        float max_val = 0.0F;
                        float min_val = 0.0F;
                        aim2.selection.X /= 4;
                        aim2.selection.Y /= 4;
                        aim2.selection.Width /= 4;
                        aim2.selection.Height /= 4;
                        CvInvoke.cvSetImageROI(aim2.img_hue, aim2.selection);
                        CvInvoke.cvSetImageROI(aim2.img_mask, aim2.selection);
                        CvInvoke.cvCalcHist(new IntPtr[] { aim2.img_hue }, aim2.hist, false, aim2.img_mask);

                        CvInvoke.cvGetMinMaxHistValue(aim2.hist, ref min_val, ref max_val, new int[100], new int[100]);
                        CvInvoke.cvResetImageROI(aim2.img_hue);
                        CvInvoke.cvResetImageROI(aim2.img_mask);
                        aim2.trackWindow = aim2.selection;
                        aim2.trackObject = 1;
                    }
                    CvInvoke.cvCalcBackProject(new IntPtr[] { aim2.img_hue }, aim2.img_backproject, aim2.hist);
                    CvInvoke.cvAnd(aim2.img_backproject, aim2.img_mask, aim2.img_backproject, IntPtr.Zero);
                    CvInvoke.cvCamShift(aim2.img_backproject, aim2.trackWindow, new MCvTermCriteria(20, 10), out aim2.trackComp, out aim2.trackBox);
                    aim2.trackWindow = aim2.trackComp.rect;
                    aim2.trackBox.angle = aim2.trackBox.angle + 90;

                    if (depthFrame != null)
                    {
                        int x = (int)(aim2.trackWindow.X + aim2.trackWindow.Width / 2);
                        int y = (int)(aim2.trackWindow.Y + aim2.trackWindow.Height / 2);
                        int s = x + y * 640;
                        textBox4.Text = x.ToString();
                        textBox5.Text = y.ToString();
                        aim2.spacePoint = sensor.CoordinateMapper.MapDepthPointToSkeletonPoint(sensor.DepthStream.Format, depthPoints[s]);
                        textBox6.Text = aim2.spacePoint.Z.ToString();
                        textBox15.Text = aim2.spacePoint.X.ToString();
                        textBox16.Text = aim2.spacePoint.Y.ToString(); 
                    }
                }
                #endregion aim2目标跟踪
                #region aim3目标跟踪
                // Same pipeline as aim1, using sliders 7-9 and textBoxes 7/8/9/17/18.
                if (aim3.isTrack)
                {
                    if (aim3.firstImage)
                    {

                        aim3.img_hsv = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 3);
                        aim3.img_hue = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim3.img_mask = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim3.img_backproject = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim3.hist = new DenseHistogram(64, new RangeF(0, 180));
                        aim3.firstImage = false;
                    }
                    CvInvoke.cvCvtColor(colorImage, aim3.img_hsv, COLOR_CONVERSION.CV_RGB2HSV);
                    aim3.vmin = slider7.Value;
                    aim3.vmax = slider8.Value;
                    aim3.smin = slider9.Value;
                    CvInvoke.cvInRangeS(aim3.img_hsv, new MCvScalar(0, aim3.smin, Math.Min(aim3.vmin, aim3.vmax), 0), new MCvScalar(180, 256, Math.Max(aim3.vmin, aim3.vmax), 0), aim3.img_mask);
                    CvInvoke.cvSplit(aim3.img_hsv, aim3.img_hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                    if (aim3.trackObject == -1)
                    {

                        float max_val = 0.0F;
                        float min_val = 0.0F;
                        aim3.selection.X /= 4;
                        aim3.selection.Y /= 4;
                        aim3.selection.Width /= 4;
                        aim3.selection.Height /= 4;
                        CvInvoke.cvSetImageROI(aim3.img_hue, aim3.selection);
                        CvInvoke.cvSetImageROI(aim3.img_mask, aim3.selection);
                        CvInvoke.cvCalcHist(new IntPtr[] { aim3.img_hue }, aim3.hist, false, aim3.img_mask);

                        CvInvoke.cvGetMinMaxHistValue(aim3.hist, ref min_val, ref max_val, new int[100], new int[100]);
                        CvInvoke.cvResetImageROI(aim3.img_hue);
                        CvInvoke.cvResetImageROI(aim3.img_mask);
                        aim3.trackWindow = aim3.selection;
                        aim3.trackObject = 1;
                    }
                    CvInvoke.cvCalcBackProject(new IntPtr[] { aim3.img_hue }, aim3.img_backproject, aim3.hist);
                    CvInvoke.cvAnd(aim3.img_backproject, aim3.img_mask, aim3.img_backproject, IntPtr.Zero);
                    CvInvoke.cvCamShift(aim3.img_backproject, aim3.trackWindow, new MCvTermCriteria(20, 10), out aim3.trackComp, out aim3.trackBox);
                    //CvInvoke.cvShowImage("3.jpg", aim3.img_backproject);
                    aim3.trackWindow = aim3.trackComp.rect;
                    aim3.trackBox.angle = aim3.trackBox.angle + 90;
                    //CvInvoke.cvEllipseBox(colorImage, aim3.trackBox, new MCvScalar(255, 0, 255), 3, LINE_TYPE.CV_AA, 0);

                    if (depthFrame != null)
                    {
                        int x = (int)(aim3.trackWindow.X + aim3.trackWindow.Width / 2);
                        int y = (int)(aim3.trackWindow.Y + aim3.trackWindow.Height / 2);
                        int s = x + y * 640; 
                        textBox7.Text = x.ToString();
                        textBox8.Text = y.ToString();
                        aim3.spacePoint = sensor.CoordinateMapper.MapDepthPointToSkeletonPoint(sensor.DepthStream.Format, depthPoints[s]);
                        textBox9.Text = aim3.spacePoint.Z.ToString();
                        textBox17.Text = aim3.spacePoint.X.ToString();
                        textBox18.Text = aim3.spacePoint.Y.ToString(); 
                    }
                }
                #endregion aim3目标跟踪
                #region aim4目标跟踪
                // Same pipeline as aim1, using sliders 10-12 and textBoxes 10/11/12/19/20.
                if (aim4.isTrack)
                {
                    // NOTE(review): writing "hah" into textBox1 looks like leftover debug output
                    // (textBox1 normally shows aim1's x) — confirm and remove if so.
                    textBox1.Text = "hah";
                    if (aim4.firstImage)
                    {

                        aim4.img_hsv = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 3);
                        aim4.img_hue = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim4.img_mask = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim4.img_backproject = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(colorImage), IPL_DEPTH.IPL_DEPTH_8U, 1);
                        aim4.hist = new DenseHistogram(64, new RangeF(0, 180));
                        aim4.firstImage = false;
                    }
                    CvInvoke.cvCvtColor(colorImage, aim4.img_hsv, COLOR_CONVERSION.CV_RGB2HSV);
                    aim4.vmin = slider10.Value;
                    aim4.vmax = slider11.Value;
                    aim4.smin = slider12.Value;
                    CvInvoke.cvInRangeS(aim4.img_hsv, new MCvScalar(0, aim4.smin, Math.Min(aim4.vmin, aim4.vmax), 0), new MCvScalar(180, 256, Math.Max(aim4.vmin, aim4.vmax), 0), aim4.img_mask);
                    CvInvoke.cvSplit(aim4.img_hsv, aim4.img_hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
                    if (aim4.trackObject == -1)
                    {

                        float max_val = 0.0F;
                        float min_val = 0.0F;
                        aim4.selection.X /= 4;
                        aim4.selection.Y /= 4;
                        aim4.selection.Width /= 4;
                        aim4.selection.Height /= 4;
                        CvInvoke.cvSetImageROI(aim4.img_hue, aim4.selection);
                        CvInvoke.cvSetImageROI(aim4.img_mask, aim4.selection);
                        CvInvoke.cvCalcHist(new IntPtr[] { aim4.img_hue }, aim4.hist, false, aim4.img_mask);

                        CvInvoke.cvGetMinMaxHistValue(aim4.hist, ref min_val, ref max_val, new int[100], new int[100]);
                        CvInvoke.cvResetImageROI(aim4.img_hue);
                        CvInvoke.cvResetImageROI(aim4.img_mask);
                        aim4.trackWindow = aim4.selection;
                        aim4.trackObject = 1;
                    }
                    CvInvoke.cvCalcBackProject(new IntPtr[] { aim4.img_hue }, aim4.img_backproject, aim4.hist);
                    CvInvoke.cvAnd(aim4.img_backproject, aim4.img_mask, aim4.img_backproject, IntPtr.Zero);
                    CvInvoke.cvCamShift(aim4.img_backproject, aim4.trackWindow, new MCvTermCriteria(20, 10), out aim4.trackComp, out aim4.trackBox);
                    //CvInvoke.cvShowImage("4.jpg", aim4.img_backproject);
                    aim4.trackWindow = aim4.trackComp.rect;
                    aim4.trackBox.angle = aim4.trackBox.angle + 90;
                    //CvInvoke.cvEllipseBox(colorImage, aim4.trackBox, new MCvScalar(255, 0, 255), 3, LINE_TYPE.CV_AA, 0);

                    if (depthFrame != null)
                    {
                        
                        int x = (int)(aim4.trackWindow.X + aim4.trackWindow.Width / 2);
                        int y = (int)(aim4.trackWindow.Y + aim4.trackWindow.Height / 2);
                        int s = x + y * 640;
                        textBox10.Text = x.ToString();
                        textBox11.Text = y.ToString();
                        aim4.spacePoint = sensor.CoordinateMapper.MapDepthPointToSkeletonPoint(sensor.DepthStream.Format, depthPoints[s]);
                        textBox12.Text = aim4.spacePoint.Z.ToString();
                        textBox19.Text = aim4.spacePoint.X.ToString();
                        textBox20.Text = aim4.spacePoint.Y.ToString(); 
                    }
                }
                #endregion 目标跟踪
                // Draw the current track ellipses on top of the frame, one color per target.
                if (aim1.isTrack)
                {
                    CvInvoke.cvEllipseBox(colorImage, aim1.trackBox, new MCvScalar(255, 0, 255), 3, LINE_TYPE.CV_AA, 0);
                }
                if (aim2.isTrack)
                {
                    CvInvoke.cvEllipseBox(colorImage, aim2.trackBox, new MCvScalar(255, 0, 0), 3, LINE_TYPE.CV_AA, 0);
                    //textBox21.Text = ((aim2.spacePoint.Z - aim1.spacePoint.Z) / (aim2.spacePoint.X - aim1.spacePoint.X)).ToString();
                    //textBox22.Text = (aim2.spacePoint.Z - aim1.spacePoint.Z).ToString();
                    //textBox23.Text = (aim2.spacePoint.X - aim1.spacePoint.X).ToString();
                }
                if (aim3.isTrack)
                {
                    CvInvoke.cvEllipseBox(colorImage, aim3.trackBox, new MCvScalar(255, 255, 0), 3, LINE_TYPE.CV_AA, 0);
                }
                if (aim4.isTrack)
                {
                    CvInvoke.cvEllipseBox(colorImage, aim4.trackBox, new MCvScalar(55, 155, 255), 3, LINE_TYPE.CV_AA, 0);
                }
                
               
                // NOTE(review): ToBitmap() allocates a managed Bitmap that is never disposed;
                // only the HBITMAP handle is released below. Confirm and dispose if leaking.
                tmp = colorImage.ToBitmap().GetHbitmap();        // tmp points to the bitmap converted from colorImage
                bitmapSource = Imaging.CreateBitmapSourceFromHBitmap(tmp,
                      IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());          // build a displayable bitmapSource from the bitmap tmp points to
                image1.Source = bitmapSource;
                DeleteObject(tmp);          // release the allocated GDI handle
                #endregion 显示彩色图像
            }
        }
        /// <summary>
        /// Per depth frame: shows the raw 16-bit depth image in image2 and refreshes
        /// the color-to-depth coordinate map (depthPoints) used by the color handler.
        /// </summary>
        void kinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            depthFrame = e.OpenDepthImageFrame();
            if (depthFrame != null)
            {
                #region 显示深度图像
                depthFrame.CopyPixelDataTo(depthPixelData);
                depthBitMap.WritePixels(depthImageBitmapRect, depthPixelData, depthImageStride, 0);     // write the captured pixel data into depthBitMap
                image2.Source = depthBitMap;            // display
                depthFrame.CopyDepthImagePixelDataTo(depthImagePixel);
                sensor.CoordinateMapper.MapColorFrameToDepthFrame(sensor.ColorStream.Format, sensor.DepthStream.Format, depthImagePixel, depthPoints);
                #endregion 显示深度图像
            }
        }
        /// <summary>
        /// Mouse down on the color image: records the top-left corner of the selection
        /// rectangle (scaled from display coords to image coords) for every target
        /// currently in cut-mask mode.
        /// </summary>
        private void ColorImageMouseDown(object sender, MouseButtonEventArgs e)
        {
            System.Windows.Point p = e.GetPosition(image1);
            if (aim1.isCutMask)
            {
                aim1.selection.X =scale* (int)p.X;
                aim1.selection.Y =scale* (int)p.Y;
            }
            if (aim2.isCutMask)
            {
                aim2.selection.X = scale * (int)p.X;
                aim2.selection.Y = scale * (int)p.Y;
            }
            if (aim3.isCutMask)
            {
                aim3.selection.X = scale * (int)p.X;
                aim3.selection.Y = scale * (int)p.Y;
            }
            if (aim4.isCutMask)
            {
                aim4.selection.X = scale * (int)p.X;
                aim4.selection.Y = scale * (int)p.Y;
            }
            
        }
        /// <summary>
        /// Mouse up on the color image: ends selection mode for all targets.
        /// </summary>
        private void ColorImageMouseUp(object sender, MouseButtonEventArgs e)
        {
            aim1.isCutMask = false;
            aim2.isCutMask = false;
            aim3.isCutMask = false;
            aim4.isCutMask = false;
        }
        /// <summary>
        /// Mouse drag on the color image: grows the selection rectangle of any target
        /// in cut-mask mode, relative to its recorded top-left corner.
        /// </summary>
        private void ColorImageMouseMove(object sender, MouseEventArgs e)
        {
            if (Mouse.LeftButton == MouseButtonState.Pressed)
            {
                System.Windows.Point p = e.GetPosition(image1);
                if (aim1.isCutMask && aim1.selection.X > 0)
                {
                    aim1.selection.Width = scale * (int)p.X - aim1.selection.X;
                    aim1.selection.Height = scale * (int)p.Y - aim1.selection.Y;
                }
                if (aim2.isCutMask && aim2.selection.X > 0)
                {
                    aim2.selection.Width = scale * (int)p.X - aim2.selection.X;
                    aim2.selection.Height = scale * (int)p.Y - aim2.selection.Y;
                }
                if (aim3.isCutMask && aim3.selection.X > 0)
                {
                    aim3.selection.Width = scale * (int)p.X - aim3.selection.X;
                    aim3.selection.Height = scale * (int)p.Y - aim3.selection.Y;
                }
                if (aim4.isCutMask && aim4.selection.X > 0)
                {
                    aim4.selection.Width = scale * (int)p.X - aim4.selection.X;
                    aim4.selection.Height = scale * (int)p.Y - aim4.selection.Y;
                }
            }
        }
        /// <summary>
        /// "Select" button for target 1: enters cut-mask mode, invalidates the old
        /// selection, clears target 1's readouts, and stops any active tracking.
        /// </summary>
        private void CutMaskButton1_Click(object sender, RoutedEventArgs e)
        {
            aim1.isCutMask = true;
            aim1.selection.X = -1;      // sentinel: no corner picked yet (guards in MouseMove/FrameReady test > 0)
            aim1.selection.Width = -1;
            textBox1.Text = null;
            textBox2.Text = null;
            textBox3.Text = null;
            textBox13.Text = null;
            textBox14.Text = null;
            if (aim1.isTrack)
            {
                aim1.isTrack = false;
            }
        }       
        /// <summary>
        /// "Track" button for target 1: starts tracking and forces histogram
        /// re-computation from the current selection on the next color frame.
        /// </summary>
        private void AimTrackButton1_Click(object sender, RoutedEventArgs e)
        {
            aim1.isTrack = true;
            aim1.firstImage = true;
            aim1.trackObject = -1;
        }
        /// <summary>"Select" button for target 2 — see CutMaskButton1_Click.</summary>
        private void CutMaskButton2_Click(object sender, RoutedEventArgs e)
        {
            aim2.isCutMask = true;
            aim2.selection.X = -1;
            aim2.selection.Width = -1;
            textBox4.Text = null;
            textBox5.Text = null;
            textBox6.Text = null;
            textBox15.Text = null;
            textBox16.Text = null;
            if (aim2.isTrack)
            {
                aim2.isTrack = false;
            }
        }
        /// <summary>"Track" button for target 2 — see AimTrackButton1_Click.</summary>
        private void AimTrackButton2_Click(object sender, RoutedEventArgs e)
        {
            aim2.isTrack = true;
            aim2.firstImage = true;
            aim2.trackObject = -1;
        }
        /// <summary>"Select" button for target 3 — see CutMaskButton1_Click.</summary>
        private void CutMaskButton3_Click(object sender, RoutedEventArgs e)
        {
            aim3.isCutMask = true;
            aim3.selection.X = -1;
            aim3.selection.Width = -1;
            textBox7.Text = null;
            textBox8.Text = null;
            textBox9.Text = null;
            textBox17.Text = null;
            textBox18.Text = null;
            if (aim3.isTrack)
            {
                aim3.isTrack = false;
            }
        }      
        /// <summary>"Track" button for target 3 — see AimTrackButton1_Click.</summary>
        private void AimTrackButton3_Click(object sender, RoutedEventArgs e)
        {
            aim3.isTrack = true;
            aim3.firstImage = true;
            aim3.trackObject = -1;
        }       
        /// <summary>"Select" button for target 4 — see CutMaskButton1_Click.</summary>
        private void CutMaskButton4_Click(object sender, RoutedEventArgs e)
        {
            aim4.isCutMask = true;
            aim4.selection.X = -1;
            aim4.selection.Width = -1;
            textBox10.Text = null;
            textBox11.Text = null;
            textBox12.Text = null;
            textBox19.Text = null;
            textBox20.Text = null;
            if (aim4.isTrack)
            {
                aim4.isTrack = false;
            }
        }
        /// <summary>"Track" button for target 4 — see AimTrackButton1_Click.</summary>
        private void AimTrackButton4_Click(object sender, RoutedEventArgs e)
        {
            
            aim4.isTrack = true;
            aim4.firstImage = true;
            aim4.trackObject = -1;
        }
        /// <summary>
        /// "Open port" button: (re)opens the serial port named in comboBoxPort at the
        /// baud rate in comboBoxBaud, and reflects success/failure in the UI.
        /// </summary>
        private void button1_Click(object sender, RoutedEventArgs e)
        {
            String serialPort = comboBoxPort.Text;
            String baudRate = comboBoxBaud.Text;
            if (sp != null)
            {
                if (sp.IsOpen)
                {
                    sp.Close();     // close any previously opened port before reopening
                }
            }
            try
            {
                sp = new SerialPort(serialPort);
                sp.Open();
                // NOTE(review): BaudRate is assigned after Open(); SerialPort allows this,
                // but setting it before Open() is the conventional order — confirm intent.
                sp.BaudRate = int.Parse(baudRate);
                portStateColor.Background = new SolidColorBrush(System.Windows.Media.Color.FromRgb(0, 255, 0));
                portStateText.Content = "打开串口成功";
                buttonSendOrder.IsEnabled = true;
            }
            catch (Exception)
            {
                // any failure (bad port name, port busy, unparsable baud) -> show failure state
                portStateColor.Background = new SolidColorBrush(System.Windows.Media.Color.FromRgb(255, 0, 0));
                portStateText.Content = "打开串口失败";
                buttonSendOrder.IsEnabled = false;
                textBlockReceive.Text = "";
            }  
        }
        /// <summary>
        /// "Send" button: writes the command text to the serial port and appends the
        /// reply to the receive box. NOTE(review): ReadLine() blocks the UI thread
        /// until a newline arrives — confirm the robot always replies promptly.
        /// </summary>
        private void button2_Click(object sender, RoutedEventArgs e)
        {
            if (sp.IsOpen)
            {
                String sendText = textBoxSend.Text;
                sp.WriteLine(sendText);
                textBlockReceive.Text += sp.ReadLine();
            }
        }
        /// <summary>
        /// Window-closing hook: closes the serial port if it is open.
        /// </summary>
        private void MainWindowClosing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            if (sp != null)
            {
                if (sp.IsOpen)
                {
                    sp.Close();
                }
            }
        }

        /// <summary>
        /// Click on the depth image: reads the raw depth at the clicked pixel, strips
        /// the player-index bits, and shows it in mm plus feet/inches.
        /// NOTE(review): depthFrame may still be null before the first depth frame
        /// arrives — a click then would throw; confirm whether that needs guarding.
        /// </summary>
        private void DepthImageMouseLeftButtonDown(object sender, MouseButtonEventArgs e)
        {
            System.Windows.Point p = e.GetPosition(image2);
            Int32 pixelIndex = (Int32)(p.X + ((Int32)p.Y * depthFrame.Width));
            Int32 depth = depthPixelData[pixelIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;
            Int32 depthInches = (Int32)(depth * 0.0393700787);      // 0.0393700787 inches per millimeter
            Int32 depthFt = depthInches / 12;
            depthInches = depthInches % 12;
            textBox6.Text = String.Format("{0}mm~{1}'{2}", depth, depthFt, depthInches);
        }

        /// <summary>
        /// "Calculate" button: picks two targets from the combo boxes, shows the Z and
        /// X differences of their 3D points, and the angle of the line between them
        /// (atan of dZ/dX in degrees, quadrant-adjusted). Equal X or Z is reported as
        /// "same object". Skipped when either target has no depth yet (Z == 0).
        /// </summary>
        private void calculateButton_Click(object sender, RoutedEventArgs e)
        {
            int i = comboBoxAimNumber1.SelectedIndex + 1;
            int j = comboBoxAimNumber2.SelectedIndex + 1;
            SkeletonPoint p1 =new SkeletonPoint();
            SkeletonPoint p2 = new SkeletonPoint();
            if (i > 0 && j > 0)
            {
                // encode the (i, j) pair as 1..16 and dispatch to the matching aim pair
                int c = (i-1) * 4 + j;
                switch (c) {
                    case 1 : p1 = aim1.spacePoint; p2 = aim1.spacePoint;break;
                    case 2 : p1 = aim1.spacePoint; p2 = aim2.spacePoint; break;
                    case 3: p1 = aim1.spacePoint; p2 = aim3.spacePoint; break;
                    case 4: p1 = aim1.spacePoint; p2 = aim4.spacePoint; break;
                    case 5: p1 = aim2.spacePoint; p2 = aim1.spacePoint; break;
                    case 6: p1 = aim2.spacePoint; p2 = aim2.spacePoint; break;
                    case 7: p1 = aim2.spacePoint; p2 = aim3.spacePoint; break;
                    case 8: p1 = aim2.spacePoint; p2 = aim4.spacePoint; break;
                    case 9: p1 = aim3.spacePoint; p2 = aim1.spacePoint; break;
                    case 10: p1 = aim3.spacePoint; p2 = aim2.spacePoint; break;
                    case 11: p1 = aim3.spacePoint; p2 = aim3.spacePoint; break;
                    case 12: p1 = aim3.spacePoint; p2 = aim4.spacePoint; break;
                    case 13: p1 = aim4.spacePoint; p2 = aim1.spacePoint; break;
                    case 14: p1 = aim4.spacePoint; p2 = aim2.spacePoint; break;
                    case 15: p1 = aim4.spacePoint; p2 = aim3.spacePoint; break;
                    case 16: p1 = aim4.spacePoint; p2 = aim4.spacePoint; break;
                }
                if (p1.Z !=0 && p2.Z != 0)
                {
                    textBox21.Text = (p1.Z - p2.Z).ToString(); 
                    textBox22.Text = (p1.X - p2.X).ToString();
                    if(p1.X ==p2.X ||p1.Z ==p2.Z)
                    {
                        textBox23.Text = "同一物体";
                    }
                    else
                    {
                        //textBox23.Text = (Math.Atan((p1.Z - p2.Z) / (p1.X - p2.X)) * 180 / Math.PI).ToString();
                        // Quadrant-adjust the atan result so the reported angle reflects the
                        // signs of dX and dZ, not only their ratio.
                        if (p1.X - p2.X < 0 && p1.Z - p2.Z < 0)
                        {
                            textBox23.Text = (Math.Atan((p1.Z - p2.Z) / (p1.X - p2.X)) * 180 / Math.PI).ToString();
                        }
                        else if (p1.X - p2.X > 0 && p1.Z - p2.Z < 0)
                        {
                            textBox23.Text = (180+Math.Atan((p1.Z - p2.Z) / (p1.X - p2.X)) * 180 / Math.PI).ToString();
                        }
                        else if (p1.X - p2.X < 0 && p1.Z - p2.Z > 0)
                        {
                            textBox23.Text = (Math.Atan((p1.Z - p2.Z) / (p1.X - p2.X)) * 180 / Math.PI).ToString();
                        }
                        else if (p1.X - p2.X > 0 && p1.Z - p2.Z > 0)
                        {
                            textBox23.Text = (Math.Atan((p1.Z - p2.Z) / (p1.X - p2.X)) * 180 / Math.PI-180).ToString();
                        }
                    }
                }
            }
        }
        
    }
}
