using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Diagnostics;
using System.Runtime.Remoting;
using Blimpbots.Recognition.Algorithms;
using System.Threading;

using System.Runtime.Remoting.Channels;
using System.Runtime.Remoting.Channels.Tcp;
using System.Runtime.Remoting.Services;

using surfInterop;

namespace Blimpbots.Recognition
{
    public partial class Monitor : Form
    {
        // Target inlier fraction for RANSAC.
        // NOTE(review): only referenced from commented-out iteration-count code below.
        const double GoodFrac = 0.5;

        // namespace Remote is located in Remoting.DLL
        private Remoting.cTransfer mi_Transfer = null;

        // IP address of the camera this monitor tracks.
        private string _camIP;
        // Calibration (intrinsic/extrinsic) loaded for the active camera.
        private CameraProperties _cProps;
        // Frame source: live camera or JPEG playback, chosen in Monitor_Load.
        ICamera c;

        // Recognition models loaded at startup; one per trackable object.
        RecognitionModel[] models = null;

        // Loop-control flag for the worker threads.
        bool run = true;
        // When true, the render thread draws a single frame then stops.
        bool renderOnce = false;

        // Frame-processing and UI-rendering worker threads.
        Thread procThread, renderThread;
        // Serializes cross-thread UI invocations.
        Mutex invokeMutex = new Mutex();
        // Latest timing/diagnostics text, built each processed frame.
        string strTiming;
        // When true, detection results are pushed to the remote service.
        bool _transmitResults = false;

        // Known-depth mode fields — presumably a fixed-Z assumption (mm);
        // not referenced in the visible portion of this file. TODO confirm.
        bool knownZ = true;
        double knownZ_val = 3000;
        double knownZ_factor = 0;

        // Project root used to resolve model/camera/data files.
        string workingPath = "";

        // Generative background keypoint model used to isolate foreground points.
        Generative G = new Generative(3, 10.0F);

        // Running statistics: frames with >=1 detection / total frames processed.
        int TotalDetections = 0;
        int TotalFrames = 0;

        /// <summary>
        /// Attempts to load camera calibration from a ".cam" file named after the
        /// camera's IP address (dots replaced with dashes) under the working path.
        /// </summary>
        /// <param name="camera_ip">Camera IP address used to derive the filename.</param>
        /// <param name="cProps">Receives the loaded calibration, or default
        /// calibration (nominal intrinsics, identity pose) on failure.</param>
        /// <returns>true if the file was found and parsed; false if defaults were substituted.</returns>
        private bool TryGetCameraProperties(string camera_ip, out CameraProperties cProps)
        {
            string camFilename = camera_ip.Replace('.', '-').Trim() + ".cam";
            camFilename = workingPath + camFilename;
            cProps = new CameraProperties();

            bool readSuccess = false;
            if (System.IO.File.Exists(camFilename))
            {
                // BUG FIX: the original never disposed the FileStream, leaking the
                // file handle on both the success and failure paths.
                using (System.IO.FileStream fs = new System.IO.FileStream(camFilename, System.IO.FileMode.Open))
                {
                    try { cProps = CameraProperties.ReadFrom(fs); readSuccess = true; }
                    catch { readSuccess = false; }
                }
            }

            if (!readSuccess)
            {
                // Fall back to nominal intrinsics (focal lengths / principal point
                // for the default camera) and an identity extrinsic pose.
                cProps.intrinsicMatrix = new double[3][]{
                new double[3] { 511.358,  0, 305.259},
                new double[3] { 0,  482.437, 272.629},
                new double[3] { 0,        0,       1}};

                cProps.rotationMatrix = new double[3][]{
                new double[3] { 1,  0, 0 },
                new double[3] { 0,  1, 0 },
                new double[3] { 0,  0, 1 }};

                cProps.translationVector = new double[3] { 0, 0, 0 };

                return false;
            }

            return true;
        }

        /// <summary>
        /// Builds the monitor form for the camera at the given IP address,
        /// resolving the project working path and creating the motion detector.
        /// </summary>
        public Monitor(string camera_ip)
        {
            _camIP = camera_ip;

            // Resolve the project root: three levels above the build output directory.
            string projectRoot = AppDomain.CurrentDomain.BaseDirectory + "..\\..\\..\\";
            workingPath = new System.IO.DirectoryInfo(projectRoot).FullName;

            // Motion detector operates on a quarter-resolution (320x240) frame.
            motion = new MotionDetector(320, 240);
            InitializeComponent();
        }

        //private int gone;

        /// <summary>
        /// Pushes detection results to the remote tracking service via .NET Remoting.
        /// Delivery is best-effort: failures are logged to the console and ignored
        /// so the tracking loop keeps running when the service is unreachable.
        /// </summary>
        /// <param name="data">Per-blimp detection records for this frame.</param>
        /// <param name="t">Capture timestamp of the frame the detections came from.</param>
        private void SendResults(Remoting.blimpData[] data, DateTime t)
        {
            Remoting.kAction k_Action = new Remoting.kAction();

            k_Action.blimps = data;
            k_Action.t = t;
            k_Action.s_Camera = c.Name;
            k_Action.s_Command = "Update";

            string s_URL = "tcp://127.0.0.1:12345/TestService";

            try
            {
                mi_Transfer = (Remoting.cTransfer)Activator.GetObject(
                    typeof(Remoting.cTransfer), s_URL);
                // Triggers the event mi_Transfer.ev_SlaveCall in the Slave.
                // The response is unused, so the previously unused local was removed.
                mi_Transfer.CallSlave(k_Action);
            }
            catch (Exception ex)
            {
                // Intentional swallow: the remote side being down must not
                // crash frame processing. Log for diagnostics only.
                Console.WriteLine(ex.Message);
            }
        }

        //List<InterestWindow> objectWindows = new List<InterestWindow>();

        /// <summary>
        /// Processes a frame with the entire image treated as the foreground region.
        /// </summary>
        private void ProcessFrame(Bitmap data, DateTime frameTs)
        {
            var fullFrame = new Rectangle(0, 0, data.Width, data.Height);
            ProcessFrame(data, new Rectangle[] { fullFrame }, frameTs);
        }

        // Per-stage stopwatches, reset at the start of every processed frame.
        Stopwatch surfWatch = new Stopwatch(),
          bkgWatch = new Stopwatch(),
          ransacWatch = new Stopwatch(),
          clusterWatch = new Stopwatch(),
          matchWatch = new Stopwatch(),
          totalProcess = new Stopwatch();

        // Most recent detections, shared with other threads via lock on this list.
        private List<LikelyObject> lastDetected = new List<LikelyObject>();

        private const int KeyframePeriod = 30; // seconds between keyframes
        // Time of the last whole-image (keyframe) scan.
        private DateTime LastKeyframe = DateTime.Now;

        /// <summary>
        /// Core per-frame pipeline: extracts SURF keypoints from foreground regions,
        /// matches them against all recognition models, estimates each detected
        /// object's heading and depth, renders debug overlays onto
        /// <paramref name="data"/>, and optionally transmits results remotely.
        /// </summary>
        /// <param name="data">Frame to analyze; debug graphics are drawn onto it.</param>
        /// <param name="foregroundRegion">Motion-detector regions restricting the keypoint search.</param>
        /// <param name="frameTs">Capture timestamp of the frame.</param>
        private void ProcessFrame(Bitmap data, Rectangle[] foregroundRegion, DateTime frameTs)
        {
            #region Reset all stopwatches
            surfWatch.Reset();
            bkgWatch.Reset();
            ransacWatch.Reset();
            clusterWatch.Reset();
            matchWatch.Reset();
            totalProcess.Reset();
            #endregion

            Stopwatch slice = new Stopwatch();

            totalProcess.Start();
            slice.Start();

            List<SurfKeypoint> dataKeypointList = new List<SurfKeypoint>(500);

            // Seed the SURF search regions with the motion foreground, plus an
            // inflated window around each model's last known location so tracked
            // objects are not lost when they stop moving.
            LinkedList<Rectangle> surfRegions = new LinkedList<Rectangle>(foregroundRegion);

            foreach (RecognitionModel m in models)
            {
                if (m.LastMatch.Example != null)
                {
                    RectangleF r = m.LastMatch.RubberBand();
                    r.Inflate(30f, 30f);
                    surfRegions.AddLast(new
                        Rectangle((int)r.X, (int)r.Y, (int)r.Width, (int)r.Height));
                }
            }

            TotalFrames = TotalFrames + 1;

            // BUG FIX: the original tested TimeSpan.Seconds (the 0-59 seconds
            // component) instead of TotalSeconds, so the 30-second keyframe
            // period misfired for any elapsed span crossing a minute boundary.
            if (DateTime.Now.Subtract(LastKeyframe).TotalSeconds >= KeyframePeriod)
            {
                // Periodic keyframe: scan the whole image so objects that drifted
                // outside the tracked regions can be reacquired.
                LastKeyframe = DateTime.Now;
                foregroundRegion = new Rectangle[] { new Rectangle(0, 0, data.Width, data.Height) };
            }
            else
                foregroundRegion = Euclidian.Rectangles.Merge(surfRegions);

            #region Render Regions of Interest
            // Darken the whole frame, then re-draw just the regions of interest so
            // the operator can see where the detector is looking.
            // (Clone, Graphics and Brush are now disposed — the original leaked all three.)
            using (Bitmap src = (Bitmap)data.Clone())
            using (Graphics gr = Graphics.FromImage(data))
            using (Brush b = new SolidBrush(Color.FromArgb(55, Color.Black)))
            {
                gr.FillRectangle(b, new Rectangle(0, 0, data.Width, data.Height));

                foreach (Rectangle r in Euclidian.Rectangles.Merge(surfRegions))
                {
                    Rectangle rect = new Rectangle(r.Left, r.Top, r.Width - 1, r.Height - 1);
                    gr.DrawImage(src, rect, rect, GraphicsUnit.Pixel);
                }
            }
            #endregion

            foreach (Rectangle r in foregroundRegion)
            {
                surfWatch.Start();
                SurfKeypoint[] kpts = SurfLocal.Instance.FindKeypoints(data, r, 3.5);
                surfWatch.Stop();

                bkgWatch.Start();
                // Compare this frame to the estimated background keypoints
                G.ObserveRegion(kpts, r);
                bkgWatch.Stop();

                surfWatch.Start();

                // Find the descriptors for only the foreground keypoints
                kpts = SurfLocal.Instance.DescribeKeypoints(G.ForegroundPrediction);

                surfWatch.Stop();

                dataKeypointList.AddRange(kpts);
            }

            SurfKeypoint[] dataKeypoints = dataKeypointList.ToArray();

            // Prepare visualization surface (disposed at the end of this method;
            // unused greenPen/bluePen locals from the original were removed).
            Graphics g = Graphics.FromImage(data);
            g.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;

            List<LikelyObject> detectedObjects = new List<LikelyObject>();

            foreach (RecognitionModel mdl in models)
            {
                LikelyObject? o = DetectObject(dataKeypoints, mdl, g);

                if (o.HasValue)
                {
                    // Require a minimum inlier support before trusting the detection.
                    if (o.Value.WarpModel.Inliers.Count < 6) continue;

                    detectedObjects.Add(o.Value);
                    mdl.LastMatch = o.Value;
                }
            }

            string strObjects = "";

            List<Rectangle> objectWindows = new List<Rectangle>();

            Remoting.blimpData[] blimpData = new Remoting.blimpData[detectedObjects.Count];
            for (int i = 0; i < detectedObjects.Count; i++)
            {
                LikelyObject o = detectedObjects[i];
                // Heuristic confidence: approaches 1 as the inlier count grows.
                blimpData[i].confidence = 1.0 - (1.0 / o.WarpModel.Inliers.Count);

                RectangleF r = o.RubberBand();
                r.Inflate(10f, 10f);

                // Add so the background detector knows to ignore this object
                objectWindows.Add(new Rectangle((int)r.X, (int)r.Y, (int)r.Width, (int)r.Height));

                blimpData[i].angleGood = false;

                NamedPoint center, front;
                if (o.Example.NamedPoints.TryGetValue("center", out center) &&
                    o.Example.NamedPoints.TryGetValue("front", out front))
                {
                    EstimatedPoint[] estPts = new EstimatedPoint[2];

                    Point2D ctrPt = o.Center;

                    estPts[0] = new EstimatedPoint("center", default(Point3D), ctrPt, 0, 0);

                    // Heading of the example's "front" point relative to its center.
                    double ang = Math.Atan2(front.Point.Y - center.Point.Y,
                                            front.Point.X - center.Point.X);

                    Point2D mFt = new Point2D((double)front.Point.X, (double)front.Point.Y);
                    Point2D mCtr = new Point2D((double)center.Point.X, (double)center.Point.Y);

                    double dist = mFt.Distance(mCtr);

                    double rawAng = o.Model.SmoothAngle;

                    if (o.WarpModel.Inliers.Count > 3)
                    {
                        // Median of per-match rotation estimates is robust to outliers.
                        SimpleAngleMedian sam1 = new SimpleAngleMedian();

                        foreach (KeypointMatch kpm in o.WarpModel.Inliers)
                        {
                            sam1.Update(kpm.beta);
                        }
                        if (Math.Abs(Angles.Difference(sam1.Angle(), o.Model.SmoothAngle)) < 3.0)
                        {
                            blimpData[i].angleGood = true;
                            o.Model.UpdateSmoothAngle(Angles.Normalize(sam1.Angle() + ang), 4.5);
                        }

                        double a = sam1.Angle();

                        o.Model.UpdateSmoothAngle(Angles.Normalize(ang - a), 1.5);

                        rawAng = ang - a;
                    }

                    double smAngle = 0;

                    // Use the raw per-frame angle (smoothed angle path left disabled).
                    smAngle = rawAng;

                    // Project the front point out from the detected center along
                    // the estimated heading, scaled to image coordinates.
                    Point2D ftPt = new Point2D(ctrPt.x + dist * o.WarpModel.Scale * Math.Cos(smAngle),
                                               ctrPt.y + dist * o.WarpModel.Scale * Math.Sin(smAngle));

                    estPts[1] = new EstimatedPoint("front", default(Point3D), ftPt, 0, 0);

                    // Draw the heading indicator (Pen now disposed; original leaked one per detection).
                    using (Pen p = new Pen(Color.Aquamarine, 3.0f))
                        g.DrawLine(p, ctrPt.ToPointF(), ftPt.ToPointF());

                    // Infer distance of center
                    double smDepth = o.Model.UpdateSmoothDepth(o.DepthEstimate, o.WarpModel.ScaleDeviation);

                    // Upgrade to 3D positions
                    estPts = o.EstimatePoints3D(estPts, c, smDepth / o.DepthEstimate);

                    blimpData[i].estPts = estPts;
                    blimpData[i].name = o.Model.ModelName;

                    // Accumulate a human-readable position summary (meters) for the overlay.
                    for (int j = 0; j < estPts.Length; j++)
                    {
                        strObjects += string.Format("\n{0} ({1:f2},{2:f2},{3:f2})",
                            estPts[j].Name,
                            estPts[j].Position.X / 1000,
                            estPts[j].Position.Y / 1000,
                            estPts[j].Position.Z / 1000);
                    }
                }

                // Visualize the Object
                Visualization.DrawTagBox(o.Model.ModelName, o.Center.ToPointF(), g);
            }

            JpegSave.SaveJpeg( string.Format("./video/fr{0}.jpg", c.FrameNumber)
                              , data, 92 );

            // Send results back home
            if (_transmitResults) SendResults(blimpData, frameTs);

            if (detectedObjects.Count > 0) TotalDetections++;

            bkgWatch.Start();
            motion.ObserveObjects(objectWindows.ToArray()); // motion detector
            G.ObserveObjects(detectedObjects);  // bkg keypoint detector

            // Publish the detections for the UI module that reflects on detection
            // accuracy and updates example parameters as needed.
            // NOTE(review): this locks the instance the field currently references
            // and then replaces the field; readers must lock the same (stale)
            // instance to synchronize. A dedicated gate object would be safer,
            // but changing it requires auditing the readers — preserved as-is.
            lock (lastDetected)
            {
                lastDetected = detectedObjects;
            }

            bkgWatch.Stop();

            totalProcess.Stop();

            // Release the visualization surface now that all drawing is done
            // (the original leaked it every frame).
            g.Dispose();

            strTiming = string.Format("Regions: {4}\nBackground: {5} ms\nSurf: {0} ms\nMatch: {1} ms\nCluster: {2} ms\nRansac: {3} ms\nSlice: {6} ms",
                surfWatch.ElapsedMilliseconds, matchWatch.ElapsedMilliseconds,
                clusterWatch.ElapsedMilliseconds, ransacWatch.ElapsedMilliseconds, foregroundRegion.Length, bkgWatch.ElapsedMilliseconds, slice.ElapsedMilliseconds);

            strTiming += string.Format("\nDetections: {0} ({1:f2}%)", TotalDetections, 100 * ((double)TotalDetections / TotalFrames));

            strTiming += strObjects;
        }

        /// <summary>
        /// Attempts to fit an affine model to a set of keypoint matches with RANSAC.
        /// Returns null when there are too few matches to fit reliably.
        /// </summary>
        private AffineModel TryFitModel(List<KeypointMatch> matches, CentroidEstimate ce)
        {
            // Need more than six correspondences for a trustworthy affine fit.
            if (matches.Count <= 6)
                return null;

            // Cap the effective match count at 25 when deciding what counts
            // as a "good" RANSAC score.
            int effectiveCount = (matches.Count > 25) ? 25 : matches.Count;
            double goodScore = 1.0 / (effectiveCount * 0.65);

            // Pixel error tolerance grows with the centroid's estimated scale.
            double errThresh = ce.Scale * 10.0;

            ransacWatch.Start();
            // Sample groups of 3 matches per iteration, up to 30 iterations.
            AffineModel fitted = RansacAffine.Run(matches, 3, goodScore, errThresh, 30);
            ransacWatch.Stop();

            return fitted;
        }

        /// <summary>
        /// Tries to detect one instance of <paramref name="mdl"/> in the frame's
        /// keypoints: matches each training example against the data, propagates
        /// match/no-match status across corresponding keypoints in other examples
        /// to avoid redundant matching, clusters the matches, fits a warp model per
        /// cluster, and returns the best-scoring candidate (or null if none).
        /// </summary>
        /// <param name="dataKeypoints">Described keypoints from the current frame.</param>
        /// <param name="mdl">The recognition model (set of examples) to look for.</param>
        /// <param name="g">Debug drawing surface, passed through to the warp fitter.</param>
        private Nullable<LikelyObject> DetectObject(SurfKeypoint[] dataKeypoints, RecognitionModel mdl, Graphics g)
        {
            List<LikelyObject> choices = new List<LikelyObject>();

            // First check, see if the last model still works)
            //if (mdl.LastMatch.Example != null)
            //{
            //    SurfKeypoint[] inside = mdl.LastMatch.FilterKeypoints(dataKeypoints);
            //    Matching oldMatch = new Matching(mdl.LastMatch.Example.ModelKeypoints);
            //    List<KeypointMatch> windowMatches = oldMatch.Match(inside, 0.8);
            //    double errThresh = 15.0 * mdl.LastMatch.AffineModel.Scale;
            //    AffineModel m = RansacAffine.TryRefit(windowMatches, mdl.LastMatch.AffineModel.Transform, errThresh);
            //    if (m != null)
            //    {
            //        // HACK: Refit sometimes returns crazy results
            //        if(m.Transform.Skew < 5.0)
            //            return new LikelyObject(mdl.LastMatch.Example, mdl, m);
            //    }
            //}

            // Quick lookup to mark a keypoint's match status
            // matStatus[example][modelKptIdx] and matStatusData[example][dataKptIdx]
            // let later examples skip keypoints already resolved via correspondences.
            Matching.MatchStatus[][] matStatus = new Matching.MatchStatus[mdl.TotalExampleCount][];
            Matching.MatchStatus[][] matStatusData = new Matching.MatchStatus[mdl.TotalExampleCount][];
            for (int i = 0; i < mdl.TotalExampleCount; i++)
            {
                matStatus[i] = new Matching.MatchStatus[mdl.GetExample(i).ModelKeypoints.Count];
                matStatusData[i] = new Matching.MatchStatus[dataKeypoints.Length];
            }

            // Per-example match lists; correspondences found while processing one
            // example are propagated into other examples' lists below.
            List<KeypointMatch>[] matches = new List<KeypointMatch>[mdl.TotalExampleCount];
            for (int i = 0; i < mdl.TotalExampleCount; i++)
                matches[i] = new List<KeypointMatch>(20);
            

            for (int i = 0; i < mdl.TotalExampleCount; i++)
            {
                RecognitionExample eg = mdl.GetExample(i);

                matchWatch.Start();

                Matching matcher = new Matching(eg.ModelKeypoints);
                //Tell the matcher to take advantage of the correspondences
                matcher.SetMatchStatus(matStatus[i], matStatusData[i]);
                List<KeypointMatch> kpm = matcher.Match(dataKeypoints, 0.82);
                matchWatch.Stop();

                // Add these matches to this example
                matches[i].AddRange(kpm);

                // Cluster results
                // NOTE(review): when Cluster() returns null, the `continue` below
                // skips clusterWatch.Stop(), leaving the stopwatch running and
                // inflating the reported cluster time for this frame.
                clusterWatch.Start();
                Clustering clusterer = new Clustering(eg.ModelKeypoints, (int)eg.Width, (int)eg.Height, 10);
                List<Cluster> clusters = clusterer.Cluster(ref dataKeypoints, matches[i]);

                if (clusters == null) continue;

                clusterWatch.Stop();

                // Detect object
                WarpModel wM = FindWarpTransform(clusters, g); 
                //AffineModel aM = FindTransform(clusters);

                if (wM != null)
                {
                    // TODO: some quality checking?
                    LikelyObject ob = new LikelyObject(eg, mdl, wM);
                    choices.Add(ob);
                }

                // Mark the keypoints in other examples

                foreach(KeypointMatch kM in kpm)
                {
                    matStatus[i][kM.modelIdx] = Matching.MatchStatus.Matched;
                    matStatusData[i][kM.dataIdx] = Matching.MatchStatus.Matched;
                
                    // Assume that if we did find the keypoint here,
                    // it also matches against other examples with this keypoint
                    // Further, assume the data keypoint has been uniquely assigned

                    foreach(CorrespondingKeypoint cK in eg.Correspondences[kM.modelIdx])
                    {
                        matStatus[cK.Example][cK.Idx] = Matching.MatchStatus.Matched;
                        matStatusData[cK.Example][kM.dataIdx] = Matching.MatchStatus.Matched;

                        IList<SurfKeypoint> kpts = mdl.GetExample(cK.Example).ModelKeypoints;

                        // Find the coordinate of this kpt in the other example
                        Point2D mdlCoord = new Point2D(kpts[cK.Idx].x, kpts[cK.Idx].y);

                        // was data - model(old), change to data - model(new)
                        // Re-express the rotation offset relative to the other
                        // example's keypoint orientation.
                        double newBeta = kM.beta + eg.ModelKeypoints[kM.modelIdx].orientation
                              - mdl.GetExample(cK.Example).ModelKeypoints[cK.Idx].orientation;

                        // double scaleRatio
                        matches[cK.Example].Add(new KeypointMatch(cK.Idx, kM.dataIdx,
                            new Point2D(kpts[cK.Idx].x, kpts[cK.Idx].y),
                            kM.dataCoord, kM.distance, kM.ratio, 
                            kpts[cK.Idx].scale, kM.dataScale, newBeta));
                    }

                } // end foreach keypointmatch


                for(int j = 0; j < eg.ModelKeypoints.Count; j++)
                {
                    if (matStatus[i][j] == Matching.MatchStatus.Unknown)
                    {
                        // Assume that if we didn't find the keypoint here,
                        // we won't find the keypoint across other examples.

                        matStatus[i][j] = Matching.MatchStatus.NotMatched;
                        foreach(CorrespondingKeypoint cK in eg.Correspondences[j])
                            matStatus[cK.Example][cK.Idx] = Matching.MatchStatus.NotMatched;
                    }
                    if (matStatus[i][j] == Matching.MatchStatus.Matched)
                    {
                        /* already dealt with above */
                    }
                }
            }

            //mdl.sam = new SimpleAngleMedian();
            //foreach (LikelyObject choice in choices)
            //{
            //    if (choice.WarpModel.Inliers.Count <= 5) continue;
            //    NamedPoint center; NamedPoint front;
            //    if (choice.Example.NamedPoints.TryGetValue("center", out center) &&
            //    choice.Example.NamedPoints.TryGetValue("front", out front))
            //    {
            //        double ang = Math.Atan2(front.Point.Y - center.Point.Y,
            //                                front.Point.X - center.Point.X);
            //        foreach (KeypointMatch kpm in choice.WarpModel.Inliers)
            //        {
            //            mdl.sam.Update(kpm.beta);
            //        }
            //    }
            //}

            // Sort all options by score
            // Primary key: more inliers first; tie-break: smaller scale deviation.
            choices.Sort(
                delegate(LikelyObject o1, LikelyObject o2)
                {
                    if (o2.WarpModel.Inliers.Count == o1.WarpModel.Inliers.Count)
                        return o1.WarpModel.ScaleDeviation.CompareTo(o2.WarpModel.ScaleDeviation); 
                    else
                        return o2.WarpModel.Inliers.Count.CompareTo(o1.WarpModel.Inliers.Count);
                });

            // NOTE(review): the disabled comparer below compares o1 to itself
            // (o1.CompareTo(o1)) — it was buggy when written.
            //choices.Sort(
            //    delegate(LikelyObject o1, LikelyObject o2)
            //    {
            //        return o1.WarpModel.ScaleDeviation.CompareTo(o1.WarpModel.ScaleDeviation);
            //    });


            if (choices.Count > 0)
            {
                // Return the first (best?) choice
                return choices[0];
            }

            return null;

        }

        /// <summary>
        /// Searches clusters (largest first) for one whose matches support a warp
        /// model. Skips models with too few inliers or implausible scale; accepted
        /// inliers are drawn on the debug surface. Returns null if no cluster fits.
        /// </summary>
        private WarpModel FindWarpTransform(List<Cluster> cls, Graphics g)
        {
            // Sort clusters by size, largest first.
            cls.Sort(
                delegate(Cluster c1, Cluster c2)
                { return c2.Count.CompareTo(c1.Count); });

            foreach (Cluster cl in cls)
            {
                if (cl.Count <= 6) continue; // HACK: drops weak clusters

                ransacWatch.Start();

                WarpModel m = RansacWarpFit.Instance.Run(cl.Matches, 50);

                ransacWatch.Stop();

                // BUG FIX: the original executed `return m;` even when the fit
                // returned null, so a failed fit on the largest cluster aborted
                // the search without trying the remaining clusters.
                if (m == null) continue;

                // Require minimum inlier support.
                if (m.Inliers.Count < 5) continue;

                // Don't allow extreme scales
                if (m.Scale < 0.10 || m.Scale > 5) continue;

                // Visualize the accepted inliers.
                foreach (KeypointMatch kpm in m.Inliers)
                    g.FillEllipse(Brushes.DodgerBlue, Visualization.PointRectangleF(kpm.dataCoord, 3.0f));

                return m;
            }

            return null;
        }



        /// <summary>
        /// Scans clusters for one whose matches RANSAC can explain with an affine
        /// model of sufficient inlier support; returns the first acceptable model,
        /// or null if no cluster yields one.
        /// </summary>
        private AffineModel FindTransform(List<Cluster> cls)
        {
            // Gather matches from the weak (small) clusters. The intent was to
            // fold these into each larger cluster during fitting; that step is
            // currently disabled, so this union is collected but unused.
            List<KeypointMatch> weakUnion = new List<KeypointMatch>();
            foreach (Cluster cl in cls)
            {
                if (cl.Count <= 6)
                {
                    weakUnion.AddRange(cl.Matches);
                }
            }

            foreach (Cluster cl in cls)
            {
                // Weak clusters are not fitted directly.
                if (cl.Count <= 6)
                {
                    continue;
                }

                // RANSAC stop criterion: for very large clusters, be satisfied
                // once roughly 16 matches (25 * 0.65) are explained.
                int capped = (cl.Count > 25) ? 25 : cl.Count;
                double goodScore = 1.0 / (capped * 0.65);

                // Pixel error tolerance scales with the cluster's median scale.
                double medianScale = cl.Median(2);
                double errThresh = 10.0 * medianScale;

                ransacWatch.Start();
                // Sample triples of matches; up to 60 RANSAC iterations.
                AffineModel candidate = RansacAffine.Run(cl.Matches, 3, goodScore, errThresh, 60);
                ransacWatch.Stop();

                if (candidate == null)
                {
                    continue;
                }

                int inlierCount = candidate.Inliers.Count;

                // Accept strong fits (> 6 inliers) outright; accept 5-inlier fits
                // only at believable scales. Anything else (including exactly 6
                // inliers — preserved quirk) falls through to the next cluster.
                if (inlierCount > 6)
                {
                    return candidate;
                }
                if (inlierCount > 4 && inlierCount < 6 && candidate.Scale < 3.0)
                {
                    return candidate;
                }
            }

            return null;
        }

        // Selects how frames are sourced in Monitor_Load and whether processing
        // loops continuously or runs once.
        private enum TrackingMode
        {
            DebugSequence,   // replay a stored JPEG sequence through the pipeline
            DebugImage,      // process a single stored JPEG, render once
            LiveTrack,       // stream frames from the live camera
            OneFrameLive     // capture and process one live frame, render once
        }

        // Sound player, presumably for audible detection feedback (not used in the
        // visible portion of this file).
        // NOTE(review): hard-coded absolute path — will fail on any other machine.
        System.Media.SoundPlayer sp = new System.Media.SoundPlayer(@"C:\Documents and Settings\Administrator\My Documents\pongUpload\bin\Debug\Sounds\Sound136.wav");

        /// <summary>
        /// Form load: selects the tracking mode, loads recognition models and
        /// camera calibration, then starts frame acquisition/processing according
        /// to the chosen mode.
        /// </summary>
        private void Monitor_Load(object sender, EventArgs e)
        {
            _transmitResults = false; // Turn networking on / off
            TrackingMode mode = TrackingMode.DebugSequence;

            SurfLocal.Instance.DoubleImageSize = true;

            models = new RecognitionModel[] { 
                OpenModel("buzz"),
                OpenModel("tmnt")   // periodic
            };

            string cameraIP = (mode == TrackingMode.LiveTrack) ?
                              _camIP : "192.168.x.x"; // set this based on the debug source

            if (TryGetCameraProperties(cameraIP, out _cProps))
            {
                this.Text = "Tracking " + cameraIP;
            }

            // Cross-example keypoint correspondences speed up matching later.
            foreach (RecognitionModel mdl in models)
                mdl.LearnCorrespondences();

            int width = 640, height = 480;

            buffer = new Bitmap(width, height);
            bufferG = Graphics.FromImage(buffer);

            Bitmap data = null;

            switch (mode)
            {
                case TrackingMode.DebugImage:
                    // Single stored frame, processed and rendered once.
                    string[] jpeg = new string[] { workingPath + "Atrium2\\fr119.jpg" };
                    c = new JpegCamera(width, height, jpeg, _cProps);
                    data = c.CaptureFrame();
                    ProcessFrame(data, DateTime.Now);

                    lastFrame = data;
                    _readyRenderEvent.Set();
                    renderOnce = true;
                    RenderThread();

                    break;

                case TrackingMode.DebugSequence:
                    // Replay a stored JPEG sequence through the full pipeline.
                    int MinFrame = 0;
                    int MaxFrame = 274;

                    string[] jpegList = new string[(MaxFrame + 1) - MinFrame];
                    for (int i = MinFrame; i < MaxFrame + 1; i++)
                    {
                        jpegList[i - MinFrame] = workingPath + string.Format("Atrium2\\fr{0}.jpg", i);
                    }

                    c = new JpegCamera(width, height, jpegList, _cProps);

                    StartPipelineThreads();

                    break;

                case TrackingMode.OneFrameLive:
                    // BUG FIX: the original never constructed a camera in this
                    // mode, so c.CaptureFrame() threw a NullReferenceException.
                    // Create the live camera first, mirroring LiveTrack mode.
                    c = new LiveCamera(width, height, _camIP, _cProps);

                    data = c.CaptureFrame();
                    ProcessFrame(data, DateTime.Now);

                    lastFrame = data;
                    _readyRenderEvent.Set();
                    RenderThread();

                    break;

                case TrackingMode.LiveTrack:

                    c = new LiveCamera(640, 480, _camIP, _cProps);

                    StartPipelineThreads();

                    break;
            }

        }

        /// <summary>
        /// Launches the frame-processing and render worker threads
        /// (shared by DebugSequence and LiveTrack modes).
        /// </summary>
        private void StartPipelineThreads()
        {
            procThread = new Thread(new ThreadStart(ProcThread));
            procThread.Start();

            renderThread = new Thread(new ThreadStart(RenderThread));
            renderThread.Start();
        }

        // Off-screen drawing surface: RenderThread paints each frame into
        // 'buffer' via 'bufferG', and picViewer displays 'buffer' directly.
        Bitmap buffer;
        Graphics bufferG;

        // Auto-reset: signals that a single new camera frame is available.
        private EventWaitHandle _newFrameEvent =
            new EventWaitHandle(false, EventResetMode.AutoReset);

        // Manual-reset: once set it stays signaled so every waiting thread
        // observes application shutdown.
        private EventWaitHandle _appTerminatingEvent =
            new EventWaitHandle(false, EventResetMode.ManualReset);

        // Auto-reset: handshake telling the producer the last frame was consumed.
        private EventWaitHandle _frameConsumedEvent =
            new EventWaitHandle(false, EventResetMode.AutoReset);

        // Guards lastFrame and strTiming, which are handed from ProcThread to
        // RenderThread.
        private Mutex uiDataMutex = new Mutex();

        // Auto-reset: ProcThread sets this after publishing a processed frame;
        // RenderThread waits on it.
        private EventWaitHandle _readyRenderEvent =
            new EventWaitHandle(false, EventResetMode.AutoReset);

        // Foreground/motion segmentation used to restrict recognition to moving
        // regions (see ProcThread).
        private MotionDetector motion;

        // Most recently processed frame, owned by whichever thread holds it:
        // ProcThread publishes it under uiDataMutex, RenderThread takes it
        // (setting the field back to null) and disposes it after drawing.
        Bitmap lastFrame = null;

        /// <summary>
        /// The thread that pumps pixels
        /// </summary>
        /// <summary>
        /// The thread that pumps pixels: polls the camera for frames, runs motion
        /// detection and recognition on each one, then publishes the frame to the
        /// render thread via <see cref="lastFrame"/> and <see cref="_readyRenderEvent"/>.
        /// </summary>
        private void ProcThread()
        {
            // Cap the timing history. The previous code prepended every
            // measurement onto strTiming, so the string (and the label fed from
            // it in RenderThread) grew without bound for the process lifetime.
            const int MaxTimingLines = 10;

            Stopwatch w = new Stopwatch();

            w.Start();

            while (run)
            {
                Bitmap data = c.LastFrame; // returns whenever a frame is ready
                if (data == null)
                {
                    // No frame available yet (or the sequence ran out) -- poll again shortly.
                    System.Threading.Thread.Sleep(250);
                    continue;
                }

                DateTime frameTs = c.LastFrameTimestamp;
                Rectangle[] motionRects = motion.PredictForeground(data);
                ProcessFrame(data, motionRects, frameTs);
                w.Stop();

                // Prepend the newest measurement, truncating older entries so the
                // history stays bounded.
                StringBuilder sb = new StringBuilder();
                sb.AppendFormat("Run: {0} ms", w.ElapsedMilliseconds);
                if (strTiming != null)
                {
                    string[] history = strTiming.Split('\n');
                    for (int i = 0; i < history.Length && i < MaxTimingLines - 1; i++)
                        sb.Append('\n').Append(history[i]);
                }
                strTiming = sb.ToString();

                w.Reset(); w.Start();

                // Publish the frame for the render thread, disposing any frame it
                // never consumed to avoid leaking GDI bitmaps.
                uiDataMutex.WaitOne();
                if (lastFrame != null) lastFrame.Dispose();
                lastFrame = data;
                uiDataMutex.ReleaseMutex();

                _readyRenderEvent.Set();

            }
        }

        /// <summary>
        /// UI-facing thread: waits for ProcThread to publish a processed frame,
        /// draws it into the off-screen buffer, computes a distance string for a
        /// single detected object, and pushes the result to the form via Invoke.
        /// Also runs in-place on the UI thread for TrackingMode.OneFrameLive.
        /// </summary>
        private void RenderThread()
        {
            while (run)
            {
                // Wait for ProcThread's signal (FormClosing also sets this so the
                // loop can observe run == false and exit).
                _readyRenderEvent.WaitOne();

                if (!run) break; // abort if signaled

                Nullable<LikelyObject> lastObject = null;
                Bitmap toRender = null;
                string timing = null;

                // Take ownership of the published frame and timing string.
                uiDataMutex.WaitOne();

                lock (lastDetected)
                {
                    // Distance is only shown when exactly one object was detected.
                    if (lastDetected.Count == 1)
                        lastObject = lastDetected[0];
                }

                timing = strTiming;
                toRender = lastFrame;
                lastFrame = null; // this thread now owns (and must dispose) the bitmap
                
                uiDataMutex.ReleaseMutex();

                // NOTE(review): toRender is assumed non-null whenever the event is
                // set with run == true -- TODO confirm no path signals without a frame.
                bufferG.DrawImage(toRender, 0, 0);

                string distString = "";

                if (lastObject.HasValue)
                {
                    LikelyObject o = lastObject.Value;

                    //o.WarpModel.UseSurfScale =
                    EstimatedPoint[] estPts = o.EstimatePoints(c);

                    double ratio = 0;
                    foreach (EstimatedPoint pt in estPts)
                        if (pt.Name == "center")
                        {
                            // Depth is in millimeters; convert to yards for display.
                            double distMeters = pt.Depth / 1000;
                            double distYards = distMeters * 1.0936133;
                            distString = string.Format("{0:f1} yd", distYards);

                            // NOTE(review): the yardage string above is immediately
                            // overwritten with the inlier count -- looks like a
                            // leftover debugging aid; confirm before removing.
                            distString = o.WarpModel.Inliers.Count.ToString();

                            if (ZFixed)
                            {
                                /* Solve for the object's depth iteratively */

                                // Brute-force search (100 mm steps) for the camera
                                // depth whose world Z best matches the operator's
                                // entered known distance (knownZ_val, millimeters).
                                double maxDepthPossible = 25 * 1000; // 25 m
                                double minDepthPossible = 1000; // 1 m
                                double minError = double.MaxValue;
                                double bestDepth = 0;

                                for (double depthTry = minDepthPossible; depthTry < maxDepthPossible; depthTry += 100)
                                {
                                    DotNetMatrix.GeneralMatrix ellipseRot;
                                    Point3D pos = c.EstimateCoordinate(pt.ImagePosition, depthTry, out ellipseRot);
                                    if (Math.Abs(pos.Z - knownZ_val) < minError)
                                    {
                                        bestDepth = depthTry;
                                        minError = Math.Abs(pos.Z - knownZ_val);
                                    }
                                }

                                ratio = bestDepth / pt.Depth;

                                // Nudge the model's stored point distances 10% of
                                // the way toward the measured scale each frame, so
                                // the calibration converges smoothly.
                                foreach (NamedPoint p in o.Example.NamedPoints.Values)
                                {
                                    // Move D towards the measurement
                                    double D = p.Distance;
                                    D = D * (0.90) + (D * ratio * 0.10);
                                    p.Distance = D;
                                }
                            }
                        }
                    }
    
                // Marshal the results onto the UI thread.
                this.Invoke(new MethodInvoker(delegate()
                {
                    picViewer.Image = buffer;
                    lastRunLabel.Text = timing;
                    if(lastObject.HasValue) lblDistance.Text = distString;
                }));

                toRender.Dispose();

                // OneFrameLive mode calls this method directly and only needs a
                // single pass.
                if (renderOnce) break;
            }

        }



        /// <summary>
        /// Loads the named recognition model from the Models folder under the
        /// working path and computes bounding boxes for every example in it.
        /// </summary>
        private RecognitionModel OpenModel(string name)
        {
            string modelPath = workingPath + "Models\\";

            RecognitionModel model = new RecognitionModel();
            model.Open(name, modelPath);

            // Every example needs its bounding box before it can be matched.
            foreach (RecognitionModel.Example example in model.Examples)
                example.Bound();

            return model;
        }

        /// <summary>
        /// Stops the camera and shuts down the worker threads when the form closes.
        /// Threads are asked to exit cooperatively first; Abort is a last resort,
        /// since aborting mid-work can leave shared state (mutexes, GDI objects)
        /// inconsistent.
        /// </summary>
        private void Monitor_FormClosing(object sender, FormClosingEventArgs e)
        {
            run = false;

            // The camera is only created once a tracking mode starts; guard
            // against closing the form before that.
            if (c != null) c.Stop();

            _readyRenderEvent.Set();     // Signal the render thread to continue ( & exit )
            _appTerminatingEvent.Set();  // broadcast shutdown to any other waiters

            StopWorker(renderThread);
            StopWorker(procThread);
        }

        /// <summary>Joins a worker thread with a timeout, aborting only if it refuses to exit.</summary>
        private static void StopWorker(Thread t)
        {
            if (t == null || !t.IsAlive) return;

            if (!t.Join(1000))
            {
                t.Abort();
                t.Join(1000);
            }
        }

        // Designer-wired click handler for the picture viewer; intentionally empty.
        private void picViewer_Click(object sender, EventArgs e)
        {
        }


        /// <summary>
        /// Toggles the MJPEG camera stream. The button caption doubles as the
        /// state flag: "Start MJPEG" means the stream is currently stopped.
        /// </summary>
        private void btnStart_Click_1(object sender, EventArgs e)
        {
            bool currentlyStopped = btnStart.Text == "Start MJPEG";

            if (currentlyStopped)
            {
                btnStart.Text = "Stop MJPEG";
                c.Start();
            }
            else
            {
                btnStart.Text = "Start MJPEG";
                c.Stop();
            }
        }

        /// <summary>
        /// Any manual edit of the yards box invalidates a previously locked-in
        /// distance, reverting the box to its normal (white) appearance.
        /// </summary>
        private void txtYards_TextChanged(object sender, EventArgs e)
        {
            if (!ZFixed) return;

            ZFixed = false;
            txtYards.BackColor = Color.White;
        }

        // True once the operator has locked in a known distance (Enter in
        // txtYards); cleared again as soon as the text is edited.
        private bool ZFixed = false;

        /// <summary>
        /// Pressing Enter in the yards box locks in a known object distance:
        /// parses the text as yards, converts to millimeters (knownZ_val), and
        /// marks the distance fixed (green background) until the text changes.
        /// </summary>
        private void txtYards_KeyDown(object sender, KeyEventArgs e)
        {
            if (ZFixed) return;

            if (e.KeyCode == Keys.Enter)
            {
                e.SuppressKeyPress = true;

                // Same conversion factor used when rendering the distance readout.
                const double YardsPerMeter = 1.0936133;

                double distYd = 0;
                // Require a strictly positive distance: a zero or negative
                // knownZ_val would poison the iterative depth search.
                if (double.TryParse(txtYards.Text, out distYd) && distYd > 0)
                {
                    knownZ_val = (distYd / YardsPerMeter) * 1000; // yards -> meters -> mm

                    ZFixed = true;
                    txtYards.BackColor = Color.Chartreuse;
                    txtYards.SelectAll();
                }
            }
        }

        /// <summary>
        /// Persists every loaded recognition model to the Models folder. Examples
        /// are temporarily unbound because Save cannot handle the bound form, then
        /// re-bound immediately so matching keeps working.
        /// </summary>
        private void btnSave_Click(object sender, EventArgs e)
        {
            // Nothing to save before any models have been opened (models is
            // initialized to null and only assigned once tracking starts).
            if (models == null) return;

            string modelPath = workingPath + "Models\\";
            foreach (RecognitionModel m in models)
            {
                // HACK: Save requires the unbound representation.
                foreach (RecognitionExample re in m.Examples)
                    re.Unbound();

                m.Save(modelPath);

                foreach (RecognitionExample re in m.Examples)
                    re.Bound();
            }
        }

    } // end class

} // end namespace
