﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using Emgu.CV.Structure;
using Emgu.CV;
using System.Threading.Tasks;
using RA.Common;


namespace RA.Services
{
    /// <summary>
    /// Detects edges in a frame, extracts contours and matches them against a set
    /// of known templates. Results are published through the <see cref="Contours"/>,
    /// <see cref="Samples"/> and <see cref="FoundTemplates"/> properties.
    /// </summary>
    public class ImageProcessor : IAgent
    {
        // Processing configuration; initialized from the application-wide Settings.Current.
        Settings _settings;
        public Settings Settings
        {
            get { return _settings; }
            set { _settings = value; }
        }

        // Contours extracted from the last processed frame.
        public List<Contour<Point>> Contours { get; set; }
        // Reference templates the samples are matched against.
        public Templates Templates { get; set; }
        // Templates built from the current frame's contours.
        public Templates Samples { get; set; }
        List<FoundTemplateDesc> _foundTemplates;
        public List<FoundTemplateDesc> FoundTemplates { get { return _foundTemplates; } set { _foundTemplates = value; } }
        public TemplateFinder Finder { get; set; }

        public ImageProcessor()
        {
            Settings = Settings.Current;
            Templates = new Templates();
            Samples = new Templates();
            FoundTemplates = new List<FoundTemplateDesc>();
            Finder = new TemplateFinder();
        }

        /// <summary>
        /// Runs the edge-detection method selected in <see cref="Settings"/> on
        /// <paramref name="bitmap"/>, extracts and filters contours, then matches
        /// them against the loaded templates. No-op when the bitmap is null.
        /// </summary>
        public void ProcessImage(Bitmap bitmap)
        {
            if (bitmap == null)
                return;

            Image<Gray, byte> canny = null;
            Image<Gray, byte> grayFrame = null;
            try
            {
                using (var frame = new Image<Bgr, byte>(bitmap))
                using (var smoothedFrame = Smooth(frame))
                {
                    // The dilated Canny image serves as a noise filter for the
                    // contours found below (see FilterContours).
                    using (var grayForCanny = smoothedFrame.Convert<Gray, byte>())
                    using (var rawCanny = grayForCanny.Canny(new Gray(Settings.CannyThreshold), new Gray(Settings.CannyThreshold)))
                        canny = rawCanny.Dilate(3);

                    grayFrame = DetectEdges(bitmap, smoothedFrame);
                }

                var sourceContours = grayFrame.FindContours(
                    Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_NONE,
                    Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST);

                Contours = FilterContours(sourceContours, Settings.NoiseFilter ? canny : null, grayFrame.Width, grayFrame.Height);
                FindTemplates();
            }
            finally
            {
                // NOTE(review): the contours returned by FindContours appear to own
                // their storage independently of the source image, so disposing the
                // intermediate frames here should be safe — confirm against Emgu 2.x.
                if (canny != null) canny.Dispose();
                if (grayFrame != null) grayFrame.Dispose();
            }
        }

        // Pyramid down + up pass: cheap smoothing that suppresses small-scale noise.
        // Disposes the half-resolution intermediate (leaked in the old implementation).
        private static Image<Bgr, byte> Smooth(Image<Bgr, byte> frame)
        {
            using (var down = frame.PyrDown())
                return down.PyrUp();
        }

        // Produces a binary edge image using the method from Settings.Method.
        // "original" is the unsmoothed input (used by the Canny and adaptive paths);
        // "smoothedFrame" feeds the convolution-style filters.
        private Image<Gray, byte> DetectEdges(Bitmap original, Image<Bgr, byte> smoothedFrame)
        {
            switch (Settings.Method)
            {
                case EMethod.Линейный:
                    return ToGray(EdgeDetection.LinesFilter(smoothedFrame.Bitmap));

                case EMethod.Лапласа:
                    return ToGray(EdgeDetection.LaplasFilter(smoothedFrame.Bitmap));

                case EMethod.Кирша:
                    {
                        Bitmap bm = EdgeDetection.KirshFilter(smoothedFrame.Bitmap);
                        EdgeDetection.HelpKirsh(bm);
                        return ToGray(bm);
                    }

                case EMethod.Собела:
                    {
                        Bitmap bm = EdgeDetection.SobelFilter(smoothedFrame.Bitmap);
                        EdgeDetection.HelpKirsh(bm);
                        return ToGray(bm);
                    }

                case EMethod.Уоллеса:
                    {
                        Bitmap bm = EdgeDetection.UollesFilter(smoothedFrame.Bitmap);
                        EdgeDetection.HelpKirsh(bm);
                        return ToGray(bm);
                    }

                case EMethod.АдаптивноеВыделениеМонохромногоИзображения:
                    {
                        Image<Gray, byte> grayFrame;
                        using (var color = new Image<Bgr, byte>(original))
                            grayFrame = color.Convert<Gray, byte>();
                        // Block size must be odd: even -> +0+1, odd -> +1+1, both yield odd.
                        CvInvoke.cvAdaptiveThreshold(grayFrame, grayFrame, 255,
                            Emgu.CV.CvEnum.ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_MEAN_C,
                            Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY,
                            Settings.AdaptiveThresholdBlockSize + Settings.AdaptiveThresholdBlockSize % 2 + 1,
                            Settings.AdaptiveThresholdParameter);
                        grayFrame._Not();
                        return grayFrame;
                    }

                case EMethod.Канни:
                default:
                    // Same threshold is used for both Canny hysteresis bounds,
                    // matching the historical behavior of this pipeline.
                    return ToGray(EdgeDetection.CannyFilter(original, Settings.CannyThreshold, Settings.CannyThreshold, 3, 0));
            }
        }

        // Converts a filter-produced bitmap to a gray Emgu image and disposes the
        // temporaries (the Image<Bgr,byte> constructor copies the pixel data).
        private static Image<Gray, byte> ToGray(Bitmap bm)
        {
            using (bm)
            using (var color = new Image<Bgr, byte>(bm))
                return color.Convert<Gray, byte>();
        }

        /// <summary>
        /// Builds a sample template from every contour in <see cref="Contours"/> and,
        /// unless <c>Settings.OnlyFindContours</c> is set, matches each sample against
        /// the known templates, filling <see cref="FoundTemplates"/>.
        /// </summary>
        public void FindTemplates()
        {
            // NOTE(review): locking on publicly reassignable properties is fragile
            // (a setter swap changes the monitor object mid-flight). Kept as-is
            // because external code may synchronize on these same instances.
            lock (FoundTemplates)
                FoundTemplates.Clear();
            Samples.Clear();

            lock (Templates)
                Parallel.ForEach<Contour<Point>>(Contours, contour =>
                {
                    var points = contour.ToArray();
                    var sample = new Template(points, contour.Area, Samples.templateSize);
                    lock (Samples)
                        Samples.Add(sample);

                    if (!Settings.OnlyFindContours)
                    {
                        FoundTemplateDesc desc = Finder.FindTemplate(Templates, sample);
                        if (desc != null)
                            lock (FoundTemplates)
                                FoundTemplates.Add(desc);
                    }
                });

            FilterByIntersection(ref _foundTemplates);
        }

        /// <summary>
        /// Drops templates whose bounding rectangle lies inside another template's
        /// (slightly inflated) rectangle: near-equal rectangles keep the better-rated
        /// match, strictly nested ones are discarded.
        /// </summary>
        private static void FilterByIntersection(ref List<FoundTemplateDesc> templates)
        {
            // Sort by bounding-rect area, largest first.
            templates.Sort((t1, t2) => -t1.sample.contour.SourceBoundingRect.Area().CompareTo(t2.sample.contour.SourceBoundingRect.Area()));

            var toDel = new HashSet<int>();
            for (int i = 0; i < templates.Count; i++)
            {
                if (toDel.Contains(i))
                    continue;
                Rectangle bigRect = templates[i].sample.contour.SourceBoundingRect;
                int bigArea = templates[i].sample.contour.SourceBoundingRect.Area();
                // Inflate to tolerate a few pixels of jitter at the borders.
                bigRect.Inflate(4, 4);
                for (int j = i + 1; j < templates.Count; j++)
                {
                    if (!bigRect.Contains(templates[j].sample.contour.SourceBoundingRect))
                        continue;

                    double a = templates[j].sample.contour.SourceBoundingRect.Area();
                    if (a / bigArea > 0.9d)
                    {
                        // Nearly the same size: keep whichever matched better.
                        if (templates[i].rate > templates[j].rate)
                            toDel.Add(j);
                        else
                            toDel.Add(i);
                    }
                    else
                    {
                        // Strictly nested smaller template: drop it.
                        toDel.Add(j);
                    }
                }
            }

            var survivors = new List<FoundTemplateDesc>(templates.Count);
            for (int i = 0; i < templates.Count; i++)
                if (!toDel.Contains(i))
                    survivors.Add(templates[i]);
            templates = survivors;
        }

        /// <summary>
        /// Walks the contour chain and keeps only contours that pass the size and
        /// noise filters configured in <see cref="Settings"/>.
        /// </summary>
        private List<Contour<Point>> FilterContours(Contour<Point> contours, Image<Gray, byte> cannyFrame, int frameWidth, int frameHeight)
        {
            // Anything bigger than 1/5 of the frame is treated as background.
            int maxArea = frameWidth * frameHeight / 5;
            var result = new List<Contour<Point>>();

            for (var c = contours; c != null; c = c.HNext)
            {
                if (Settings.FilterContoursBySize &&
                    (c.Total < Settings.MinContourLength ||
                     c.Area < Settings.MinContourArea || c.Area > maxArea ||
                     c.Area / c.Total <= Settings.MinFormFactor))
                    continue;

                // Null guard added: the original dereferenced cannyFrame whenever
                // NoiseFilter was set, even if no Canny image was supplied.
                if (Settings.NoiseFilter && cannyFrame != null)
                {
                    // A genuine edge should light up the Canny image at one of two
                    // probe points taken from opposite halves of the contour.
                    Point p1 = c[0];
                    Point p2 = c[(c.Total / 2) % c.Total];
                    if (cannyFrame[p1].Intensity <= double.Epsilon && cannyFrame[p2].Intensity <= double.Epsilon)
                        continue;
                }

                result.Add(c);
            }

            return result;
        }

    }
}
