﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using JobDemo01.Job;

namespace JobDemo01.Job
{
    /// <summary>
    /// Kind tag attached to an <see cref="ImagingResult"/> written to the
    /// imaging context. Note: <c>ImagingPipeline.MoveTo</c> computes result
    /// kinds arithmetically as <c>PatternKind.High + pattern.Kind</c>, so the
    /// relative order of the first three members is load-bearing — do not
    /// reorder.
    /// </summary>
    public enum PatternKind
    {
        High,     // pattern.Kind == 0 (presumably a high-magnification/base capture — TODO confirm naming)
        Align,    // pattern.Kind == 1 (alignment capture — inferred from manual-assist flag usage; verify)
        Mp,       // pattern.Kind == 2 (paired with ManualAssistMeasurement; presumably "measurement point")
        Addr,     // addressing (coarse positioning) capture
        PreDose,  // frames taken during the pre-dose exposure loop
        Focus,    // frames emitted during an auto-focus search
    }

    /// <summary>
    /// Immutable result of one imaging step (capture + pattern match),
    /// written to the <c>ImagingContext</c> as the pipeline progresses.
    /// </summary>
    public record class ImagingResult
    {
        /// <summary>Sequence index from the pipeline's <c>IIndexer</c> at capture time.</summary>
        public int Index { get; init; }

        /// <summary>What stage of the pipeline produced this result.</summary>
        public PatternKind Kind { get; init; }

        /// <summary>True when the result was supplied via manual assist rather than automatic matching.</summary>
        public bool IsManual { get; init; }

        /// <summary>The captured (or operator-supplied) image.</summary>
        public ImageData ImageData { get; init; }

        /// <summary>Pattern-match offset in X (pixels — assumed from the Width-relative check in MoveTo; confirm).</summary>
        public double MatchDx { get; init; }

        /// <summary>Pattern-match offset in Y (pixels — assumed; see MatchDx).</summary>
        public double MatchDy { get; init; }

        /// <summary>Pattern-match score returned by the matching algorithm.</summary>
        public int Score { get; init; }

        /// <summary>Minimum acceptable score. Defaults to 0 when not set by the producer.</summary>
        public int Threshold { get; init; }

        /// <summary>Target coordinate this result was captured at.</summary>
        public Coordinate Coordinate { get; init; }

        /// <summary>The pattern definition that drove this capture.</summary>
        public Pattern Pattern { get; init; }

        /// <summary>Extra data; for Focus results this is an <see cref="ImageQuality"/>.</summary>
        public object Payload { get; init; }

        /// <summary>The addressing result that preceded this capture, if any.</summary>
        public ImagingResult Addressing { get; init; }

        /// <summary>
        /// True when the match is acceptable: manual results always pass,
        /// otherwise <see cref="Score"/> must meet <see cref="Threshold"/>.
        /// NOTE(review): producers that never set <see cref="Threshold"/> get
        /// the default 0, making this trivially true for non-negative scores.
        /// </summary>
        public bool ThresholdReached => IsManual || Score >= Threshold;
    }

    public record class AlignmentHint(double StageX, double StageY, ImageData Image);

    public record class ImageQuality(double Quality, double Current);

    /// <summary>
    /// Drives one imaging sequence per <see cref="MoveTo"/> call: optional
    /// addressing (coarse pattern match to establish a stage offset), optional
    /// auto-focus, pre-dose exposure, image capture and pattern matching —
    /// with manual-assist fallbacks gated by <c>JobEnvironment</c> flags.
    /// Stateful: caches the last coordinate, addressing offset and focus id to
    /// skip redundant hardware operations. Not thread-safe; calls are assumed
    /// sequential.
    /// </summary>
    class ImagingPipeline(ImagingContext context,
        IWaferNavigator nav,
        ISEMClient client,
        IImagingService imagingService,
        JobEnvironment environment,
        JobRunOptions options)
    {
        readonly IIndexer indexer = context.Indexer;
        readonly bool skipAutoFocus = options.SkipAutoFocus;

        // Coordinate handled by the previous MoveTo; used to skip stage moves
        // and re-addressing when the target has not changed.
        Coordinate last;

        // Pattern id whose addressing produced the current `offset`
        // (0 = no valid addressing offset is held).
        int lastAddressingId;
        // Stage correction from addressing or a manual hint, applied to moves.
        Vector offset;

        // Pattern id last auto-focused; lets AutoFocusMode.Once skip repeats.
        int lastFocusId;

        /// <summary>
        /// The navigator in use. Settable so a caller can swap navigators
        /// mid-run (the primary-constructor parameter is reassigned).
        /// </summary>
        public IWaferNavigator Navigator
        {
            get
            {
                return nav;
            }
            set
            {
                nav = value;
            }
        }

        /// <summary>
        /// Moves to coordinate <paramref name="c"/>, running addressing,
        /// auto-focus and pre-dose as configured on <paramref name="pattern"/>,
        /// then captures an image, pattern-matches it, optionally re-centers,
        /// writes the result to the context and returns it.
        /// </summary>
        /// <param name="c">Target coordinate (die X/Y plus in-die offsets — inferred from usage; confirm).</param>
        /// <param name="pattern">Pattern definition driving scan settings, matching and thresholds.</param>
        /// <returns>
        /// The final imaging result — or, on a failed addressing match with
        /// manual assist disabled, the failed addressing result itself.
        /// </returns>
        public async Task<ImagingResult> MoveTo(Coordinate c, Pattern pattern)
        {
            // Honor pause/cancel points before touching hardware.
            await context.CheckPoint().ConfigureAwait(false);

            ImagingResult addr = null;
            if (last != c)
            {
                if (!pattern.UseDifferentialDrive && pattern.Addressing.IsRegistered)
                {
                    lastAddressingId = pattern.Id;
                    (addr, offset) = await Address(c, pattern).ConfigureAwait(false);
                    if (!addr.ThresholdReached)
                    {
                        if (!environment.ManualAssistAddressing)
                        {
                            // Addressing failed and no operator fallback:
                            // drop the cached offset and bail out early.
                            lastAddressingId = 0;
                            offset = default;
                            return addr;
                        }

                        // Operator fallback: wait for a manual hint and derive
                        // the stage offset from the operator's position.
                        var (sx, sy) = nav.GetStageCoordinate(c.X, c.Y, c.Dx + c.AddrOffsetX, c.Dy + c.AddrOffsetY);
                        var hint = await context.AwaitManualAssist().ConfigureAwait(false);
                        addr = new ImagingResult
                        {
                            Index = indexer.Index,
                            Kind = PatternKind.Addr,
                            IsManual = true,
                            ImageData = hint.Image,
                            Threshold = pattern.Addressing.Threshold,
                            Pattern = pattern,
                            MatchDx = 0,
                            MatchDy = 0,
                            Coordinate = c,
                        };
                        offset = new(sx - hint.StageX, sy - hint.StageY);
                    }
                }
                else if (pattern.ParentId != lastAddressingId || c.X != last.X || c.Y != last.Y)
                {
                    // No addressing for this pattern and the cached offset
                    // belongs to another parent/die: invalidate it.
                    lastAddressingId = 0;
                    offset = default;
                }
            }

            // pattern.Kind == 0 appears to mean "capture via the current
            // imaging service, no SEM reconfiguration" — TODO confirm.
            if (pattern.Kind != 0)
            {
                await client.Apply(pattern.Reference.ScanSettings).ConfigureAwait(false);
            }

            var param = pattern.Reference.AutoFocusParam;
            var mode = param.Mode;
            if (skipAutoFocus || pattern.Kind == 0 || pattern.UseDifferentialDrive)
            {
                mode = AutoFocusMode.Disabled;
            }
            switch (mode)
            {
                case AutoFocusMode.Disabled:
                    // Plain move (skipped entirely if already at `c`).
                    if (last != c)
                    {
                        await nav.MoveTo(c.X, c.Y, c.Dx, c.Dy, offset.X, offset.Y).ConfigureAwait(false);
                    }
                    break;
                case AutoFocusMode.Once:
                    // Focus only once per pattern id; afterwards behave as Disabled.
                    if (pattern.Id == lastFocusId)
                    {
                        goto case AutoFocusMode.Disabled;
                    }
                    await AutoFocus(c,
                        offset,
                        pattern.Reference.ScanSettings,
                        pattern.Reference.AutoFocusParam).ConfigureAwait(false);
                    lastFocusId = pattern.Id;
                    break;
                case AutoFocusMode.Always:
                    await AutoFocus(c,
                        offset,
                        pattern.Reference.ScanSettings,
                        pattern.Reference.AutoFocusParam).ConfigureAwait(false);
                    lastFocusId = 0;
                    break;
            }

            // Settling delay after the move, then optional pre-dose exposure.
            await Task.Delay(pattern.Reference.ScanSettings.WaitTime).ConfigureAwait(false);

            await PreDose(pattern.Reference.PreDoseOptions).ConfigureAwait(false);

            // Load the reference bitmap used as the match template.
            byte[] b = [];
            ImageRead.GetBitmap(pattern.Reference.ImagePath, ref b);

            ImageData img;
            if (pattern.Kind == 0)
            {
                img = await imagingService.Current.Take().ConfigureAwait(false);
            }
            else if (environment.IsDYNEnabled)
            {
                img = await client.DYNTake().ConfigureAwait(false);
            }
            else
            {
                img = await client.Take().ConfigureAwait(false);
            }
            var (dx, dy, score) = Match(pattern.Reference.PatternRecognition, b, img);
            // NOTE(review): strict `>` here vs `>=` in ThresholdReached — confirm intended.
            if (score > pattern.Reference.Threshold)
            {
                if (pattern.Reference.DoCentering && last != c)
                {
                    // Re-center and re-match when the match offset exceeds 5%
                    // of the image size in either axis.
                    if (Math.Abs(dx / img.Width) > 0.05 || Math.Abs(dy / img.Height) > 0.05)
                    {
                        var cond = client.Condition;
                        var mat = Transformation.NewTransformation(cond.PixelSize, cond.ImageAngle);
                        var off = mat.Transform(new Vector(-dx, -dy));
                        await nav.MotionControl.RelativeMove(off.X, off.Y).ConfigureAwait(false);
                        img = await client.Take().ConfigureAwait(false);
                        (dx, dy, score) = Match(pattern.Reference.PatternRecognition, b, img);
                    }
                }
            }

            // NOTE(review): Threshold is not set here (defaults to 0), so
            // ThresholdReached is Score >= 0 — manual assist in Write() only
            // triggers for negative scores. Verify this is intentional.
            var mp = new ImagingResult
            {
                Index = indexer.Index,
                Kind = PatternKind.High + pattern.Kind,
                ImageData = img,
                MatchDx = dx,
                MatchDy = dy,
                Score = score,
                Pattern = pattern,
                Coordinate = c,
                Addressing = addr
            };
            var manual = pattern.Kind == 2 ? environment.ManualAssistMeasurement : environment.ManualAssistAlignment;
            // NOTE(review): missing ConfigureAwait(false), unlike every other await in this class.
            var (result, _) = await Write(mp, manual);

            last = c;
            indexer.Next();
            return result;
        }

        /// <summary>
        /// Addressing pass: moves to the addressing offset of <paramref name="c"/>,
        /// captures and matches against the addressing template, and returns the
        /// result plus the stage correction vector. On a manual result, the
        /// correction is derived from the operator hint instead of the match.
        /// </summary>
        async Task<(ImagingResult, Vector)> Address(Coordinate c, Pattern pattern)
        {
            await client.Apply(pattern.Addressing.ScanSettings).ConfigureAwait(false);
            // Addressing only honors Always-mode autofocus (Once is ignored here).
            if (pattern.Addressing.AutoFocusParam.Mode == AutoFocusMode.Always)
            {
                await AutoFocus(c,
                    default,
                    pattern.Addressing.ScanSettings,
                    pattern.Addressing.AutoFocusParam).ConfigureAwait(false);
                lastFocusId = 0;
            }

            var (sx, sy) = nav.GetStageCoordinate(c.X, c.Y, c.Dx + c.AddrOffsetX, c.Dy + c.AddrOffsetY);
            await nav.MotionControl.Move(sx, sy).ConfigureAwait(false);
            await Task.Delay(pattern.Addressing.ScanSettings.WaitTime).ConfigureAwait(false);

            await PreDose(pattern.Addressing.PreDoseOptions).ConfigureAwait(false);

            // Load the addressing template bitmap.
            byte[] b = [];
            ImageRead.GetBitmap(pattern.Addressing.ImagePath, ref b);

            var img = await client.Take().ConfigureAwait(false);
            var (dx, dy, score) = Match(pattern.Addressing.PatternRecognition, b, img);
            var (result, hint) = await Write(new ImagingResult
            {
                Index = indexer.Index,
                Kind = PatternKind.Addr,
                ImageData = img,
                Score = score,
                Threshold = pattern.Addressing.Threshold,
                Pattern = pattern,
                MatchDx = dx,
                MatchDy = dy,
                Coordinate = c,
            }, environment.ManualAssistAlignment);

            if (result.IsManual)
            {
                // Offset = operator-confirmed stage position minus the nominal one.
                return (result, new Vector(hint.StageX - sx, hint.StageY - sy));
            }

            // Convert the pixel-space match offset into stage space.
            var cond = client.Condition;
            var mat = Transformation.NewTransformation(cond.PixelSize, cond.ImageAngle);
            return (result, mat.Transform(new Vector(-dx, -dy)));
        }

        /// <summary>
        /// Writes <paramref name="result"/> to the context. If the score is
        /// below threshold and <paramref name="manual"/> assist is enabled,
        /// waits for an operator hint and returns a manual variant of the
        /// result (hint image, zeroed match values) plus the hint itself.
        /// </summary>
        /// <returns>The (possibly manualized) result and the hint, or null when no assist ran.</returns>
        async Task<(ImagingResult, AlignmentHint)> Write(ImagingResult result, bool manual)
        {
            if (result.ThresholdReached || !manual)
            {
                context.Write(result);
                return (result, null);
            }

            // Start listening for the manual assist BEFORE publishing the
            // failed result — presumably so the response to the published
            // result cannot be missed. Do not reorder.
            Task<AlignmentHint> m = context.AwaitManualAssist();
            context.Write(result);

            var hint = await m.ConfigureAwait(false);
            return (result with
            {
                ImageData = hint.Image,
                Score = 0,
                MatchDx = 0,
                MatchDy = 0,
                IsManual = true
            }, hint);
        }

        /// <summary>
        /// Runs an auto-focus search at the focus offset of <paramref name="c"/>,
        /// streaming each focus frame to the context, then restores averaging
        /// settings and moves back to the target position.
        /// </summary>
        async Task AutoFocus(Coordinate c, Vector offset, ScanSettings scan, AutoFocusParam param)
        {
            // Focus at a dedicated offset from the target, after a settle delay.
            await nav.MoveTo(c.X, c.Y, c.Dx + param.Dx, c.Dy + param.Dy, offset.X, offset.Y).ConfigureAwait(false);
            await Task.Delay(param.WaitTime).ConfigureAwait(false);
            // Temporarily switch averaging if the focus params differ from the scan's.
            if (param.DotAvg != scan.DotAvg || param.LineAvg != scan.LineAvg || param.FrameAvg != scan.FrameAvg)
                await client.ApplyAvg(param.DotAvg, param.LineAvg, param.FrameAvg).ConfigureAwait(false);
            var cond = client.Condition;
            IAsyncEnumerable<(ImageData, float, double)> focus;
            if (param.Type == AutoFocusType.ObjectiveLens)
            {
                double[] gunFocus = [cond.Gunfocus_coarsetuning_in, cond.Gunfocus_finecontrol_in];
                focus = new CurrentFocusUseCase(client).CurrentFocusSearch(gunFocus);
            }
            else
            {
                ushort[] gunFocus = [cond.BV2Voltage, cond.BV3Voltage];
                // (int)param.Type - 1 selects the voltage channel — inferred; confirm mapping.
                focus = new VoltageFocusUseCase(client).VoltageFocusSearch(gunFocus, (int)param.Type - 1);
            }
            // Publish every frame of the focus sweep as a Focus result.
            await foreach (var (img, quality, current) in focus)
            {
                var result = new ImagingResult
                {
                    Kind = PatternKind.Focus,
                    ImageData = img,
                    Payload = new ImageQuality(quality, current)
                };
                context.Write(result);
            }

            // Restore the scan's averaging settings if we changed them above.
            if (param.DotAvg != scan.DotAvg || param.LineAvg != scan.LineAvg || param.FrameAvg != scan.FrameAvg)
                await client.ApplyAvg(scan.DotAvg, scan.LineAvg, scan.FrameAvg).ConfigureAwait(false);

            // Return to the actual target position.
            await nav.MoveTo(c.X, c.Y, c.Dx, c.Dy, offset.X, offset.Y).ConfigureAwait(false);
        }

        /// <summary>
        /// Pre-dose exposure: repeatedly captures frames (at the pre-dose FOV,
        /// frame average 5) until <c>options.WaitTime</c> milliseconds elapse,
        /// publishing each frame, then restores the previous FOV/averaging.
        /// No-op when disabled or WaitTime is 0.
        /// </summary>
        async Task PreDose(PreDoseOptions options)
        {
            if (!options.IsEnabled || options.WaitTime == 0)
            {
                return;
            }

            var cond = client.Condition;
            var settings = client.Settings;
            await settings.SetFrameAverage(5).ConfigureAwait(false);
            await settings.SetFov(options.FOV).ConfigureAwait(false);
            var sw = Stopwatch.StartNew();
            while (sw.ElapsedMilliseconds < options.WaitTime)
            {
                var img = await client.Take().ConfigureAwait(false);
                context.Write(new ImagingResult
                {
                    Index = indexer.Index,
                    Kind = PatternKind.PreDose,
                    ImageData = img,
                });
            }
            await settings.SetFrameAverage(cond.FrameAverage).ConfigureAwait(false);
            await settings.SetFov(cond.Fov).ConfigureAwait(false);
        }

        /// <summary>
        /// Dispatches to the configured pattern-match algorithm.
        /// </summary>
        /// <returns>(dx, dy, score) — match offset and score.</returns>
        /// <exception cref="ArgumentException">Unknown algorithm value.</exception>
        static (double, double, int) Match(PatternRecognition recognition, byte[] pattern, ImageData img)
        {
            var (w, h, data, _) = img;
            return recognition.Algorithm switch
            {
                PatternMatchAlgorithm.FastMatchNormal => Alignment.Align(w, h, pattern, data, 0),
                PatternMatchAlgorithm.EnhancedMatch => Alignment.Align(w, h, pattern, data, 3),
                PatternMatchAlgorithm.FastROIMatch => ROIMatch(recognition, w, h, pattern, data),
                _ => throw new ArgumentException($"Unknown algorithm {recognition.Algorithm}"),
            };
        }

        /// <summary>
        /// ROI-restricted match. RoiX/Y/W/H are fractions of the image size
        /// (0..1 — inferred from the multiplication; confirm) converted to pixels.
        /// </summary>
        static (double, double, int) ROIMatch(PatternRecognition recognition, int w, int h, byte[] pattern, byte[] img)
        {
            var roiX = (int)(recognition.RoiX * w);
            var roiY = (int)(recognition.RoiY * h);
            var roiW = (int)(recognition.RoiW * w);
            var roiH = (int)(recognition.RoiH * h);
            return Alignment.RoiMatch(w, h, pattern, img, roiX, roiY, roiW, roiH);
        }
    }
}
