﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Threading.Tasks;
using OpenCvSharp.Dnn;
using OpenCvSharp;
using OpenCvSharp.Extensions;
using System.IO;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;


namespace App_Onnx
{
    public class Inference_onnx
    {
        // ONNX Runtime session shared across all Run() calls.
        public InferenceSession session;
        public string imagePath = "";
        // Path to the class-label text file, forwarded to DetectionResult.
        string classer_path;
        // [0]=x scale, [1]=y scale mapping 640x640 model space back to source pixels.
        public float[] factors = new float[2];
        // Fixed 1x3x640x640 NCHW input tensor, refilled on every Run().
        public Tensor<float> input_tensor;
        // NOTE(review): name keeps the original spelling ("ontainer") — it is a public field.
        public List<NamedOnnxValue> input_ontainer = new List<NamedOnnxValue>();
        public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> result_infer;
        public DisposableNamedOnnxValue[] results_onnxvalue;
        public Tensor<float> result_tensors;
        public float[] result_array;
        public StringBuilder sb = new StringBuilder();
        // Last input image as a Mat; kept alive for callers (not disposed here).
        public Mat RunImage;


        /// <summary>
        /// Creates a CPU inference session for the given ONNX model.
        /// </summary>
        /// <param name="Onnx_Path">Path to the .onnx model file.</param>
        /// <param name="class_lable">Path to the class-label file (one label per line).</param>
        public Inference_onnx(string Onnx_Path, string class_lable)
        {
            SessionOptions options = new SessionOptions();
            options.LogSeverityLevel = OrtLoggingLevel.ORT_LOGGING_LEVEL_INFO;
            options.AppendExecutionProvider_CPU(0);
            session = new InferenceSession(Onnx_Path, options);
            input_tensor = new DenseTensor<float>(new[] { 1, 3, 640, 640 });
            this.classer_path = class_lable;
        }

        /// <summary>
        /// Runs inference on a bitmap and returns the parsed result.
        /// </summary>
        /// <param name="inputImg">Source image.</param>
        /// <param name="mode">0 = classification output, 1 = YOLO-style detection output.</param>
        public Result Run(Bitmap inputImg, int mode)
        {
            Mat img_input = BitmapConverter.ToMat(inputImg);
            RunImage = img_input;
            if (img_input.Channels() == 1)
            {
                // BUGFIX: BGR2RGB requires a 3-channel source and throws on grayscale;
                // expand the single channel to 3 instead.
                Cv2.CvtColor(img_input, img_input, ColorConversionCodes.GRAY2BGR);
            }
            // Letterbox onto a square canvas so the 640x640 resize keeps aspect ratio.
            int max_image_length = img_input.Cols > img_input.Rows ? img_input.Cols : img_input.Rows;
            // BUGFIX: dispose the intermediate Mats — they hold native memory that the
            // GC does not reclaim promptly.
            using (Mat max_image = Mat.Zeros(new OpenCvSharp.Size(max_image_length, max_image_length), MatType.CV_8UC3))
            using (Mat image_rgb = new Mat())
            using (Mat resize_image = new Mat())
            {
                Rect roi = new Rect(0, 0, img_input.Cols, img_input.Rows);
                using (Mat canvas_roi = new Mat(max_image, roi))
                {
                    img_input.CopyTo(canvas_roi);
                }
                factors[0] = factors[1] = (float)(max_image_length / 640.0);
                Cv2.CvtColor(max_image, image_rgb, ColorConversionCodes.BGR2RGB);
                Cv2.Resize(image_rgb, resize_image, new OpenCvSharp.Size(640, 640));
                // HWC byte image -> NCHW float tensor normalized to [0,1].
                for (int y = 0; y < resize_image.Height; y++)
                {
                    for (int x = 0; x < resize_image.Width; x++)
                    {
                        Vec3b pixel = resize_image.At<Vec3b>(y, x); // read once instead of three times
                        input_tensor[0, 0, y, x] = pixel[0] / 255f;
                        input_tensor[0, 1, y, x] = pixel[1] / 255f;
                        input_tensor[0, 2, y, x] = pixel[2] / 255f;
                    }
                }
            }

            // BUGFIX: clear the container first — otherwise every call after the first
            // adds a duplicate "images" entry and session.Run() fails.
            input_ontainer.Clear();
            input_ontainer.Add(NamedOnnxValue.CreateFromTensor("images", input_tensor));
            // NOTE(review): result_infer is a disposable collection exposed as a public
            // field; it is intentionally not disposed here because results_onnxvalue and
            // result_tensors remain publicly visible. Callers should not hold them long.
            result_infer = session.Run(input_ontainer);
            results_onnxvalue = result_infer.ToArray();
            result_tensors = results_onnxvalue[0].AsTensor<float>();
            result_array = result_tensors.ToArray();
            DetectionResult result_pro = new DetectionResult(classer_path, factors);
            return result_pro.process_result(result_array, mode);
        }

        /// <summary>
        /// Draws the label(s) with the highest score at the image center:
        /// "OK" in green, anything else in red. Returns the same Mat.
        /// </summary>
        public Mat draw_result(Result result, Mat image)
        {
            if (result.length == 0)
            {
                return image;
            }
            // Hoisted out of the loop: the original recomputed Max() per iteration (O(n^2)).
            float best = result.scores.Max();
            OpenCvSharp.Point center = new OpenCvSharp.Point(image.Width / 2, image.Height / 2);
            for (int i = 0; i < result.length; i++)
            {
                if (result.scores[i] == best)
                {
                    Scalar color = result.classes[i] == "OK" ? Scalar.Green : Scalar.Red;
                    Cv2.PutText(image, result.classes[i], center, HersheyFonts.HersheyDuplex, 5, color, 8);
                }
            }
            return image;
        }

    }
    public class DetectionResult : ResultBase
    {

        /// <summary>
        /// Result-processing constructor.
        /// </summary>
        /// <param name="path">Path to the class-label file.</param>
        /// <param name="scales">Scale factors from model space back to source pixels.</param>
        /// <param name="score_threshold">Minimum confidence for a detection.</param>
        /// <param name="nms_threshold">Non-maximum-suppression IoU threshold.</param>
        public DetectionResult(string path, float[] scales, float score_threshold = 0.3f, float nms_threshold = 0.3f)
        {
            read_class_names(path);
            this.scales = scales;
            this.score_threshold = score_threshold;
            this.nms_threshold = nms_threshold;
        }

        /// <summary>
        /// Converts the raw model output into a Result.
        /// </summary>
        /// <param name="result">Flattened model output tensor.</param>
        /// <param name="mode">0 = per-class classification scores; 1 = YOLOv8-style (4+nc) x 8400 detection output.</param>
        /// <returns>Parsed recognition result.</returns>
        public Result process_result(float[] result, int mode)
        {
            Result result1 = new Result();
            if (mode == 0)
            {
                // Classification: keep every class whose score clears the fixed 0.7 cut-off.
                // BUGFIX: bound the loop by class_names.Length as well, so an output
                // longer than the label table cannot index out of range.
                int limit = Math.Min(result.Length, class_names.Length);
                for (int i = 0; i < limit; i++)
                {
                    if (result[i] > 0.7)
                    {
                        result1.add(result[i], class_names[i]);
                    }
                }
            }
            if (mode == 1)
            {
                // Rows become candidate boxes after transpose: 8400 x (4 + nc).
                // BUGFIX: removed two Mat.Dump() calls that built huge debug strings
                // which were never used.
                Mat result_data = new Mat(4 + nc, 8400, MatType.CV_32F, result);
                result_data = result_data.T();
                // Candidate boxes surviving the score threshold.
                List<Rect> position_boxes = new List<Rect>();
                List<int> class_ids = new List<int>();
                List<float> confidences = new List<float>();
                for (int i = 0; i < result_data.Rows; i++)
                {
                    // Columns [4, 4+nc) hold this candidate's per-class scores.
                    Mat classes_scores = result_data.Row(i).ColRange(4, nc + 4);

                    OpenCvSharp.Point max_classId_point, min_classId_point;
                    double max_score, min_score;
                    // Best class and its score for this candidate.
                    Cv2.MinMaxLoc(classes_scores, out min_score, out max_score,
                        out min_classId_point, out max_classId_point);
                    if (max_score > score_threshold)
                    {
                        // cx,cy,w,h are in 640x640 model space; scales map back to source pixels.
                        float cx = result_data.At<float>(i, 0);
                        float cy = result_data.At<float>(i, 1);
                        float ow = result_data.At<float>(i, 2);
                        float oh = result_data.At<float>(i, 3);
                        Rect box = new Rect
                        {
                            X = (int)((cx - 0.5 * ow) * this.scales[0]),
                            Y = (int)((cy - 0.5 * oh) * this.scales[1]),
                            Width = (int)(ow * this.scales[0]),
                            Height = (int)(oh * this.scales[1])
                        };
                        position_boxes.Add(box);
                        class_ids.Add(max_classId_point.X);
                        confidences.Add((float)max_score);
                    }
                }

                // Non-maximum suppression keeps one box per overlapping cluster.
                // (No need to pre-size 'indexes': the out parameter overwrites it.)
                int[] indexes;
                CvDnn.NMSBoxes(position_boxes, confidences, this.score_threshold, this.nms_threshold, out indexes);

                for (int i = 0; i < indexes.Length; i++)
                {
                    int index = indexes[i];
                    result1.add(confidences[index], position_boxes[index], this.class_names[class_ids[index]]);
                }
            }
            return result1;
        }



        /// <summary>
        /// Draws the recognition outcome at the image center: the top-scoring
        /// label in green if it is "OK", red otherwise; "NG" in red when empty.
        /// </summary>
        /// <param name="result">Recognition result.</param>
        /// <param name="image">Image to draw on.</param>
        /// <returns>The same Mat with the text drawn.</returns>
        public Mat draw_result(Result result, Mat image)
        {
            OpenCvSharp.Point center = new OpenCvSharp.Point(image.Width / 2, image.Height / 2);
            if (result.length == 0)
            {
                Cv2.PutText(image, "NG", center, HersheyFonts.HersheySimplex, 0.6, Scalar.Red, 5);
                return image;
            }
            // Hoisted out of the loop: the original recomputed Max() per iteration.
            // Ties are still all drawn, exactly as before.
            float best = result.scores.Max();
            for (int i = 0; i < result.length; i++)
            {
                if (result.scores[i] == best)
                {
                    Scalar color = result.classes[i] == "OK" ? Scalar.Green : Scalar.Red;
                    Cv2.PutText(image, result.classes[i], center, HersheyFonts.HersheySimplex, 0.6, color, 5);
                }
            }
            return image;
        }

    }

    public class ResultBase
    {
        // Class labels, one per model output index.
        public string[] class_names;

        // Scale factors from model space back to source pixels.
        public float[] scales;

        // Minimum confidence for a detection to be kept.
        public float score_threshold;

        // Non-maximum-suppression IoU threshold.
        public float nms_threshold;

        // Number of classes (class_names.Length).
        public int nc;

        public ResultBase() { }

        /// <summary>
        /// Loads the class-label file into memory (one label per line, UTF-8).
        /// </summary>
        /// <param name="path">Path to the label file.</param>
        public void read_class_names(string path)
        {
            // BUGFIX: the original never disposed its StreamReader, leaking the file
            // handle. File.ReadAllLines opens, reads, and closes in one call.
            class_names = File.ReadAllLines(path, Encoding.UTF8);
            nc = class_names.Length;
        }

    }
    public class Result
    {
        // Number of recorded entries (scores/classes always grow together).
        public int length
        {
            get { return scores.Count; }
        }

        // Predicted class label for each entry.
        public List<string> classes = new List<string>();

        // Confidence score for each entry.
        public List<float> scores = new List<float>();

        // Bounding box for each entry (populated in detection mode only).
        public List<Rect> rects = new List<Rect>();

        /// <summary>
        /// Records one object detection.
        /// </summary>
        /// <param name="score">Prediction confidence.</param>
        /// <param name="rect">Bounding box.</param>
        /// <param name="cla">Class label.</param>
        public void add(float score, Rect rect, string cla)
        {
            this.scores.Add(score);
            this.rects.Add(rect);
            this.classes.Add(cla);
        }

        /// <summary>
        /// Records one classification outcome (no bounding box).
        /// </summary>
        /// <param name="score">Prediction confidence.</param>
        /// <param name="cla">Class label.</param>
        public void add(float score, string cla)
        {
            this.scores.Add(score);
            this.classes.Add(cla);
        }
    }
}
