﻿using SpectralCamera.Extend;
using SpectralCamera.Model;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Net.Sockets;
using System.Net;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Linq.Expressions;
using DevExpress.XtraCharts;
using SpectralCamera.Enum;
using System.Drawing.Imaging;

namespace SpectralCamera
{
    public partial class FormMain : Form
    {
        // Camera connection; stays null until ConnectedCrame() succeeds.
        private Camera camera = null;

        // True while acquisition is running; toggled by btnStartCollecting_Click.
        private bool CollectedStatus = false;

        public FormMain()
        {
            InitializeComponent();
        }

        /// <summary>
        /// Initializes the signal-type combo box and creates the three chart
        /// series (one line per color channel, filled in OnCurveUpdateSlot).
        /// </summary>
        private void FormMain_Load(object sender, EventArgs e)
        {
            CollectedStatus = false;
            btnStartCollecting.Text = "开始采集";

            cmbSiginType.Items.Clear();
            cmbSiginType.Items.Add("原始信号"); // index 0: raw signal
            cmbSiginType.Items.Add("修正信号"); // index 1: corrected signal
            cmbSiginType.SelectedIndex = 0;

            chartData.Series.Add(new DevExpress.XtraCharts.Series("Graph_R", ViewType.Line));
            chartData.Series.Add(new DevExpress.XtraCharts.Series("Graph_G", ViewType.Line));
            chartData.Series.Add(new DevExpress.XtraCharts.Series("Graph_B", ViewType.Line));
        }

        private void btnConnected_Click(object sender, EventArgs e)
        {
            ConnectedCrame();
        }

        /// <summary>
        /// Re-plots the R/G/B curves from one frame of curve data.
        /// Expected layout (as parsed below): 4 header bytes, then interleaved
        /// R,G,B byte triplets — one triplet per X position.
        /// Must be called on the UI thread (see ConnectedCrame, which Invokes it).
        /// </summary>
        /// <param name="bytes">Raw frame buffer; frames shorter than the 4-byte
        /// header simply clear the chart.</param>
        public void OnCurveUpdateSlot(byte[] bytes)
        {
            if (bytes == null)
            {
                return;
            }

            int cnt = (bytes.Length - 4) / 3;

            var rPoints = chartData.Series["Graph_R"].Points;
            var gPoints = chartData.Series["Graph_G"].Points;
            var bPoints = chartData.Series["Graph_B"].Points;

            rPoints.Clear();
            gPoints.Clear();
            bPoints.Clear();

            // Add points directly instead of staging them in six temporary lists.
            for (int i = 0; i < cnt; i++)
            {
                int offset = 4 + i * 3;
                rPoints.Add(new SeriesPoint((double)i, (double)bytes[offset]));
                gPoints.Add(new SeriesPoint((double)i, (double)bytes[offset + 1]));
                bPoints.Add(new SeriesPoint((double)i, (double)bytes[offset + 2]));
            }
        }

        /// <summary>
        /// Wraps a tightly packed RGB buffer (width * height * 3 bytes) in a
        /// 24bpp Bitmap.
        /// NOTE(review): GDI+ Format24bppRgb stores channels in B,G,R order —
        /// if the source buffer is R,G,B the channels come out swapped; the
        /// original copied raw bytes too, so this preserves that behavior.
        /// </summary>
        /// <param name="rgbData">Pixel data, 3 bytes per pixel, no row padding.</param>
        /// <param name="width">Image width in pixels.</param>
        /// <param name="height">Image height in pixels.</param>
        /// <returns>A new Bitmap the caller must Dispose.</returns>
        /// <exception cref="ArgumentNullException">rgbData is null.</exception>
        /// <exception cref="ArgumentException">rgbData is too small.</exception>
        private static Bitmap ConvertRgbToBitmap(byte[] rgbData, int width, int height)
        {
            if (rgbData == null)
            {
                throw new ArgumentNullException("rgbData");
            }
            int rowBytes = width * 3;
            if (rgbData.Length < rowBytes * height)
            {
                throw new ArgumentException("RGB buffer is smaller than width * height * 3.", "rgbData");
            }

            Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb);
            BitmapData bitmapData = bitmap.LockBits(
                new Rectangle(0, 0, width, height),
                ImageLockMode.WriteOnly,
                PixelFormat.Format24bppRgb
            );
            try
            {
                // Bitmap rows are padded to a 4-byte boundary (Stride), while the
                // source buffer is tightly packed — a single whole-buffer copy
                // would shear rows whenever Stride != width * 3. Copy per row.
                for (int y = 0; y < height; y++)
                {
                    System.Runtime.InteropServices.Marshal.Copy(
                        rgbData,
                        y * rowBytes,
                        bitmapData.Scan0 + y * bitmapData.Stride,
                        rowBytes);
                }
            }
            finally
            {
                bitmap.UnlockBits(bitmapData);
            }

            return bitmap;
        }

        /// <summary>
        /// Creates the Camera, subscribes to its curve-data event (marshalling
        /// frames onto the UI thread), and mirrors the camera's current
        /// parameters into the parameter text boxes.
        /// No-ops when a camera has already been created.
        /// </summary>
        public void ConnectedCrame()
        {
            try
            {
                if (camera != null)
                {
                    return; // already connected
                }
                camera = new Camera();
                camera.CurveDataReceived += new Action<byte[]>(data =>
                {
                    // Raised on a background thread; only forward while collecting
                    // and while the form is still alive.
                    if (!CollectedStatus || this.IsDisposed)
                    {
                        return;
                    }
                    try
                    {
                        this.Invoke(new Action(() =>
                        {
                            txtData.Text = string.Join(" ", data);
                            OnCurveUpdateSlot(data);
                        }));
                    }
                    catch (Exception ex)
                    {
                        // The form may be closing while a frame is in flight;
                        // deliberately best-effort, but keep a trace for debugging.
                        System.Diagnostics.Debug.WriteLine(ex);
                    }
                });

                if (camera._params != null)
                {
                    txtFramePeriod.Text = camera._params.Frame.ToString();

                    // Parameter arrays hold one value per channel in R,G,B order
                    // (indices 0..2), matching how the btn*_Click handlers write
                    // them back. The original read [1..3] past the Length >= 3
                    // guard, which throws on a length-3 array.
                    if (camera._params.JifenTime != null && camera._params.JifenTime.Length >= 3)
                    {
                        txtIntegrationTimeR.Text = camera._params.JifenTime[0].ToString();
                        txtIntegrationTimeG.Text = camera._params.JifenTime[1].ToString();
                        txtIntegrationTimeB.Text = camera._params.JifenTime[2].ToString();
                    }
                    if (camera._params.JifenDR != null && camera._params.JifenDR.Length >= 3)
                    {
                        txtIntegralCapacitorR.Text = camera._params.JifenDR[0].ToString();
                        txtIntegralCapacitorG.Text = camera._params.JifenDR[1].ToString();
                        txtIntegralCapacitorB.Text = camera._params.JifenDR[2].ToString();
                    }
                    if (camera._params.Gain != null && camera._params.Gain.Length >= 3)
                    {
                        txtGainR.Text = camera._params.Gain[0].ToString();
                        txtGainG.Text = camera._params.Gain[1].ToString();
                        txtGainB.Text = camera._params.Gain[2].ToString();
                    }
                    if (camera._params.Bias != null && camera._params.Bias.Length >= 3)
                    {
                        txtBigotryR.Text = camera._params.Bias[0].ToString();
                        txtBigotryG.Text = camera._params.Bias[1].ToString();
                        txtBigotryB.Text = camera._params.Bias[2].ToString();
                    }
                }
            }
            catch (Exception ex)
            {
                // Keep the UI alive if connecting fails, but don't lose the error.
                System.Diagnostics.Debug.WriteLine(ex);
            }
        }

        /// <summary>
        /// Toggles acquisition on/off, pins the chart's Y axis, and notifies
        /// the camera of the new state and selected signal type.
        /// </summary>
        private void btnStartCollecting_Click(object sender, EventArgs e)
        {
            // The diagram only exists after the chart has series; the original
            // hard cast could throw before that point.
            XYDiagram diagram = this.chartData.Diagram as XYDiagram;
            if (diagram != null)
            {
                // Fixed 0..50 Y range with no side margins so curves fill the plot.
                diagram.AxisY.WholeRange.SetMinMaxValues(0, 50);
                diagram.AxisY.WholeRange.AutoSideMargins = false;
                diagram.AxisY.WholeRange.SideMarginsValue = 0;
                diagram.AxisY.GridLines.Visible = true;
            }

            CollectedStatus = !CollectedStatus;
            btnStartCollecting.Text = CollectedStatus ? "停止采集" : "开始采集";

            if (camera != null && camera._params != null)
            {
                // NOTE(review): always passes EnumAction.FramePeriod regardless of
                // which parameter changed — looks intentional for start/stop, but
                // worth confirming against the camera protocol.
                camera.OnStart(EnumAction.FramePeriod, CollectedStatus, (EnumsignalType)cmbSiginType.SelectedIndex);
            }
        }

        #region 设置参数
        /// <summary>Pushes the frame period from the UI to the camera.</summary>
        private void btnFramePeriod_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            camera._params.Frame = Convert.ToInt32(txtFramePeriod.Text);
            camera.OnSendParams(EnumAction.FramePeriod);
        }

        #endregion 设置参数

        /// <summary>Pushes the per-channel integration times (R,G,B) to the camera.</summary>
        private void btnIntegrationTime_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            camera._params.JifenTime[0] = Convert.ToInt32(txtIntegrationTimeR.Text);
            camera._params.JifenTime[1] = Convert.ToInt32(txtIntegrationTimeG.Text);
            camera._params.JifenTime[2] = Convert.ToInt32(txtIntegrationTimeB.Text);

            camera.OnSendParams(EnumAction.IntegrationTime);
        }

        /// <summary>Pushes the per-channel integral capacitor values (R,G,B) to the camera.</summary>
        private void btnIntegralCapacitor_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            // Index 0 is the R channel; the original read the B text box here.
            camera._params.JifenDR[0] = Convert.ToInt32(txtIntegralCapacitorR.Text);
            camera._params.JifenDR[1] = Convert.ToInt32(txtIntegralCapacitorG.Text);
            camera._params.JifenDR[2] = Convert.ToInt32(txtIntegralCapacitorB.Text);

            camera.OnSendParams(EnumAction.IntegralCapacitor);
        }

        /// <summary>Pushes the per-channel gains (R,G,B) to the camera.</summary>
        private void btnGain_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            // Index 0 is the R channel; the original read the B text box here.
            camera._params.Gain[0] = Convert.ToInt32(txtGainR.Text);
            camera._params.Gain[1] = Convert.ToInt32(txtGainG.Text);
            camera._params.Gain[2] = Convert.ToInt32(txtGainB.Text);

            camera.OnSendParams(EnumAction.Gain);
        }

        /// <summary>Pushes the per-channel bias values (R,G,B) to the camera.</summary>
        private void btnBigotry_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            camera._params.Bias[0] = Convert.ToInt32(txtBigotryR.Text);
            camera._params.Bias[1] = Convert.ToInt32(txtBigotryG.Text);
            camera._params.Bias[2] = Convert.ToInt32(txtBigotryB.Text);

            camera.OnSendParams(EnumAction.Bigotry);
        }

        /// <summary>Triggers the camera's dark-field correction.</summary>
        private void btnDarkCorrection_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            camera.OnSendParams(EnumAction.DarkCorrection);
        }

        /// <summary>Pushes the per-channel light-calibration values (R,G,B) and saves the correction.</summary>
        private void btnLightCorrection_Click(object sender, EventArgs e)
        {
            if (camera == null || camera._params == null)
            {
                return;
            }

            camera._params.LightCalib[0] = Convert.ToInt32(txtCorrectionR.Text);
            camera._params.LightCalib[1] = Convert.ToInt32(txtCorrectionG.Text);
            camera._params.LightCalib[2] = Convert.ToInt32(txtCorrectionB.Text);
            camera.OnSendParams(EnumAction.SaveCorrection);
        }
    }
}
