﻿using System;
using System.Collections.Generic;
using System.Windows.Forms;
using TUCAMAPI;
using System.Runtime.InteropServices;
using System.Threading;
using System.IO;
using Eps.Ai.Common;
using System.Drawing;
using InfonavaEye.handler;
using System.Timers;
using InfonavaEye.model;
using Sylan.Common.Util;
using InfonavaEye.common;

namespace InfonavaEye.Forms
{
    public partial class AutoStartStep2Form : Form
    {
        /// <summary>
        /// Raised after this form has closed, so the owner can react (e.g. restore its own UI).
        /// </summary>
        public event Action settingCloseDelegate;

        TUCAM_INIT m_itApi;                 // SDK initialize environment object

        int m_pid;                          // The current used camera PID
        int m_indexCam;                     // The current used camera index
        List<TUCAM_OPEN> m_opCamList;       // The list of opened cameras object
        List<TUCAM_DRAW_INIT> m_itDrawList; // The list of drawing object

        UInt64 m_drawingFramesCnt;          // The count of drawing frames
        TimeSpan m_startTime;               // The start time count
        float m_fps;                        // The current frame speed

        int m_clientWinWidth;               // The current client window width
        int m_clientWinHeight;              // The current client window height
        TUCAM_DRAW m_drawing;               // The drawing object
        TUCAM_FRAME m_frame;                // The frame which get from SDK
        Thread m_waitingThread;             // The waiting frame thread

        TUCAM_TRIGGER_ATTR m_triggerAttr;   // The trigger parameter attribute

        bool m_isWaiting;                   // Is waiting for the image frame
        bool m_isSaving;                    // Is saving the image
        bool m_isRecording;                 // Is recording the video
        bool m_isRoi;                       // Is on ROI mode
        bool m_isTrigger;                   // Is trigger mode

        int m_totalFrames;                  // The saved image total frames
        int m_savedFormat;                  // The format of saved image
        int m_savedFormatCnt;               // The count of saved image format
        int m_captureTimes;                 // The capture times
        int m_savedFramesCnt;               // The count of saved frames
        int m_savedSucceedCnt;              // The count of image succeed

        string m_filePath;                  // The file path where to save image file and video file
        string m_imageName;                 // The saved image name

        System.Timers.Timer m_aeTimer;              // The auto exposure timer
        System.Timers.Timer m_onceAeTimer;          // The once auto exposure timer
        System.Timers.Timer m_autoLeftLevelsTimer;  // The auto left levels timer
        System.Timers.Timer m_autoRightLevelsTimer; // The auto right levels timer
        System.Timers.Timer m_awbTimer;             // The auto white balance timer
        System.Timers.Timer m_onceWbTimer;          // The once white balance timer

        System.Timers.Timer readLocationTimer;      // Polls the current device XYZ position once per second

        /// <summary>
        /// Calibrated origin position (x, y) read from the device when the user confirms.
        /// Only X and Y are populated; see btnSure_Click.
        /// </summary>
        public Rectangle fixZeroLocation;

        public BoxModel boxModel;

        public DeviceHandler deviceHandler;

        public delegate void UpdateFpsInvok(string fpsStr);
        public delegate void FinishedCaptureInvok(int savedSucceedCnt);

        public AutoStartStep2Form()
        {
            InitializeComponent();
            // NOTE(review): cross-thread UI checks are disabled globally; the frame-waiting
            // thread and the worker started in SamplePro_Load update controls directly.
            // A safer long-term fix would be Invoke/BeginInvoke on every UI touch.
            Control.CheckForIllegalCrossThreadCalls = false;
        }

        /// <summary>
        /// Form load: lays out the preview box and crosshair, initializes the SDK,
        /// opens all cameras, starts the preview thread and moves to the first nail.
        /// </summary>
        private void SamplePro_Load(object sender, EventArgs e)
        {
            // Resize the preview box to a 3072x2048 (3:2) aspect ratio and re-center it.
            int drawingBoxWidth = drawingBox.Width;
            drawingBox.Width = (int)(drawingBox.Height / 2048f * 3072f);
            drawingBox.Location = new Point(drawingBox.Location.X + (drawingBoxWidth - drawingBox.Width) / 2, drawingBox.Location.Y);

            #region Video crosshair
            xCrossLine.Width = drawingBox.Width;
            xCrossLine.Height = 1;
            xCrossLine.Location = new Point(drawingBox.Location.X, drawingBox.Location.Y + drawingBox.Height / 2);

            yCrossLine.Width = 1;
            yCrossLine.Height = drawingBox.Height;
            yCrossLine.Location = new Point(drawingBox.Location.X + drawingBox.Width / 2, drawingBox.Location.Y);
            /* xCrossLine.Visible = false;
             yCrossLine.Visible = false;*/
            #endregion

            /* full screen */
            //this.WindowState = FormWindowState.Maximized;

            m_opCamList = new List<TUCAM_OPEN>();
            m_itDrawList = new List<TUCAM_DRAW_INIT>();

            if (TUCAMRET.TUCAMRET_NO_CAMERA == InitApi())
            {
                MessageBox.Show("Please connect the camera.", "Tips");
                return;
            }

            if (TUCAMRET.TUCAMRET_FAILOPEN_CAMERA == OpenAllConnectedCameras())
            {
                MessageBox.Show("Open the opened camera failed!", "Tips");
                return;
            }

            InitAllOpenedCamerasDrawing();

            // Set current selected camera
            m_indexCam = 0;
            m_isSaving = false;
            m_isRecording = false;
            m_clientWinWidth = 0;
            m_clientWinHeight = 0;

            InitAllTimers();
            InitControlRange(m_opCamList[m_indexCam]);
            UpdateInformation(m_opCamList[m_indexCam]);
            this.PerformLayout();
            //          splitContainer1.Panel1.PerformLayout();
            // Auto-start recording
            //buttonLive_Click(null, null);
            // Exposure time
            //numericUpDownMs.Value = 15;
            //buttonExpOK_Click(null, null);

            StartWaitForFrame(m_opCamList[m_indexCam]);
            drawingBox.Refresh();
            btnSure.Text = "移动中";

            // Drive the stage to the first nail on a worker thread after a short delay.
            ThreadHelper.ExcuteThreadTask(() =>
            {
                moveToFirstNail();
            }, 300);
        }

        /// <summary>
        /// Moves the stage to the start point of the first box area and enables the confirm
        /// button. Runs on a worker thread; the btnSure updates rely on cross-thread checks
        /// being disabled in the constructor.
        /// </summary>
        private void moveToFirstNail()
        {
            if (this.boxModel == null || this.deviceHandler == null) return;
            if (this.boxModel.boxAreas != null && this.boxModel.boxAreas.Count > 0)
            {
                var area = this.boxModel.boxAreas[0];
                if (area.startPoint != null)
                {
                    deviceHandler.moveToXYZandCapture(area.startPoint[0], area.startPoint[1], area.startPoint[2], false);
                    btnSure.Enabled = true;
                    btnSure.Text = "开始识别";
                }
            }
        }

        private void SamplePro_Resize(object sender, EventArgs e)
        {
            drawingBox.Refresh();
        }

        /// <summary>
        /// Invoked (via BeginInvoke) when a capture sequence completes. UI updates are
        /// currently commented out.
        /// </summary>
        public void FinishedCapture(int savedSucceedCnt)
        {
           /* buttonRecord.Enabled = true;
            buttonCapture.Enabled = true;

            buttonCapture.Text = "Capture";*/
        }

        /// <summary>
        /// White-balance timer callback. Body is currently disabled (kept for reference).
        /// </summary>
        public void OnTimerUpdateWhiteBalance(object source, System.Timers.ElapsedEventArgs e)
        {
           /* if (m_opCamList.Count > 0)
            {
                double channelValue = 0;
                IntPtr hIdxTUCam = m_opCamList[m_indexCam].hIdxTUCam;

                // Get the current red channel value
                if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Prop_GetValue(hIdxTUCam, (int)TUCAM_IDPROP.TUIDP_CHNLGAIN, ref channelValue, 1))
                {
                    trackBarRed.Value = (int)channelValue;
                    channelValue /= 2.0f;
                    labelValueRed.Text = channelValue.ToString("#0.0");
                }

                // Get the current green channel value
                if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Prop_GetValue(hIdxTUCam, (int)TUCAM_IDPROP.TUIDP_CHNLGAIN, ref channelValue, 2))
                {
                    trackBarGreen.Value = (int)channelValue;
                    channelValue /= 2.0f;
                    labelValueGreen.Text = channelValue.ToString("#0.0");
                }

                // Get the current blue channel value
                if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Prop_GetValue(hIdxTUCam, (int)TUCAM_IDPROP.TUIDP_CHNLGAIN, ref channelValue, 3))
                {
                    trackBarBlue.Value = (int)channelValue;
                    channelValue /= 2.0f;
                    labelValueBlue.Text = channelValue.ToString("#0.0");
                }

                UpdateColorTemperature(m_opCamList[m_indexCam]);
            }*/
        }

        /// <summary>
        /// Tears down the preview thread, timers, SDK drawing/camera handles, then
        /// notifies the owner via settingCloseDelegate.
        /// </summary>
        private void SamplePro_FormClosed(object sender, FormClosedEventArgs e)
        {
            if (m_isWaiting)
            {
                StopWaitForFrame(m_opCamList[m_indexCam]);
            }

            // BUGFIX: the timer is only created in InitAllTimers(), which is skipped when
            // SamplePro_Load returns early (no camera / open failure); guard against null.
            if (readLocationTimer != null)
            {
                readLocationTimer.Stop();
            }
            UnInitAllOpenedCamerasDrawing();
            CloseAllOpenedCameras();
            UnInitApi();
            if (settingCloseDelegate != null)
            {
                settingCloseDelegate.Invoke();
            }
        }

        /// <summary>
        /// Initializes the TUCAM SDK with the current directory as config path and sets
        /// up the image save directory. Returns TUCAMRET_NO_CAMERA when none is connected.
        /// </summary>
        private TUCAMRET InitApi()
        {
            /* Get the current directory.
               NOTE(review): this unmanaged ANSI string is handed to the SDK as
               pstrConfigPath and is intentionally not freed here — the SDK may keep
               the pointer for its lifetime; confirm against the TUCAM SDK docs. */
            IntPtr strPath = Marshal.StringToHGlobalAnsi(System.Environment.CurrentDirectory);

            m_filePath = Marshal.PtrToStringAnsi(strPath) + "\\Image";

            m_itApi.uiCamCount = 0;
            m_itApi.pstrConfigPath = strPath;

            TUCamAPI.TUCAM_Api_Init(ref m_itApi);

            Console.WriteLine("Connect {0} camera", m_itApi.uiCamCount);

            if (0 == m_itApi.uiCamCount)
            {
                return TUCAMRET.TUCAMRET_NO_CAMERA;
            }

            return TUCAMRET.TUCAMRET_SUCCESS;
        }

        private TUCAMRET UnInitApi()
        {
            return TUCamAPI.TUCAM_Api_Uninit();
        }

        /// <summary>
        /// Opens every camera the SDK enumerated and stores the open handles in
        /// m_opCamList. Returns TUCAMRET_FAILOPEN_CAMERA when none could be opened.
        /// </summary>
        private TUCAMRET OpenAllConnectedCameras()
        {
            // Get the camera name
            TUCAM_VALUE_INFO valueInfo;
            valueInfo.nID = (int)TUCAM_IDINFO.TUIDI_CAMERA_MODEL;
            valueInfo.nTextSize = 64;
            valueInfo.nValue = 0;
            valueInfo.pText = IntPtr.Zero;
            m_opCamList.Clear();

            for (uint i = 0; i < m_itApi.uiCamCount; ++i)
            {
                TUCAM_OPEN opCam;
                opCam.uiIdxOpen = i;
                opCam.hIdxTUCam = IntPtr.Zero;

                if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Dev_Open(ref opCam))
                {
                    if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Dev_GetInfo(opCam.hIdxTUCam, ref valueInfo))
                    {
                        //comboBoxName.Items.Add(Marshal.PtrToStringAnsi(valueInfo.pText) /*+ " " + labelValueSn.Text*/);
                    }

                    m_opCamList.Add(opCam);
                }
            }

            if (m_opCamList.Count > 0)
            {
                return TUCAMRET.TUCAMRET_SUCCESS;
            }

            return TUCAMRET.TUCAMRET_FAILOPEN_CAMERA;
        }

        /// <summary>Closes every opened camera handle and clears the list.</summary>
        private TUCAMRET CloseAllOpenedCameras()
        {
            // Guard: the list is null if the form is closed before SamplePro_Load ran.
            if (m_opCamList != null)
            {
                for (int i = 0; i < m_opCamList.Count; ++i)
                {
                    TUCamAPI.TUCAM_Dev_Close(m_opCamList[i].hIdxTUCam);
                }

                m_opCamList.Clear();
            }

            return TUCAMRET.TUCAMRET_SUCCESS;
        }

        /// <summary>
        /// Initializes SDK drawing for every opened camera, targeting the preview
        /// PictureBox, using each camera's reported channel count and resolution.
        /// </summary>
        private void InitAllOpenedCamerasDrawing()
        {
            TUCAM_OPEN opCam;
            TUCAM_VALUE_INFO valueInfo;
            TUCAM_DRAW_INIT itDraw;
            itDraw.hWnd = drawingBox.Handle;
            itDraw.nMode = 0;

            valueInfo.nID = (int)TUCAM_IDINFO.TUIDI_CAMERA_CHANNELS;
            valueInfo.nValue = 1;
            valueInfo.nTextSize = 0;
            valueInfo.pText = IntPtr.Zero;

            m_itDrawList.Clear();

            for (int i = 0; i < m_opCamList.Count; ++i)
            {
                opCam = m_opCamList[i];

                valueInfo.nID = (int)TUCAM_IDINFO.TUIDI_CAMERA_CHANNELS;
                TUCamAPI.TUCAM_Dev_GetInfo(opCam.hIdxTUCam, ref valueInfo);
                itDraw.ucChannels = (byte)valueInfo.nValue;

                valueInfo.nID = (int)TUCAM_IDINFO.TUIDI_CURRENT_WIDTH;
                TUCamAPI.TUCAM_Dev_GetInfo(opCam.hIdxTUCam, ref valueInfo);
                itDraw.nWidth = valueInfo.nValue;

                valueInfo.nID = (int)TUCAM_IDINFO.TUIDI_CURRENT_HEIGHT;
                TUCamAPI.TUCAM_Dev_GetInfo(opCam.hIdxTUCam, ref valueInfo);
                itDraw.nHeight = valueInfo.nValue;

                TUCamAPI.TUCAM_Draw_Init(opCam.hIdxTUCam, itDraw);

                m_itDrawList.Add(itDraw);
            }
        }

        /// <summary>Uninitializes SDK drawing for every opened camera.</summary>
        private void UnInitAllOpenedCamerasDrawing()
        {
            // Guard: the list is null if the form is closed before SamplePro_Load ran.
            if (m_opCamList != null)
            {
                for (int i = 0; i < m_opCamList.Count; ++i)
                {
                    TUCamAPI.TUCAM_Draw_Uninit(m_opCamList[i].hIdxTUCam);
                }
            }

            if (m_itDrawList != null)
            {
                m_itDrawList.Clear();
            }
        }

        /// <summary>
        /// Creates and starts the 1s position-polling timer. The other timers are
        /// currently disabled (kept for reference).
        /// </summary>
        private void InitAllTimers()
        {
            readLocationTimer = new System.Timers.Timer(1000);
            readLocationTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerReadLocation);
            readLocationTimer.AutoReset = true;
            // Marshal Elapsed callbacks onto the UI thread via this form.
            readLocationTimer.SynchronizingObject = this;
            readLocationTimer.Start();

            /* m_onceAeTimer = new System.Timers.Timer(1000);
             m_onceAeTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerOnceAutoExposure);
             m_onceAeTimer.AutoReset = false;
             m_onceAeTimer.SynchronizingObject = this;

             m_autoLeftLevelsTimer = new System.Timers.Timer(1000);
             m_autoLeftLevelsTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerAutoLeftLevels);
             m_autoLeftLevelsTimer.AutoReset = true;
             m_autoLeftLevelsTimer.SynchronizingObject = this;

             m_autoRightLevelsTimer = new System.Timers.Timer(1000);
             m_autoRightLevelsTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerAutoRightLevels);
             m_autoRightLevelsTimer.AutoReset = true;
             m_autoRightLevelsTimer.SynchronizingObject = this;*/

            /* m_awbTimer = new System.Timers.Timer(1000);
             m_awbTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerUpdateWhiteBalance);
             m_awbTimer.AutoReset = true;
             m_awbTimer.SynchronizingObject = this;

             m_onceWbTimer = new System.Timers.Timer(1000);
             m_onceWbTimer.Elapsed += new System.Timers.ElapsedEventHandler(OnTimerUpdateWhiteBalance);
             m_onceWbTimer.AutoReset = false;
             m_onceWbTimer.SynchronizingObject = this;*/
        }

        private void OnTimerReadLocation(object sender, ElapsedEventArgs e)
        {
            this.readXYZ();
        }

        /// <summary>
        /// Preview loop (runs on m_waitingThread): waits for frames from the SDK,
        /// tracks FPS, rescales the drawing area on size changes, draws each frame,
        /// and optionally appends to a recording or saves images to disk.
        /// </summary>
        private void WaitForFrameThreadEntity()
        {
            int clientWinWidth = 0;
            int clientWinHeight = 0;

            int drawingWidth = 0;
            int drawingHeight = 0;
            int drawingOffsetX = 0;
            int drawingOffsetY = 0;

            UInt64 intervalTime = 0;
            IntPtr hIdxTUCam = m_opCamList[m_indexCam].hIdxTUCam;

            while (m_isWaiting)
            {
                // Calculate the frame speed
                m_drawingFramesCnt++;
                TimeSpan stopTime = new TimeSpan(DateTime.Now.Ticks);
                intervalTime = (UInt64)m_startTime.Subtract(stopTime).Duration().TotalMilliseconds;

                if (intervalTime > 1000)
                {
                    m_fps = m_drawingFramesCnt * 1000.0f / intervalTime;

                    //UpdateFpsInvok updateFps = new UpdateFpsInvok(UpdateFps);
                    //BeginInvoke(updateFps, new object[] { m_fps.ToString("#0.0 fps") });

                    m_startTime = stopTime;
                    m_drawingFramesCnt = 0;
                }

                m_frame.ucFormatGet = (byte)TUFRM_FORMATS.TUFRM_FMT_USUAl;
                if (TUCAMRET.TUCAMRET_SUCCESS == TUCamAPI.TUCAM_Buf_WaitForFrame(hIdxTUCam, ref m_frame))
                {
                    // Recompute the letterboxed drawing area when the frame or client size changed.
                    if (drawingBox.Width != m_frame.usWidth || drawingBox.Height != m_frame.usHeight || m_clientWinWidth != clientWinWidth || m_clientWinHeight != clientWinHeight)
                    {
                        clientWinWidth = drawingBox.Width;
                        clientWinHeight = drawingBox.Height;

                        // Fit-to-window scale, truncated to 2 decimals; never upscale past 1:1.
                        float scaleX = clientWinWidth * 1.0f / m_frame.usWidth;
                        float scaleY = clientWinHeight * 1.0f / m_frame.usHeight;
                        float scale = scaleX > scaleY ? scaleY : scaleX;
                        scale = (float)((int)(scale * 100) / 100.0f);

                        if (scale < 1)
                        {
                            drawingWidth = (int)(scale * m_frame.usWidth);
                            drawingHeight = (int)(scale * m_frame.usHeight);
                        }
                        else
                        {
                            drawingWidth = m_frame.usWidth;
                            drawingHeight = m_frame.usHeight;
                        }

                        // Round down to a multiple of 4 (alignment expected by the SDK draw path).
                        drawingWidth = (drawingWidth >> 2) << 2;
                        drawingHeight = (drawingHeight >> 2) << 2;

                        drawingOffsetX = (clientWinWidth - drawingWidth) / 2;
                        drawingOffsetY = (clientWinHeight - drawingHeight) / 2;
                    }

                    // Drawing image
                    if (IntPtr.Zero != m_frame.pBuffer)
                    {
                        // Marshal the frame struct into unmanaged memory for the SDK draw call.
                        // fDeleteOld must be false: the destination was just allocated and
                        // holds uninitialized data (StructureToPtr docs).
                        m_drawing.pFrame = Marshal.AllocHGlobal(Marshal.SizeOf(m_frame));
                        Marshal.StructureToPtr(m_frame, m_drawing.pFrame, false);

                        m_drawing.nDstX = drawingOffsetX;
                        m_drawing.nDstY = drawingOffsetY;
                        m_drawing.nDstWidth = drawingWidth;
                        m_drawing.nDstHeight = drawingHeight;

                        m_drawing.nSrcX = 0;
                        m_drawing.nSrcY = 0;
                        m_drawing.nSrcWidth = m_frame.usWidth;
                        m_drawing.nSrcHeight = m_frame.usHeight;

                        TUCamAPI.TUCAM_Draw_Frame(hIdxTUCam, ref m_drawing);

                        // BUGFIX: this buffer was re-allocated for every frame and never
                        // released, leaking native memory for the lifetime of the preview.
                        // TUCAM_Draw_Frame returns synchronously, so freeing here is
                        // assumed safe — confirm against the TUCAM SDK docs.
                        Marshal.FreeHGlobal(m_drawing.pFrame);
                        m_drawing.pFrame = IntPtr.Zero;
                    }

                    // Saving video
                    if (m_isRecording)
                    {
                        TUCamAPI.TUCAM_Rec_AppendFrame(hIdxTUCam, ref m_frame);
                    }

                    // Saving image
                    if (m_isSaving || m_isTrigger)
                    {
                        string fileName = m_filePath + "\\" + m_imageName + "_" + m_captureTimes + "_" + m_savedFramesCnt++;

                        bool isSucceed = false;
                        IntPtr savePathPtr = IntPtr.Zero;
                        IntPtr frameCopyPtr = IntPtr.Zero;

                        try
                        {
                            do
                            {
                                int fileFormat = m_savedFormat;
                                TUCAM_FILE_SAVE fileSave;

                                savePathPtr = Marshal.StringToHGlobalAnsi(fileName);
                                fileSave.pstrSavePath = savePathPtr;

                                // struct to IntPtr (fDeleteOld=false: destination is fresh memory)
                                frameCopyPtr = Marshal.AllocHGlobal(Marshal.SizeOf(m_frame));
                                Marshal.StructureToPtr(m_frame, frameCopyPtr, false);
                                fileSave.pFrame = frameCopyPtr;

                                // Format RAW is handled separately below; strip it here.
                                if (0 != (fileFormat & (int)TUIMG_FORMATS.TUFMT_RAW))
                                {
                                    fileFormat &= ~(int)TUIMG_FORMATS.TUFMT_RAW;
                                }

                                if (0 != fileFormat)
                                {
                                    fileSave.nSaveFmt = fileFormat;

                                    // Save other format data(TIFF/BMP/PNG/JPEG)
                                    if (TUCAMRET.TUCAMRET_SUCCESS != TUCamAPI.TUCAM_File_SaveImage(hIdxTUCam, fileSave))
                                    {
                                        break;
                                    }
                                }

                                if (0 != (m_savedFormat & (int)TUIMG_FORMATS.TUFMT_RAW))
                                {
                                    fileSave.nSaveFmt = (int)TUIMG_FORMATS.TUFMT_RAW;

                                    // Get RAW data
                                    m_frame.ucFormatGet = (byte)TUFRM_FORMATS.TUFRM_FMT_RAW;
                                    if (TUCAMRET.TUCAMRET_SUCCESS != TUCamAPI.TUCAM_Buf_CopyFrame(hIdxTUCam, ref m_frame))
                                    {
                                        break;
                                    }

                                    // Save RAW data
                                    if (TUCAMRET.TUCAMRET_SUCCESS != TUCamAPI.TUCAM_File_SaveImage(hIdxTUCam, fileSave))
                                    {
                                        break;
                                    }
                                }
                                /*BaiDuOcr baiDuOcr = new BaiDuOcr();
                                BaiduReturn.Text=baiDuOcr.GeneralBasicDemo(fileName+".jpg");
                                Client client=AlibabaOCR.CreateClient();
                                Stream bodySyream = AlibabaCloud.DarabonbaStream.StreamUtil.ReadFromFilePath(fileName + ".jpg");
                                AlibabaCloud.SDK.Ocr_api20210707.Models.RecognizeAdvancedRequest recognizeAdvancedRequest = new AlibabaCloud.SDK.Ocr_api20210707.Models.RecognizeAdvancedRequest
                                {
                                    Body = bodySyream,
                                    OutputFigure = true,
                                    Row = true,
                                    NeedSortPage = true,
                                    NeedRotate = true,
                                    OutputCharInfo = true,
                                };
                                AlibabaCloud.SDK.Ocr_api20210707.Models.RecognizeAdvancedResponse resp = client.RecognizeAdvanced(recognizeAdvancedRequest);
                                JObject jo = JObject.Parse(AlibabaCloud.TeaUtil.Common.ToJSONString(resp));
                                JObject dataJo=  JObject.Parse(jo.GetValue("Body")["Data"].ToString());
                                AlibabaCloud.TeaConsole.Client.Log(jo.GetValue("Body")["Data"].ToString());
                                //aireturn.Text = AlibabaCloud.TeaUtil.Common.ToJSONString(dataJo.GetValue("content"));
                                aireturn.Text = dataJo.GetValue("content").ToString();
                                AlibabaCloud.TeaConsole.Client.Log(AlibabaCloud.TeaUtil.Common.ToJSONString(resp));
                                TencentOCR tencentOCR = new TencentOCR();
                                TencentReturn.Text=tencentOCR.GetTencentOCR(fileName + ".jpg");*/
                                isSucceed = true;

                            } while (false);
                        }
                        finally
                        {
                            // BUGFIX: both unmanaged buffers leaked on every saved frame
                            // (and on every early 'break' above) in the original code.
                            if (savePathPtr != IntPtr.Zero)
                            {
                                Marshal.FreeHGlobal(savePathPtr);
                            }
                            if (frameCopyPtr != IntPtr.Zero)
                            {
                                Marshal.FreeHGlobal(frameCopyPtr);
                            }
                        }

                        if (isSucceed)
                        {
                            m_savedSucceedCnt++;

                            // capture finished
                            if (m_savedSucceedCnt >= m_totalFrames)
                            {
                                m_isSaving = false;

                                FinishedCaptureInvok finished = new FinishedCaptureInvok(FinishedCapture);
                                BeginInvoke(finished, new object[] { m_savedSucceedCnt });
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Allocates the SDK frame buffer, starts capture on the given camera and spawns
        /// the frame-waiting thread. No-op when already waiting or no camera is open.
        /// </summary>
        private void StartWaitForFrame(TUCAM_OPEN opCam)
        {
            if (m_isWaiting)
                return;

            if (m_opCamList.Count > 0)
            {
                m_isWaiting = true;
                m_waitingThread = new Thread(new ThreadStart(WaitForFrameThreadEntity));

                m_frame.pBuffer = IntPtr.Zero;
                m_frame.ucFormatGet = (byte)TUFRM_FORMATS.TUFRM_FMT_USUAl;
                m_frame.uiRsdSize = 1;

                TUCamAPI.TUCAM_Buf_Alloc(opCam.hIdxTUCam, ref m_frame);                           // Alloc buffer after set resolution or set ROI attribute
                TUCamAPI.TUCAM_Cap_Start(opCam.hIdxTUCam, (uint)m_triggerAttr.nTgrMode);          // Start capture

                m_waitingThread.Start();
            }
        }

        /// <summary>
        /// Signals the frame-waiting thread to exit, joins it, then stops capture and
        /// releases the SDK frame buffer. No-op when not waiting.
        /// </summary>
        private void StopWaitForFrame(TUCAM_OPEN opCam)
        {
            if (!m_isWaiting)
                return;

            if (m_opCamList.Count > 0)
            {
                m_isSaving = false;
                m_isWaiting = false;
                m_drawingFramesCnt = 0;
                m_startTime = new TimeSpan(DateTime.Now.Ticks);

                // Unblock TUCAM_Buf_WaitForFrame so the loop can observe m_isWaiting == false.
                TUCamAPI.TUCAM_Buf_AbortWait(opCam.hIdxTUCam);
                // BUGFIX: the original also called m_waitingThread.Abort() after Join();
                // the thread has already terminated at that point, and Thread.Abort is
                // obsolete (throws PlatformNotSupportedException on modern runtimes).
                m_waitingThread.Join();

                TUCamAPI.TUCAM_Cap_Stop(opCam.hIdxTUCam);                  // Stop capture
                TUCamAPI.TUCAM_Buf_Release(opCam.hIdxTUCam);               // Release alloc buffer after stop capture and quit drawing thread
            }
        }

        private void UpdateInformation(TUCAM_OPEN opCam)
        {
            // Intentionally empty (UI refresh hook kept for parity with the SDK sample).
        }

        /// <summary>
        /// Applies the initial camera configuration: horizontal/vertical flip, auto
        /// exposure off, and the exposure time from Config.CAMERA_EXPOSURE.
        /// </summary>
        private void InitControlRange(TUCAM_OPEN opCam)
        {
            if (IntPtr.Zero == opCam.hIdxTUCam)
                return;

            // Flip image horizontally
            TUCamAPI.TUCAM_Capa_SetValue(m_opCamList[m_indexCam].hIdxTUCam, (int)TUCAM_IDCAPA.TUIDC_HORIZONTAL, 1);
            // Flip image vertically
            TUCamAPI.TUCAM_Capa_SetValue(m_opCamList[m_indexCam].hIdxTUCam, (int)TUCAM_IDCAPA.TUIDC_VERTICAL, 1);

            // Exposure
            TUCamAPI.TUCAM_Capa_SetValue(m_opCamList[m_indexCam].hIdxTUCam, (int)TUCAM_IDCAPA.TUIDC_ATEXPOSURE, 0); // disable auto exposure
            TUCamAPI.TUCAM_Prop_SetValue(m_opCamList[m_indexCam].hIdxTUCam, (int)TUCAM_IDPROP.TUIDP_EXPOSURETM, (double)(Config.CAMERA_EXPOSURE / 1000.0f), 0); // exposure time

            /*TUCAM_VALUE_TEXT valueText;
            valueText.nTextSize = 64;
            string textStr = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
            valueText.pText = Marshal.StringToHGlobalAnsi(textStr);*/
        }

        /// <summary>
        /// Reads the current device XYZ position, mirrors it into lblCurrentXYZ and
        /// returns it; returns null (after showing a message) when the read fails.
        /// </summary>
        private int[] readXYZ()
        {
            try
            {
                Console.WriteLine("readXYZ");
                int[] xyz = deviceHandler.readCurrentXYZ();
                lblCurrentXYZ.Text = String.Format("x {0},y {1}, z {2}", xyz[0], xyz[1], xyz[2]);
                // BUGFIX: the original format string had no {0} placeholder, so the
                // coordinate text was silently dropped from the log output.
                Console.WriteLine("读取到当前坐标：{0}", lblCurrentXYZ.Text);
                return xyz;
            }
            catch (Exception ex)
            {
                AutoCloseMessageBox.Show(ex.Message);
                return null;
            }
        }

        /// <summary>
        /// Confirm button: stops the preview, reads the current position as the
        /// calibrated origin and closes with DialogResult.OK on success.
        /// </summary>
        private void btnSure_Click(object sender, EventArgs e)
        {
            if (m_opCamList.Count > 0)
            {
                StopWaitForFrame(m_opCamList[m_indexCam]);
            }

            int[] xyz = this.readXYZ();
            if (xyz != null && xyz.Length >= 2)
            {
                this.fixZeroLocation = new Rectangle()
                {
                    X = xyz[0],
                    Y = xyz[1],
                };
                this.DialogResult = DialogResult.OK;
                this.Close();
            }
            else
            {
                MessageBox.Show("没有读取到原点坐标，请联系管理员");
            }
        }

        /// <summary>Back button: stops the preview and closes the form.</summary>
        private void btnBack_Click(object sender, EventArgs e)
        {
            if (m_opCamList.Count > 0)
            {
                StopWaitForFrame(m_opCamList[m_indexCam]);
            }
            this.Close();
        }

        private void lblCurrentXYZ_Click(object sender, EventArgs e)
        {

        }

        /// <summary>
        /// Paints a thin blue border around the invalidated region of the preview box.
        /// </summary>
        private void drawingBox_Paint(object sender, PaintEventArgs e)
        {
            // BUGFIX: Graphics.DrawRectangle takes (x, y, width, height); the original
            // passed the right/bottom coordinates into the width/height parameters,
            // drawing an oversized rectangle. The Pen is also disposed now.
            using (Pen pen = new Pen(Color.FromArgb(255, 0, 77, 203)))
            {
                e.Graphics.DrawRectangle(pen, e.ClipRectangle.X, e.ClipRectangle.Y,
                    e.ClipRectangle.Width - 1, e.ClipRectangle.Height - 1);
            }
        }
    }
}

