﻿using System;
using MVSDK;
using CameraHandle = System.Int32;
using System.Runtime.InteropServices;
using System.Threading;
using System.Drawing;

namespace ContourSplit.Scaffold
{
	/// <summary>
	/// Event payload describing one received camera frame: its pixel
	/// dimensions, the number of color channels, and a pointer to the
	/// processed image data.
	/// </summary>
	public class ReceivingArgs : EventArgs
	{
		// Number of channels in the frame (1 = mono, 3 = RGB).
		public int channels;
		// Frame dimensions in pixels.
		public Size size;
		// Unmanaged buffer holding the processed pixel data. The buffer is
		// owned by the camera wrapper, not by the event consumer.
		public readonly IntPtr FrameBuffer;

		/// <summary>
		/// Creates the payload for a <paramref name="width"/> x
		/// <paramref name="height"/> frame with the given channel count.
		/// </summary>
		public ReceivingArgs(int width, int height, int channels, IntPtr FrameBuffer)
		{
			this.channels = channels;
			this.FrameBuffer = FrameBuffer;
			size = new Size(width, height);
		}
	}
	/// <summary>
	/// Thin wrapper around one MindVision SDK camera. The constructor opens the
	/// device at the given enumeration index, allocates an RGB conversion buffer
	/// and starts a background capture thread; the camera begins paused in
	/// external-trigger mode, so call <see cref="Play"/> to start acquisition.
	/// </summary>
	public sealed class MindVisionCamera : IDisposable
	{
		// Maximum number of devices considered during enumeration.
		private const int MaxEnumeratedDevices = 12;

		public delegate void ReceivingImageHandler(object sender, ReceivingArgs e);

		/// <summary>
		/// Raised once per captured frame. This event fires on the background
		/// capture thread, so handlers must not touch UI elements directly.
		/// </summary>
		public event ReceivingImageHandler OnReiceivingImage;

		private tSdkCameraCapbility tCameraCapability; // capability description reported by the SDK
		private IntPtr m_ImageBuffer;                  // unmanaged buffer for the processed (RGB/mono) frame
		private CameraHandle m_hCamera = 0;            // SDK handle; 0 means "not initialized"
		// Read by the capture thread, written by Dispose: volatile so the
		// exit request is guaranteed to become visible across threads.
		private volatile bool m_bExitCaptureThread = false;
		private Thread m_tCaptureThread;               // frame-grabbing thread
		private bool m_bDisposed = false;              // guards against double-dispose (Dispose + finalizer)

		/// <summary>
		/// Capture loop: pulls raw frames from the SDK, converts them into
		/// <see cref="m_ImageBuffer"/> and forwards them via the event.
		/// </summary>
		private void CaptureThreadProc()
		{
			while (!m_bExitCaptureThread)
			{
				// The raw buffer is allocated by the SDK; it must be handed back
				// with CameraReleaseImageBuffer, never freed by the application.
				CameraSdkStatus eStatus = MvApi.CameraGetImageBuffer(m_hCamera, out tSdkFrameHead FrameHead, out uint uRawBuffer, 500);
				if (eStatus == CameraSdkStatus.CAMERA_STATUS_SUCCESS) // in trigger mode the 500 ms wait may simply time out
				{
					MvApi.CameraImageProcess(m_hCamera, uRawBuffer, m_ImageBuffer, ref FrameHead);
					MvApi.CameraReleaseImageBuffer(m_hCamera, uRawBuffer);
					int channel = FrameHead.uiMediaType == (uint)emImageFormat.CAMERA_MEDIA_TYPE_MONO ? 1 : 3;
					// Null-conditional invoke: without it, having no subscriber
					// throws NullReferenceException on the capture thread.
					OnReiceivingImage?.Invoke(this, new ReceivingArgs(FrameHead.iWidth, FrameHead.iHeight, channel, m_ImageBuffer));
				}
			}
		}

		/// <summary>
		/// Enumerates connected cameras and opens the one at index
		/// <paramref name="id"/>. If fewer devices are found, the constructor
		/// returns silently (matching the historical behavior) and the instance
		/// stays uninitialized.
		/// </summary>
		/// <param name="id">Zero-based index into the enumerated device list.</param>
		/// <exception cref="InvalidOperationException">
		/// Device enumeration or camera initialization failed.
		/// </exception>
		public MindVisionCamera(int id)
		{
			if (m_hCamera > 0)
				return;

			int devInfoSize = Marshal.SizeOf(typeof(tSdkCameraDevInfo));
			tSdkCameraDevInfo[] tCameraDevInfoList = new tSdkCameraDevInfo[MaxEnumeratedDevices];
			int iCameraCounts = MaxEnumeratedDevices;
			IntPtr ptr = Marshal.AllocHGlobal(devInfoSize * MaxEnumeratedDevices);
			try
			{
				if (MvApi.CameraEnumerateDevice(ptr, ref iCameraCounts) != CameraSdkStatus.CAMERA_STATUS_SUCCESS)
					throw new InvalidOperationException("Failed to Get Devices.");
				for (int i = 0; i < MaxEnumeratedDevices; i++)
					// IntPtr.Add avoids the 32-bit truncation of the original
					// (int)ptr cast when running as a 64-bit process.
					tCameraDevInfoList[i] = (tSdkCameraDevInfo)Marshal.PtrToStructure(IntPtr.Add(ptr, i * devInfoSize), typeof(tSdkCameraDevInfo));
			}
			finally
			{
				// Previously leaked when enumeration failed.
				Marshal.FreeHGlobal(ptr);
			}

			if (iCameraCounts < id + 1)
				return; // no device at this index; original code also returned silently here

			if (MvApi.CameraInit(ref tCameraDevInfoList[id], -1, -1, ref m_hCamera) != CameraSdkStatus.CAMERA_STATUS_SUCCESS)
				throw new InvalidOperationException("Failed to Init Camera.");

			IntPtr capPtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(tSdkCameraCapbility)));
			try
			{
				MvApi.CameraGetCapability(m_hCamera, capPtr);
				tCameraCapability = (tSdkCameraCapbility)Marshal.PtrToStructure(capPtr, typeof(tSdkCameraCapbility));
			}
			finally
			{
				Marshal.FreeHGlobal(capPtr);
			}

			// Worst-case RGB frame at maximum resolution, plus headroom.
			m_ImageBuffer = Marshal.AllocHGlobal(tCameraCapability.sResolutionRange.iWidthMax * tCameraCapability.sResolutionRange.iHeightMax * 3 + 1024);

			m_bExitCaptureThread = false;
			m_tCaptureThread = new Thread(new ThreadStart(CaptureThreadProc));
			// Background thread: never keeps the process alive if the owner
			// forgets to call Dispose before exiting.
			m_tCaptureThread.IsBackground = true;
			m_tCaptureThread.Priority = ThreadPriority.Highest;
			m_tCaptureThread.Start();

			MvApi.CameraSetTriggerMode(m_hCamera, (int)emSdkSnapMode.EXTERNAL_TRIGGER);
			MvApi.CameraSetExtTrigSignalType(m_hCamera, (int)emExtTrigSignal.EXT_TRIG_TRAILING_EDGE);
			MvApi.CameraPause(m_hCamera);
		}

		/// <summary>Starts (or resumes) frame acquisition.</summary>
		public void Play()
		{
			MvApi.CameraPlay(m_hCamera);
		}

		/// <summary>Stops frame acquisition.</summary>
		public void Stop()
		{
			MvApi.CameraStop(m_hCamera);
		}

		/// <summary>
		/// Stops the capture thread, uninitializes the camera and frees the
		/// image buffer. Safe to call multiple times; the original version
		/// double-freed the buffer when Dispose ran and the finalizer fired.
		/// </summary>
		public void Dispose()
		{
			if (m_bDisposed)
				return;
			m_bDisposed = true;

			m_bExitCaptureThread = true;
			// CameraGetImageBuffer waits at most 500 ms per iteration, so the
			// thread observes the flag and exits promptly; Join replaces the
			// original Sleep(10) polling loop.
			m_tCaptureThread?.Join();

			if (m_hCamera > 0)
			{
				MvApi.CameraUnInit(m_hCamera);
				m_hCamera = 0;
			}
			if (m_ImageBuffer != IntPtr.Zero)
			{
				Marshal.FreeHGlobal(m_ImageBuffer);
				m_ImageBuffer = IntPtr.Zero;
			}

			// Resources are released; the finalizer no longer needs to run.
			GC.SuppressFinalize(this);
		}

		// Safety net for owners that forget to call Dispose.
		~MindVisionCamera()
		{
			Dispose();
		}
	}
}
