/*
| ==============================================================================
| Copyright (C) 2007 Prosilica.  All Rights Reserved.
|
| Redistribution of this header file, in original or modified form, without
| prior written consent of Prosilica is prohibited.
|
|==============================================================================
|
| This sample code demonstrates how to capture a frame and save it to disk using .NET
| and C#
|
|==============================================================================
|
| THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
| WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE,
| NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR  PURPOSE ARE
| DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
| INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
| LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
| OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED  AND ON ANY THEORY OF
| LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
| NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
| EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|==============================================================================
*/

using System;
using System.Text;
using System.Threading;
using System.Runtime.InteropServices;
using System.Drawing;
using System.Drawing.Imaging;

using PvNET;

namespace Snap
{
	/// <summary>
	/// Summary description for Class1.
	/// </summary>
	class Class1
	{
		// Holds everything the sample needs for one camera: its identity,
		// the open handle, and the pinned buffer the driver captures into.
		private struct tCamera 
		{
			public UInt32	UID;	// camera's unique identifier (from tCameraInfo.UniqueId)
			public UInt32	Handle;	// handle returned by Pv.CameraOpen(); reset to 0 on close
			public tFrame	Frame;	// frame descriptor handed to the capture API
			public GCHandle GC;	// pins Buffer so the driver can DMA into it
			public byte[]	Buffer;	// raw image data (TotalBytesPerFrame bytes)
		};

		// Convert one YUV sample to RGB (BT.601-style coefficients).
		// y is the 8-bit luma; u and v are 8-bit chroma values biased by +128.
		// Each output channel is truncated to an int and clamped to [0,255].
		static void YUV2RGB(int y,int u,int v,ref int r,ref int g,ref int b)
		{
			// remove the +128 bias so chroma is centered on zero
			int cu = u - 128;
			int cv = v - 128;

			// raw (unclamped) channel values
			int lr = (int)(y + 1.370705 * (float)cv);
			int lg = (int)(y - 0.698001 * (float)cv - 0.337633 * (float)cu);
			int lb = (int)(y + 1.732446 * (float)cu);

			// clamp each channel to the displayable range
			r = Math.Max(0,Math.Min(255,lr));
			g = Math.Max(0,Math.Min(255,lg));
			b = Math.Max(0,Math.Min(255,lb));
		}

		// Frame callback required by Pv.CaptureQueueFrame(); intentionally empty
		// because CameraSnap() waits synchronously via CaptureWaitForFrameDone().
		static void FrameDummyCB(IntPtr pFrame)
		{
		}

		// Block until the API reports at least one camera, polling every 250ms
		// and printing a dot per poll as a progress indicator.
		static void WaitForCamera()
		{
			System.Console.WriteLine("waiting for a camera ");

			for(;;)
			{
				if(Pv.CameraCount() != 0)
					break;

				System.Console.Write(".");
				Thread.Sleep(250);
			}

			System.Console.WriteLine("");			
		}

		// Fetch the UID of the first camera listed by the API into Camera.UID.
		// Returns false when no camera is reported.
		static bool CameraGet(ref tCamera Camera)
		{
			UInt32 lConnected = 0;
			tCameraInfo[] lList = new tCameraInfo[1];

			// ask the driver for at most one camera
			if(Pv.CameraList(lList,1,ref lConnected) == 0)
				return false;

			Camera.UID = lList[0].UniqueId;
			return true;
		}

		// Open the camera identified by Camera.UID with master access, storing
		// the driver handle in Camera.Handle. Returns true on success.
		static bool CameraOpen(ref tCamera Camera)
		{
			bool lOpened = Pv.CameraOpen(Camera.UID,tAccessFlags.eAccessMaster,out Camera.Handle) == 0;

			return lOpened;
		}

		// Close the camera: restore the trigger default, close the driver
		// handle, and release the pinned capture buffer.
		static void CameraClose(ref tCamera Camera)
		{
			// reset the trigger mode back to its free-running default
			Pv.AttrEnumSet(Camera.Handle,"FrameStartTriggerMode","Freerun");
			// close the camera
			Pv.CameraClose(Camera.Handle);
			// release the pinned buffer, but only if it was ever pinned:
			// CameraSetup() can fail before GCHandle.Alloc() runs, and calling
			// Free() on a default GCHandle throws InvalidOperationException
			if(Camera.GC.IsAllocated)
				Camera.GC.Free();
			// reset the handle
			Camera.Handle = 0;
		}

        // Change the camera gain by reading register 0x14150, adding Value to
        // it and writing it back (demonstrates raw register access).
        static void CameraChangeGainBy(ref tCamera Camera, UInt32 Value)
        {
            UInt32 lDone = 0;
            UInt32[] lAddress = new UInt32[] { 0x14150 };
            UInt32[] lData    = new UInt32[] { 0 };

            // read the current register value
            if (Pv.RegisterRead(Camera.Handle, 1, lAddress, lData, ref lDone) == 0)
                System.Console.WriteLine("value of register {0} is {1} {2}", lAddress[0], lData[0], lDone);
            else
                System.Console.WriteLine("Failed to read register {0}", lAddress[0]);

            // apply the requested increment (written back even if the read failed,
            // matching the sample's original best-effort behavior)
            lData[0] += Value;

            if (Pv.RegisterWrite(Camera.Handle, 1, lAddress, lData, ref lDone) == 0)
                System.Console.WriteLine("register {0} was written to {1} {2}", lAddress[0], lData[0], lDone);
            else
                System.Console.WriteLine("Failed to write register {0}", lAddress[0]);
        }

		// Setup the camera for software-triggered continuous capture:
		// allocate and pin the frame buffer, start the capture stream, switch
		// to software triggering and start acquisition.
		// Returns true only when every step succeeded.
		// BUG FIX: the original returned true when Pv.CaptureStart() failed;
		// it also left the capture stream running when setting the trigger
		// mode failed.
		static bool CameraSetup(ref tCamera Camera)
		{
			UInt32 FrameSize = 0;

			// get the byte size of the buffer we need to allocate
			if(Pv.AttrUint32Get(Camera.Handle,"TotalBytesPerFrame",ref FrameSize) != 0)
				return false;

			// allocate the buffer and pin it so the driver can write into it
			Camera.Buffer = new byte[FrameSize];
			Camera.GC     = GCHandle.Alloc(Camera.Buffer,GCHandleType.Pinned);

			Camera.Frame.ImageBuffer     = Camera.GC.AddrOfPinnedObject();  // @ of the pinned buffer
			Camera.Frame.ImageBufferSize = FrameSize;						// buffer size

			System.Console.WriteLine("Frame size = {0} bytes",FrameSize);

			// start the capture mode
			if(Pv.CaptureStart(Camera.Handle) != 0)
				return false;

			// set the camera in software acquisition mode
			if(Pv.AttrEnumSet(Camera.Handle,"FrameStartTriggerMode","Software") != 0)
			{
				// undo the capture start so the camera is left in a clean state
				Pv.CaptureEnd(Camera.Handle);
				return false;
			}

			// and set the acquisition mode into continuous
			if(Pv.CommandRun(Camera.Handle,"AcquisitionStart") != 0)
			{
				// if that fails, we reset the camera to non capture mode
				Pv.CaptureEnd(Camera.Handle);
				return false;
			}

			return true;
		}

		// Snap a single frame: queue the pinned frame, fire the software
		// trigger, wait for completion, then copy the updated frame structure
		// back into Camera.Frame. Returns true when a frame was captured with
		// a success status.
		static bool CameraSnap(ref tCamera Camera)
		{
			GCHandle lFrameHandle = GCHandle.Alloc(Camera.Frame,GCHandleType.Pinned);
			tFrameCallback lCallback = new tFrameCallback(FrameDummyCB);
			bool lSnapped = false;

			// queue the frame with the driver
			if(Pv.CaptureQueueFrame(Camera.Handle,lFrameHandle.AddrOfPinnedObject(),lCallback) == 0)
			{
				// fire the software trigger
				if(Pv.CommandRun(Camera.Handle,"FrameStartTriggerSoftware") == 0)
				{
					// wait for the frame to come back
					// NOTE(review): the timeout argument is 0 here — presumably
					// "no timeout" in PvAPI terms; confirm against the API docs
					if(Pv.CaptureWaitForFrameDone(Camera.Handle,lFrameHandle.AddrOfPinnedObject(),0) == 0)
					{
						// copy the (driver-updated) frame structure back
						Camera.Frame = (tFrame)Marshal.PtrToStructure(lFrameHandle.AddrOfPinnedObject(),typeof(tFrame));

						// only a success status counts as a good snap
						lSnapped = Camera.Frame.Status == tErr.eErrSuccess;
						if(!lSnapped)
							System.Console.WriteLine("frame captured un-succesfully {0}",Camera.Frame.Status);
					}
					else
					{
						// the wait failed: drop the queued frame
						Pv.CaptureQueueClear(Camera.Handle);
					}
				}
				else
				{
					// the trigger failed: drop the queued frame
					Pv.CaptureQueueClear(Camera.Handle);
				}
			}

			lFrameHandle.Free();

			return lSnapped;
		}

		// Convert the raw pixel data in the frame's buffer into the bitmap's data.
		// The destination is assumed to be a 24bpp BGR bitmap of Frame.Width x
		// Frame.Height (as allocated by FrameSave()), whose rows may be padded
		// up to Data.Stride. Returns false for any pixel format not handled below.
		// NOTE(review): the original header claimed eFmtRgb48, eFmtYuv411 and
		// eFmtYuv444 were unsupported, but all three have cases below.
		static unsafe bool Frame2Data(ref tCamera Camera,ref BitmapData Data)
		{
			switch(Camera.Frame.Format)
			{
				// 8-bit monochrome: replicate each gray byte into B, G and R
				case tImageFormat.eFmtMono8:
				{
					UInt32 lOffset = 0;	// index into the source buffer (bytes)
					UInt32 lPos = 0;	// index into the destination bitmap (bytes)
					byte* lDst = (byte *)Data.Scan0;
					
					while(lOffset<Camera.Frame.ImageBufferSize)
					{
						lDst[lPos]   = Camera.Buffer[lOffset];
						lDst[lPos+1] = Camera.Buffer[lOffset];
						lDst[lPos+2] = Camera.Buffer[lOffset];

						lOffset++;
						lPos += 3;

						// take care of the padding in the destination bitmap
						if((lOffset % Camera.Frame.Width) == 0)
							lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
					}

					return true;
				}
				// 16-bit monochrome: shift each sample down to its 8 most
				// significant bits, then replicate into B, G and R
				case tImageFormat.eFmtMono16:
				{
					UInt32 lOffset = 0;
					UInt32 lPos = 0;
					byte* lDst = (byte *)Data.Scan0;
					// number of low bits to drop, based on the sensor bit depth
					byte bitshift = (byte)((int)Camera.Frame.BitDepth - 8);
					UInt16* lSrc = (UInt16*)Camera.Frame.ImageBuffer;
					
					while(lOffset<Camera.Frame.Width * Camera.Frame.Height)
					{
						lDst[lPos]   = (byte)(lSrc[lOffset] >> bitshift);
						lDst[lPos+1] = lDst[lPos];
						lDst[lPos+2] = lDst[lPos];

						lOffset++;
						lPos += 3;

						// take care of the padding in the destination bitmap
						if((lOffset % Camera.Frame.Width) == 0)
							lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
					}

					return true;
				}
				// 8-bit Bayer mosaic: let the API de-mosaic straight into the bitmap
				case tImageFormat.eFmtBayer8:
				{
					UInt32 WidthSize = Camera.Frame.Width * 3;
					// the frame structure must be pinned for the native call below
					GCHandle pFrame = GCHandle.Alloc(Camera.Frame,GCHandleType.Pinned);
					// per-row destination padding (24bpp stride is 4-byte aligned)
					UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);
								
					// interpolate the colors; the channel pointers interleave as
					// B,G,R and the "2" is the per-pixel padding between samples
					// of one channel (see the PvAPI ColorInterpolate docs)
					IntPtr pRed   = (IntPtr)((byte *)Data.Scan0 + 2);
					IntPtr pGreen = (IntPtr)((byte *)Data.Scan0 + 1);
					IntPtr pBlue  = (IntPtr)((byte *)Data.Scan0);
					Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(),pRed,pGreen,pBlue,2,remainder);

					pFrame.Free();

					return true;
				}
				// 16-bit Bayer mosaic: compact the samples to 8 bits in place,
				// then de-mosaic as above
				case tImageFormat.eFmtBayer16:
				{
					UInt32 WidthSize = Camera.Frame.Width * 3;
					UInt32 lOffset   = 0;
					byte bitshift    = (byte)((int)Camera.Frame.BitDepth - 8);
					UInt16* lSrc     = (UInt16*)Camera.Frame.ImageBuffer;
					byte*   lDst     = (byte*)Camera.Frame.ImageBuffer;
					UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);
					GCHandle pFrame;

					// NOTE(review): the frame is re-labelled as Bayer8 (and not
					// restored afterwards) so ColorInterpolate treats the
					// compacted buffer as 8-bit data
					Camera.Frame.Format = tImageFormat.eFmtBayer8;

					pFrame = GCHandle.Alloc(Camera.Frame,GCHandleType.Pinned);
					
					// shift the pixels in place; the byte write index never
					// passes the 16-bit read index, so the forward pass is safe
					while(lOffset<Camera.Frame.Width * Camera.Frame.Height)
						lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);

					// interpolate the colors
					IntPtr pRed   = (IntPtr)((byte *)Data.Scan0 + 2);
					IntPtr pGreen = (IntPtr)((byte *)Data.Scan0 + 1);
					IntPtr pBlue  = (IntPtr)((byte *)Data.Scan0);
					Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(),pRed,pGreen,pBlue,2,remainder);

					pFrame.Free();

					return true;
				}
				// 24-bit RGB: swap the channel order to the BGR the bitmap expects
				case tImageFormat.eFmtRgb24:
				{
					UInt32 lOffset = 0;
					UInt32 lPos = 0;
					byte* lDst = (byte *)Data.Scan0;
					
					while(lOffset<Camera.Frame.ImageBufferSize)
					{
						// copy the data
						lDst[lPos]   = Camera.Buffer[lOffset+2];
						lDst[lPos+1] = Camera.Buffer[lOffset+1];
						lDst[lPos+2] = Camera.Buffer[lOffset];

						lOffset+= 3;
						lPos   += 3;
						// take care of the padding in the destination bitmap
						if((lOffset % (Camera.Frame.Width * 3)) == 0)
							lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
					}
					
					return true;
				}
				// 48-bit RGB: shift each 16-bit channel down to 8 bits and swap to BGR
				case tImageFormat.eFmtRgb48:
				{
					UInt32 lOffset = 0;
					UInt32 lPos    = 0;
					UInt32 lLength = Camera.Frame.ImageBufferSize / sizeof(UInt16);
					UInt16* lSrc   = (UInt16*)Camera.Frame.ImageBuffer;
					byte* lDst     = (byte *)Data.Scan0;
					byte bitshift  = (byte)((int)Camera.Frame.BitDepth - 8);
					
					while(lOffset < lLength)
					{
						// copy the data
						lDst[lPos]   = (byte)(lSrc[lOffset+2] >> bitshift);
						lDst[lPos+1] = (byte)(lSrc[lOffset+1] >> bitshift);
						lDst[lPos+2] = (byte)(lSrc[lOffset] >> bitshift);

						lOffset+= 3;
						lPos   += 3;

						// take care of the padding in the destination bitmap
						if((lOffset % (Camera.Frame.Width * 3)) == 0)
							lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
					}

					return true;
				}
				// YUV 4:1:1 - 6 bytes (U,Y1,Y2,V,Y3,Y4) encode 4 pixels that
				// share one U/V pair.
				// NOTE(review): the YUV cases below do not account for
				// destination row padding - presumably they assume
				// stride == width * 3; confirm for widths whose 3x byte rows
				// are not 4-byte aligned
				case tImageFormat.eFmtYuv411:
				{
					UInt32 lOffset = 0;
					UInt32 lPos = 0;
					byte* lDst = (byte *)Data.Scan0;
					int y1,y2,y3,y4,u,v;
					int r,g,b;

					r = g = b = 0;

					while(lOffset<Camera.Frame.ImageBufferSize)
					{
						u  = Camera.Buffer[lOffset++];
						y1 = Camera.Buffer[lOffset++];
						y2 = Camera.Buffer[lOffset++];
						v  = Camera.Buffer[lOffset++];
						y3 = Camera.Buffer[lOffset++];
						y4 = Camera.Buffer[lOffset++];

						// expand the 4 luma samples against the shared chroma,
						// writing each pixel in B,G,R order
						YUV2RGB(y1,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
						YUV2RGB(y2,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
						YUV2RGB(y3,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
						YUV2RGB(y4,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;						
					}

					return true;
				}
				// YUV 4:2:2 - 4 bytes (U,Y1,V,Y2) encode 2 pixels sharing one U/V pair
				case tImageFormat.eFmtYuv422:
				{
					UInt32 lOffset = 0;
					UInt32 lPos = 0;
					byte* lDst = (byte *)Data.Scan0;
					int y1,y2,u,v;
					int r,g,b;

					r = g = b = 0;

					while(lOffset<Camera.Frame.ImageBufferSize)
					{
						u  = Camera.Buffer[lOffset++];
						y1 = Camera.Buffer[lOffset++];
						v  = Camera.Buffer[lOffset++];
						y2 = Camera.Buffer[lOffset++];
						
						YUV2RGB(y1,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
						YUV2RGB(y2,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
					}

					return true;
				}
				// YUV 4:4:4 - 6 bytes carry two pixels with per-pixel chroma.
				// NOTE(review): the second pixel's own U and V bytes are
				// skipped and the first pixel's chroma is reused - presumably
				// an accepted approximation; verify if exact color matters
				case tImageFormat.eFmtYuv444:
				{
					UInt32 lOffset = 0;
					UInt32 lPos = 0;
					byte* lDst = (byte *)Data.Scan0;
					int y1,y2,u,v;
					int r,g,b;

					r = g = b = 0;

					while(lOffset<Camera.Frame.ImageBufferSize)
					{
						u  = Camera.Buffer[lOffset++];
						y1 = Camera.Buffer[lOffset++];
						v  = Camera.Buffer[lOffset++];
						lOffset++;
						y2 = Camera.Buffer[lOffset++];
						lOffset++;
						
						YUV2RGB(y1,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
						YUV2RGB(y2,u,v,ref r,ref g,ref b);
						lDst[lPos++] = (byte)b;
						lDst[lPos++] = (byte)g;
						lDst[lPos++] = (byte)r;
					}

					return true;
				}
				// unsupported pixel format
				default:
					return false;
			}
		}

		// Save the camera's current frame to a file by converting it into a
		// 24bpp bitmap. Returns false when the frame's pixel format is not
		// supported by Frame2Data().
		// BUG FIX: the original never disposed the Bitmap, leaking a GDI
		// handle on every call; this version disposes it via "using" and
		// unlocks the bits in a finally block.
		static bool FrameSave(ref tCamera Camera,string file)
		{
			using(Bitmap lBitmap = new Bitmap((int)Camera.Frame.Width,(int)Camera.Frame.Height,PixelFormat.Format24bppRgb))
			{
				Rectangle lRect = new Rectangle(new Point(0,0),new Size((int)Camera.Frame.Width,(int)Camera.Frame.Height));
				BitmapData lData = lBitmap.LockBits(lRect,ImageLockMode.ReadWrite,PixelFormat.Format24bppRgb);
				bool lFilled;

				try
				{
					// copy/convert the frame's pixels into the locked bitmap
					lFilled = Frame2Data(ref Camera,ref lData);
				}
				finally
				{
					// always release the locked bits, even if conversion throws
					lBitmap.UnlockBits(lData);
				}

				if(!lFilled)
					return false;

				lBitmap.Save(file);
				return true;
			}
		}

		/// <summary>
		/// The main entry point for the application: initialize the API, wait
		/// for a camera, open and set it up, snap and save 10 frames, then
		/// clean everything up.
		/// </summary>
		[STAThread]
		static void Main(string[] args)
		{
			tErr err = Pv.Initialize();

			if(err != 0)
			{
				// the API could not be initialized; nothing else can be done
				System.Console.Write("failed to initialize the API : ");	
				System.Console.WriteLine(err);
			}
			else
			{
				tCamera Camera = new tCamera();

				WaitForCamera();

				if(!CameraGet(ref Camera))
					System.Console.WriteLine("failed to get a camera");
				else if(!CameraOpen(ref Camera))
					System.Console.WriteLine("camera {0} failed to be open",Camera.UID);
				else
				{
					System.Console.WriteLine("camera {0} open",Camera.UID);	

					if(!CameraSetup(ref Camera))
						System.Console.WriteLine("failed to setup the camera");
					else
					{
						System.Console.WriteLine("camera setup for acquisition ...");	

						// snap and save 10 frames (a failed snap is retried:
						// only successful snaps advance the counter)
						UInt32 Count = 0;
						while(Count < 10)
						{
							if(!CameraSnap(ref Camera))
								System.Console.WriteLine("failed to snap an image");
							else
							{
								String filename = "snap" + (Count++) + ".bmp";

								if(FrameSave(ref Camera,filename))
									System.Console.WriteLine("frame saved in {0}",filename);
								else
									System.Console.WriteLine("failed to save the image");
							}

							Thread.Sleep(200);
						}
					}

					// restore the trigger mode and release the camera
					CameraClose(ref Camera);
				}

				Pv.UnInitialize();
			}

			// leave the console output visible for a moment before exiting
			Thread.Sleep(800);
		}
	}
}
