/**
	Copyright (C) 2009,2010  Tobias Domhan

    This file is part of AndOpenGLCam.

    AndOpenGLCam is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    AndOpenGLCam is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with AndOpenGLCam.  If not, see <http://www.gnu.org/licenses/>.
 
 */
package com.colladaviewer.android.camera;

import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;


/**
 * Handles callbacks of the camera preview.
 * Camera preview demo:
 * http://developer.android.com/guide/samples/ApiDemos/src/com/example/android/apis/graphics/CameraPreview.html
 * YCbCr 420 colorspace infos:
 * http://wiki.multimedia.cx/index.php?title=YCbCr_4:2:0
 * http://de.wikipedia.org/wiki/YCbCr-Farbmodell
 * http://www.elektroniknet.de/home/bauelemente/embedded-video/grundlagen-der-videotechnik-ii-farbraum-gammakorrektur-digitale-video-signale/4/
 * @see android.hardware.Camera.PreviewCallback
 * @author Tobias Domhan
 *
 */
public class CameraHandler implements PreviewCallback {
	
    private final static Logger logger = Logger.getLogger(CameraHandler.class.getName());
    
    //private GLSurfaceView glSurfaceView;
	private PreviewFrameSink frameSink;
	
	private CameraConstFPS constFPS = null;	
	private AutoFocusHandler focusHandler = null;
	private int previewFrameWidth=240;
	private int previewFrameHeight=160;
	private byte[] buffer;
	
	//Modes:
	public final static int MODE_RGB=0;
	public final static int MODE_GRAY=1;
	private int mode = MODE_GRAY;
	private Object modeLock = new Object();
	//private ARToolkit markerInfo;
	private ConversionWorker convWorker;
	private Camera cam;
	private CameraStatus camStatus;
	private boolean threadsRunning = true;
	
	
	
	public CameraHandler(PreviewFrameSink sink, CameraStatus camStatus) 
	{

		this.frameSink = sink;
		convWorker = new ConversionWorker(sink);
		this.camStatus = camStatus;
	}
	



	/**
	 * Returns the best pixel format of the list or -1 if none suites.
	 * @param listOfFormats
	 * @return
	 */
	public static int getBestSupportedFormat(List<Integer> listOfFormats) {
		int format = -1;
		for (Iterator<Integer> iterator = listOfFormats.iterator(); iterator.hasNext();) {
			Integer integer =  iterator.next();
			if(integer.intValue() == PixelFormat.YCbCr_420_SP) {
				//alright the optimal format is supported..let's return
				format = PixelFormat.YCbCr_420_SP;
				return format;
			} else if(integer.intValue() == PixelFormat.YCbCr_422_SP) {
				format = PixelFormat.YCbCr_422_SP;
				//this format is not optimal. do not return, a better format might be in the list.
			}
		}
		return format;
	}
	
	/**
	 * the size of the camera preview frame is dynamic
	 * we will calculate the next power of two texture size
	 * in which the preview frame will fit
	 * and set the corresponding size in the renderer
	 * how to decode camera YUV to RGB for opengl:
	 * http://groups.google.de/group/android-developers/browse_thread/thread/c85e829ab209ceea/d3b29d3ddc8abf9b?lnk=gst&q=YUV+420#d3b29d3ddc8abf9b
	 * @param camera
	 */
	protected void init(Camera camera)  {
	          
	    cam = camera;
	    camera.setDisplayOrientation(0);
	    
	    Parameters camParams = camera.getParameters();        
		
		int bitsPerPixel = ImageFormat.getBitsPerPixel(camParams.getPreviewFormat());
		
	      //get width/height of the camera
        Size previewSize = camParams.getPreviewSize();
			
		// Allocate a few buffers for frame callback
		buffer = new byte[(previewSize.width * previewSize.height * bitsPerPixel) / 8];
		camera.addCallbackBuffer(buffer);
        //buffer = new byte[(previewSize.width * previewSize.height * bitsPerPixel) / 8];
        //camera.addCallbackBuffer(buffer);
        //buffer = new byte[(previewSize.width * previewSize.height * bitsPerPixel) / 8];
        //camera.addCallbackBuffer(buffer);
		
		camera.setPreviewCallbackWithBuffer(this);
		
		
		threadsRunning = true;
		/*
		if(Config.USE_ONE_SHOT_PREVIEW) {
			constFPS  = new CameraConstFPS(4, camera);
			constFPS.start();
		}	
		if(focusHandler == null) {
			focusHandler = new AutoFocusHandler(camera);
			focusHandler.start();
			//markerInfo.setVisListener(focusHandler);
		}
		*/
	}

	//size of a texture must be a power of 2
	//private byte[] frame;
	
	/**
	 * new frame from the camera arrived. convert and hand over
	 * to the renderer
	 * how to convert between YUV and RGB:http://en.wikipedia.org/wiki/YUV#Y.27UV444
	 * Conversion in C-Code(Android Project):
	 * http://www.netmite.com/android/mydroid/donut/development/tools/yuv420sp2rgb/yuv420sp2rgb.c
	 * http://code.google.com/p/android/issues/detail?id=823
	 * @see android.hardware.Camera.PreviewCallback#onPreviewFrame(byte[], android.hardware.Camera)
	 */
	public synchronized void onPreviewFrame(byte[] data, Camera camera) {
			//prevent null pointer exceptions
			if (data == null)
				return;
			convWorker.nextFrame(data);			
			//camera.addCallbackBuffer(data);
	}
	
/*	 public static void setCameraDisplayOrientation(Activity activity,
	         int cameraId, android.hardware.Camera camera) {
	     android.hardware.Camera.CameraInfo info =
	             new android.hardware.Camera.CameraInfo();
	     android.hardware.Camera.getCameraInfo(cameraId, info);
	     int rotation = activity.getWindowManager().getDefaultDisplay()
	             .getRotation();
	     int degrees = 0;
	     switch (rotation) {
	         case Surface.ROTATION_0: degrees = 0; break;
	         case Surface.ROTATION_90: degrees = 90; break;
	         case Surface.ROTATION_180: degrees = 180; break;
	         case Surface.ROTATION_270: degrees = 270; break;
	     }

	     int result;
	     if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
	         result = (info.orientation + degrees) % 360;
	         result = (360 - result) % 360;  // compensate the mirror
	     } else {  // back-facing
	         result = (info.orientation - degrees + 360) % 360;
	     }
	     camera.setDisplayOrientation(result);
	 }

	*/
	
	protected void setMode(int pMode) 
	{
		synchronized (modeLock) 
		{			
			this.mode = pMode;		
		}		
	}
	
	/**
	 * A worker thread that does colorspace conversion in the background.
	 * Need so that we can throw frames away if we can't handle the throughput.
	 * Otherwise the more and more frames would be enqueued, if the conversion did take
	 * too long.
	 * @author Tobias Domhan
	 *
	 */
	class ConversionWorker extends Thread {
		private byte[] curFrame;
		private boolean newFrame = false;
		private PreviewFrameSink frameSink;
		
		/**
		 * 
		 */
		public ConversionWorker(PreviewFrameSink frameSink) {
			setDaemon(true);
			this.frameSink = frameSink;
			start();
		}
		
		/* (non-Javadoc)
		 * @see java.lang.Thread#run()
		 */
		@Override
		public synchronized void run() {	
			setName("ConversionWorker");
	        
			while(true) 
			{
				while(!newFrame) 
				{
					//protect against spurious wakeups
					try {
						wait();//wait for next frame
					} catch (InterruptedException e) {}
				}
				newFrame = false;
				
				frameSink.setNextFrame(curFrame);
				
		
				yield();
			}
			
		}
		
		final void nextFrame(byte[] frame) {
			if(this.getState() == Thread.State.WAITING) {
				//ok, we are ready for a new frame:
				curFrame = frame;
				newFrame = true;
				//do the work:
				synchronized (this) {
					this.notify();
				}				
			} else {
				//ignore it
			}
		}
	}
	
	/**
	 * ensures a constant minimum frame rate.
	 * @author tobi
	 *
	 */
	class CameraConstFPS extends Thread {
		
		private long waitTime;
		private Camera cam;
		
		public CameraConstFPS(int fps, Camera cam) {
			waitTime = (long)(1.0/fps*1000);
			this.cam = cam;
		}
		
		@Override
		public synchronized void run() {
			super.run();
			setName("CameraConstFPS");
			while(threadsRunning) {
				try {
					wait(waitTime);
				} catch (InterruptedException e) {}
				if(camStatus.previewing) {
					try {
						cam.setOneShotPreviewCallback(CameraHandler.this);
					} catch(RuntimeException ex) {
						//
					}
				}
			}			
		}
	}

	class AutoFocusHandler extends Thread implements AutoFocusCallback {
		
		private Camera camera;
		private long lastScan;
		private static final int MIN_TIME = 1500;
		private static final int ENSURE_TIME = 10000;
		
		private boolean visible = false;
		
		public AutoFocusHandler(Camera camera) {
			this.camera = camera;
		}
		
		@Override
		public synchronized void run() {
			super.run();
			setName("Autofocus handler");
			//do an initial auto focus
			if(camStatus.previewing) {
				camera.autoFocus(this);
				lastScan = System.currentTimeMillis();
			}
			while(threadsRunning) {
				try {
					wait(ENSURE_TIME);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
				long currTime = System.currentTimeMillis();
				//if at least ENSURE_TIME has passed since the last scan
				//and no marker is visible, do an scan
				if(!visible && (currTime-lastScan)>ENSURE_TIME) {
					if(camStatus.previewing)  {
						camera.autoFocus(this);
						lastScan = currTime;
					}
				}
				yield();
			}
		}

	
		public void onAutoFocus(boolean arg0, Camera arg1) {
			
		}

	
		public void makerVisibilityChanged(boolean visible) {
			this.visible = visible;
			if(!visible) {
				long currTime = System.currentTimeMillis();
				if((currTime-lastScan)>MIN_TIME) {
					if(camStatus.previewing) {
						camera.autoFocus(this);
						lastScan = currTime;
					}
				}	
			}
		}
	}
	
	public void stopThreads() {
		threadsRunning = false;
		if(constFPS!= null)
			constFPS.interrupt();
		if(focusHandler!= null)
			focusHandler.interrupt();
	}

}
