package de.fhd.pms;

import java.awt.*;
import java.awt.image.BufferedImage;
import java.applet.*;
import java.net.*;

import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;

import com.mathworks.toolbox.javabuilder.*;
import FaceDetection.FaceDetect;

/**
 * Applet that displays a webcam image refreshed on a fixed interval by a
 * background thread, with a MATLAB-compiled face-detection entry point
 * ({@link #getFaceImage()}) usable both from the applet and from {@link #main}.
 *
 * Applet parameters:
 *   "URL"      — required; address of the webcam image.
 *   "Interval" — optional refresh multiplier; defaults to 1 when absent/invalid.
 *   "Trace"    — when present, enables console debug output.
 *
 * Thread-safety: the refresh thread is paused/resumed via a wait/notify flag
 * instead of the deprecated Thread.suspend()/resume()/stop(), which can
 * deadlock and are removed on modern JVMs.
 */
public class JavaCamApp extends Applet implements Runnable
{
	private static final long serialVersionUID = 1L;

	boolean	boolean_Debug;			  // Debugging enabled/disabled
	Image	image_WebcamImage = null; // Image from web camera
	Thread	thread_This;			  // Thread for refreshing image
	int		int_Interval;			  // Interval between image refreshes ...
	Image	test1;					  // Face-detection snapshot; now filled in init() instead of
									  // a field initializer, so the expensive MATLAB call no longer
									  // runs (and cannot throw) during applet construction.

	// Pause/shutdown state replacing Thread.suspend()/resume()/stop().
	// 'paused' starts true so the thread really is idle until start() is called
	// (the old suspend()-before-start() call had no effect at all).
	private final Object pauseLock = new Object();
	private volatile boolean paused = true;   // start()/stop() toggle this
	private volatile boolean running = true;  // destroy() clears this

	/**
	 * Entry point for running the face-detection demo as a desktop application.
	 * Shows the detected face image in a simple frame.
	 */
	public static void main(String args[]){ // to run as normal java application
		System.out.println("IPCam");
		Image face = getFaceImage();
		if (face == null) { // getFaceImage() returns null on MWException — avoid NPE in ImageIcon
			System.err.println("Face detection produced no image");
			return;
		}
		ImageIcon icon = new ImageIcon(face);
		JLabel label = new JLabel(icon);
		JFrame frame = new JFrame();
		// Without this, closing the window left the JVM (and MATLAB runtime) alive.
		frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frame.setSize(icon.getIconWidth(), icon.getIconHeight());
		frame.setContentPane(label);
		frame.setVisible(true);
	}

	/**
	 * Applet initialization: reads parameters, fetches the initial webcam
	 * image, and starts the (paused) refresh thread.
	 */
	public void init() // init method
	{
		final String string_Url;
		URL		url_Webcam;

		// initialize trace, if selected
		boolean_Debug = false;
		if (getParameter("Trace") != null)
		{
			boolean_Debug = true;
		}
		trace("Init called");

		// Run face detection once up front (previously done eagerly in the
		// field initializer during construction).
		test1 = getFaceImage();

		string_Url = getParameter("URL");
		if (string_Url == null) {
			showStatus("No URL for Webcam image supplied");
			return;
		}

		// set interval for grabbing the image; NumberFormatException also
		// covers a missing ("null") parameter
		try {
			int_Interval = Integer.parseInt(getParameter("Interval"));
		} catch (NumberFormatException e) {
			int_Interval = 1;
		}
		if (int_Interval < 1) {
			int_Interval = 1; // guard against zero/negative sleep
		}
		trace("Refresh interval=" + int_Interval);

		// get image from web cam
		try
		{
			url_Webcam = new URL(string_Url);
			trace("Target URL=" + url_Webcam.toString());
			image_WebcamImage = getImage(url_Webcam);

			MediaTracker mediaTracker_track = new MediaTracker(this); // Monitoring of processing status
			mediaTracker_track.addImage(image_WebcamImage, 1);
			mediaTracker_track.waitForID(1);
		}
		catch (Exception e)
		{
			trace("Image get exception:" + e.toString());
			showStatus("Problem getting initial image: " + e.toString());
			return;
		}

		// Create and start the refresh thread; it blocks immediately on the
		// pause flag until start() releases it.
		thread_This = new Thread(this);
		thread_This.setDaemon(true); // never keep the VM alive on its own
		thread_This.start();
	}

	/** Applet start: releases the refresh thread from its paused state. */
	public void start() // start method
	{
		trace("Start called");
		synchronized (pauseLock) {
			paused = false;
			pauseLock.notifyAll();
		}
	}

	/**
	 * Refresh loop: sleeps for the configured interval, flushes the cached
	 * image so the next paint fetches a fresh frame, waits for it to load,
	 * then repaints. Exits when destroy() clears 'running'.
	 */
	public void run() // run method
	{
		trace("run(): Thread started!");
		while (running)
		{
			try
			{
				// Block while the applet is stopped.
				synchronized (pauseLock) {
					while (paused && running) {
						pauseLock.wait();
					}
				}
				if (!running) {
					break;
				}

				// Put the refresh thread to sleep for the supplied interval.
				// NOTE(review): factor 2000 (not 1000 ms/unit) kept from the
				// original — the effective period is int_Interval * 2 seconds.
				Thread.sleep(int_Interval * 2000L);

				// Flush the image in the image buffer so we get a new one
				image_WebcamImage.flush();
				trace("Flushing image");

				// redraw the image if face detected ==============================================
				// call face detection algorithm (hook point — see getFaceImage())

				// Track the image and wait till it is loaded
				MediaTracker mediaTracker_track = new MediaTracker(this);
				mediaTracker_track.addImage(image_WebcamImage, 1);
				mediaTracker_track.waitForID(1);
				// Repaint the image
				repaint();
			}
			catch (InterruptedException e)
			{
				// destroy() interrupts us to shut down; preserve the flag and exit.
				Thread.currentThread().interrupt();
				return;
			}
			catch (Exception e)
			{
				trace("Caught:" + e.toString());
			}
		}
	}

	/** Applet teardown: signals the refresh thread to terminate. */
	public void destroy() // destroy method
	{
		trace("Applet.destroy() called");
		running = false;
		synchronized (pauseLock) {
			pauseLock.notifyAll(); // wake the thread if it is paused
		}
		if (thread_This != null) {
			thread_This.interrupt(); // wake it if it is sleeping
		}
	}

	/** Applet stop: pauses the refresh thread (resumed by start()). */
	public void stop() // stop method
	{
		trace("Stop called");
		synchronized (pauseLock) {
			paused = true;
		}
	}

	/**
	 * Writes a debug message to the console when tracing is enabled
	 * (the "Trace" applet parameter was supplied).
	 *
	 * @param message text to print
	 */
	public void trace(String message) // trace some information to console
	{
		if (boolean_Debug)
		{
			System.out.println(message);
		}
	}

	/**
	 * Paints the most recently loaded webcam image at the origin, if any.
	 */
	public void paint(Graphics g) // method to repaint the image
	{
		trace("paint()");
		if (image_WebcamImage != null)
		{
			g.drawImage(image_WebcamImage, 0, 0, this);
		}
	}

	/**
	 * Runs the MATLAB-compiled face-detection model and renders its first
	 * output array to an image.
	 *
	 * @return the rendered face image, or {@code null} if the MATLAB call fails
	 */
	private static Image getFaceImage()
	{
		try
		{
			// The deployment uses native resources and should be disposed of as soon as possible.
			FaceDetect matlabModel = new FaceDetect();
			try
			{
				// If we had any inputs to our method they would be passed in here.
				Object[] results = matlabModel.mainFace(1);
				// This array uses native resources and should be disposed of as soon as possible.
				MWArray mwArray = (MWArray) results[0];
				try {
					// rendering array to image
					BufferedImage img = Images.renderArrayData((MWNumericArray) mwArray);
					return img;
				}
				finally {
					MWArray.disposeArray(mwArray);
				}
			}
			finally {
				matlabModel.dispose();
			}
		}
		catch (MWException mwe)
		{
			mwe.printStackTrace();
			return null;
		}
	}
}


