package actions;

import java.awt.Dimension;
import java.io.IOException;

import javax.media.CannotRealizeException;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.DataSink;
import javax.media.Format;
import javax.media.IncompatibleSourceException;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.NoDataSinkException;
import javax.media.NoDataSourceException;
import javax.media.NoProcessorException;
import javax.media.Processor;
import javax.media.ProcessorModel;
import javax.media.format.AudioFormat;
import javax.media.format.VideoFormat;
import javax.media.protocol.DataSource;
import javax.media.protocol.FileTypeDescriptor;

/**
 * {@code Action} intended to turn the camera on and capture audio/video via
 * the Java Media Framework (JMF), writing the result to an AVI file.
 *
 * <p>NOTE(review): as written this class is a no-op — every method is an
 * auto-generated stub. The actual capture logic survives only inside the
 * large commented-out region below and does not compile as-is (see the
 * reviewer notes preceding it).
 */
public class CameraOn implements Action {


	/**
	 * No-op stub: the supplied handles are discarded.
	 *
	 * @param hdls handle objects from the caller (currently ignored)
	 */
	@Override
	public void setHandles(Object... hdls) {
		// TODO Auto-generated method stub
		
	}

	/**
	 * No-op stub: always returns {@code null}.
	 *
	 * <p>The intended behavior — merge video + audio capture sources,
	 * encode for ~10 seconds, and sink to {@code file:testcam.avi} —
	 * exists only in the commented-out implementation below.
	 *
	 * @return always {@code null} in the current stub
	 * @throws Exception never thrown by the current stub
	 */
	@Override
	public Object call() throws Exception {
		// TODO Auto-generated method stub
		return null;
	}
	// NOTE(review): disabled JMF implementation follows. It is preserved
	// verbatim; known problems that must be fixed before re-enabling:
	//  - "new AudioFormat(AudioFormat.GSM_MS  LINEAR)" is a syntax error —
	//    two constants with no operator/comma between them;
	//  - the "color depth" section inside videoFormatToString() lost its
	//    inner comment delimiters, so that method would not compile as-is;
	//  - the format check tests "captureVideoFormat == null" and then
	//    "captureVideoFormat instanceof VideoFormat" on the SAME variable —
	//    always false; it presumably meant to test deviceFormat[y];
	//  - "Thread.currentThread().sleep(10000)" invokes the static sleep via
	//    an instance — should be Thread.sleep(10000), and the empty catch of
	//    InterruptedException swallows the interrupt flag (re-interrupt);
	//  - it references project classes DeviceInfo and MyDataSinkListener
	//    that are not imported in this file — verify they still exist;
	//  - the default device/format strings are Windows-specific — confirm
	//    they match the deployment environment.
/*	private boolean debugDeviceList = false;

	private String defaultVideoDeviceName = "Microsoft WDM Image Capture (Win32):0";
	private String defaultAudioDeviceName = "DirectSoundCapture";
	private String defaultVideoFormatString = "size=640x480, encoding=yuv, maxdatalength=614400";
	private String defaultAudioFormatString = "linear, 16000.0 hz, 8-bit, mono, unsigned";

	private CaptureDeviceInfo captureVideoDevice = null;
	private CaptureDeviceInfo captureAudioDevice = null;
	private VideoFormat captureVideoFormat = null;
	private AudioFormat captureAudioFormat = null;

	public Object call() throws Exception {
		String rs = "success.";
		java.util.Vector<CaptureDeviceInfo> deviceListVector = CaptureDeviceManager
				.getDeviceList(null);

		if (deviceListVector == null || deviceListVector.size() == 0) {
			rs = "... error: media device list vector is null, program aborted";
			return rs;
		}

		for (int x = 0; x < deviceListVector.size(); x++) {

			// display device name
			CaptureDeviceInfo deviceInfo = (CaptureDeviceInfo) deviceListVector
					.elementAt(x);

			String deviceInfoText = deviceInfo.getName();

			// display device formats
			Format deviceFormat[] = deviceInfo.getFormats();
			for (int y = 0; y < deviceFormat.length; y++) {
				// serach for default video device
				if (captureVideoDevice == null)
					if (deviceFormat[y] instanceof VideoFormat)
						if (deviceInfo.getName()
								.indexOf(defaultVideoDeviceName) >= 0) {
							captureVideoDevice = deviceInfo;

						}

				// search for default video format
				if (captureVideoDevice == deviceInfo)
					if (captureVideoFormat == null)
						if (captureVideoFormat instanceof VideoFormat
								&& videoFormatToString(
										(VideoFormat) deviceFormat[y]).indexOf(
										defaultVideoFormatString) >= 0) {
							captureVideoFormat = (VideoFormat) deviceFormat[y];

						}

				// serach for default audio device
				if (captureAudioDevice == null)
					if (deviceFormat[y] instanceof AudioFormat)
						if (deviceInfo.getName()
								.indexOf(defaultAudioDeviceName) >= 0) {
							captureAudioDevice = deviceInfo;

						}

				// search for default audio format
				if (captureAudioDevice == deviceInfo)
					if (captureAudioFormat == null)
						if (DeviceInfo.formatToString(deviceFormat[y]).indexOf(
								defaultAudioFormatString) >= 0) {
							captureAudioFormat = (AudioFormat) deviceFormat[y];

						}

			}
		}

		if (captureAudioFormat == null || captureVideoFormat == null) {
			return "There is no avaliable format";
		}
		try {
			// setup video data source
			// -----------------------
			MediaLocator videoMediaLocator = captureVideoDevice.getLocator();
			DataSource videoDataSource = null;

			videoDataSource = javax.media.Manager
					.createDataSource(videoMediaLocator);

			if (!DeviceInfo.setFormat(videoDataSource, captureVideoFormat)) {
				return "Error: unable to set video format - program aborted";

			}

			// setup audio data source
			// -----------------------
			MediaLocator audioMediaLocator = captureAudioDevice.getLocator();
			DataSource audioDataSource = null;

			audioDataSource = javax.media.Manager
					.createDataSource(audioMediaLocator);

			if (!DeviceInfo.setFormat(audioDataSource, captureAudioFormat)) {
				return "Error: unable to set audio format - program aborted";
			}

			// merge the two data sources
			// --------------------------
			DataSource mixedDataSource = null;

			DataSource dArray[] = new DataSource[2];
			dArray[0] = videoDataSource;
			dArray[1] = audioDataSource;
			mixedDataSource = javax.media.Manager
					.createMergingDataSource(dArray);

			// create a new processor
			// ----------------------

			// setup output file format ->> msvideo
			FileTypeDescriptor outputType = new FileTypeDescriptor(
					FileTypeDescriptor.MSVIDEO);

			// setup output video and audio data format
			Format outputFormat[] = new Format[2];
			outputFormat[0] = new VideoFormat(VideoFormat.INDEO50);
			outputFormat[1] = new AudioFormat(AudioFormat.GSM_MS  LINEAR );

			// create processor
			ProcessorModel processorModel = new ProcessorModel(mixedDataSource,
					outputFormat, outputType);
			Processor processor = null;

			processor = Manager.createRealizedProcessor(processorModel);

			// get the output of the processor
			DataSource source = processor.getDataOutput();

			// create a File protocol MediaLocator with the location
			// of the file to which bits are to be written
			MediaLocator dest = new MediaLocator("file:testcam.avi");

			// create a datasink to do the file
			DataSink dataSink = null;
			MyDataSinkListener dataSinkListener = null;

			dataSink = Manager.createDataSink(source, dest);
			dataSinkListener = new MyDataSinkListener();
			dataSink.addDataSinkListener(dataSinkListener);
			dataSink.open();

			// now start the datasink and processor

			dataSink.start();

			processor.start();

			try {
				Thread.currentThread().sleep(10000);
			} catch (InterruptedException ie) {
			} // capture for 10 seconds

			// stop and close the processor when done capturing...
			// close the datasink when EndOfStream event is received...
			processor.stop();
			processor.close();

			dataSinkListener.waitEndOfStream(10);
			dataSink.close();

		} catch (Exception e) {
			rs = e.getMessage();
			return rs;
		}
		return rs;

	}

	private String videoFormatToString(VideoFormat videoFormat) {
		StringBuffer result = new StringBuffer();

		// add width x height (size)
		Dimension d = videoFormat.getSize();
		result.append("size=" + (int) d.getWidth() + "x" + (int) d.getHeight()
				+ ", ");

		
		 * // try to add color depth if (videoFormat instanceof
		 * IndexedColorFormat) { IndexedColorFormat f = (IndexedColorFormat)
		 * videoFormat; result.append("color depth=" + f.getMapSize() + ", "); }
		 

		// add encoding
		result.append("encoding=" + videoFormat.getEncoding() + ", ");

		// add max data length
		result.append("maxdatalength=" + videoFormat.getMaxDataLength() + "");

		return result.toString();
	}*/

	/**
	 * No-op stub: never recognizes any command.
	 *
	 * @param cmd command string to test (currently ignored)
	 * @return always {@code false} in the current stub
	 */
	@Override
	public boolean checkCmd(String cmd) {
		// TODO Auto-generated method stub
		return false;
	}
}
