
package main;

/*******************************************************************************
 * Copyright (c) 2008, 2010 Xuggle Inc.  All rights reserved.
 *  
 * This file is part of Xuggle-Xuggler-Main.
 *
 * Xuggle-Xuggler-Main is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Xuggle-Xuggler-Main is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Xuggle-Xuggler-Main.  If not, see <http://www.gnu.org/licenses/>.
 *******************************************************************************/
import camera.AbstractCamera;
import camera.IPCam;
import com.jme3.math.Vector3f;
import gui.MainFrame;
import ij.ImagePlus;
import ij.plugin.filter.GaussianBlur;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;

import java.awt.Color;
import java.awt.Component;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;

import javax.imageio.ImageIO;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JSlider;
import javax.swing.JTextField;
import javax.swing.Timer;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import misc.VideoCreator;

import trackingAlgorithms.FrameProcessor;
import trackingAlgorithms.videoprocessing.RgbThresholdEffect;
import trackingAlgorithms.videoprocessing.RgbVideoEffect;

import com.sun.media.ui.Slider;
import com.xuggle.xuggler.Global;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IPixelFormat;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IVideoPicture;
import com.xuggle.xuggler.IVideoResampler;
import com.xuggle.xuggler.Utils;

import environment.AbstractEnvironment;
import environment.Environment1;
import environment.Environment2;
import java.util.ArrayList;

/**
 * Takes a media container (here: an IP-camera MJPEG URL), finds the first
 * video stream, decodes that stream, and — paced by the container's
 * timestamps and a Swing timer — hands selected frames to the tracking
 * pipeline ({@link FrameProcessor}) and displays them in a window.
 *
 * @author aclarke
 *
 */
public class MasterCamPlayer extends Thread implements ActionListener {

	/**
	 * Frame-capture gate: set to {@code true} by the Swing timer (on the
	 * Event Dispatch Thread, see {@link #actionPerformed}) and cleared by
	 * the decode loop running on this thread. Declared {@code volatile} so
	 * a write on one thread is guaranteed visible to the other.
	 */
	private volatile boolean capture = true;
	/** Period of the frame-capture timer, in milliseconds. */
	private int timerDelay = 200;
	private MainFrame frame;
	private Timer timer;
	private FrameProcessor frameProcessor;
	/** Becomes true once the warm-up frames have been discarded. */
	private boolean start;
	/** Number of frames processed so far; used to name the debug PNG dumps. */
	int num = 0;
	private JSlider minBlobSizeSlider;
	private JSlider thresholdSlider;

	private JButton moveRight;
	private JButton moveLeft;
	private JButton moveUp;
	private JButton moveDown;

	private JButton resetCameras;

	private JList<String> camerasList;
	private JCheckBox runTracking;
	/** Frames discarded so far during the warm-up phase. */
	private int numberToStart = 0;
	// starting delay: number of discarded frames at the start; we discard
	// some frames in the beginning (presumably to let the stream settle —
	// TODO confirm).
	private final int STARTING_DELAY = 50;
	private AbstractEnvironment env;
	private ImagePlus iplus;
	private ImageProcessor iprocessor;
	/** URL (or file name) of the media container to decode. */
	String filename;
	private ArrayList<AbstractCamera> cameras;
	VideoCreator vc;

	/**
	 * Builds a {@link JSlider} with absolute bounds and tick/label settings.
	 * <p>
	 * NOTE(review): per the {@code JSlider(int, int, int, int)} constructor,
	 * {@code startValue} is actually the slider <em>minimum</em>,
	 * {@code max} the maximum and {@code tick} the <em>initial value</em>;
	 * the parameter names are kept for source compatibility.
	 *
	 * @param orientation {@code JSlider.HORIZONTAL} or {@code JSlider.VERTICAL}
	 * @param startValue  minimum value of the slider
	 * @param max         maximum value of the slider
	 * @param tick        initial value of the slider
	 * @param startX      x coordinate of the slider bounds
	 * @param startY      y coordinate of the slider bounds
	 * @param width       width of the slider bounds
	 * @param height      height of the slider bounds
	 * @param minorTick   minor tick spacing
	 * @param majorTick   major tick spacing
	 * @param drawTicks   whether tick marks are painted
	 * @param drawLabel   whether tick labels are painted
	 * @return the configured slider
	 */
	public JSlider createJSlider(int orientation, int startValue, int max,
			int tick, int startX, int startY, int width, int height,
			int minorTick, int majorTick, boolean drawTicks, boolean drawLabel) {
		JSlider slider = new JSlider(orientation, startValue, max, tick);
		slider.setMajorTickSpacing(majorTick);
		slider.setMinorTickSpacing(minorTick);
		slider.setBounds(startX, startY, width, height);
		slider.setPaintTicks(drawTicks);
		slider.setPaintLabels(drawLabel);
		return slider;
	}

	/**
	 * Sets up the camera list, tracking pipeline and the complete Swing UI
	 * (sliders, manual-move buttons, camera list, tracking toggle), then
	 * starts the frame-capture timer.
	 * <p>
	 * NOTE(review): all Swing components are created and shown from the
	 * calling thread, not the Event Dispatch Thread — confirm this is
	 * acceptable for this application.
	 *
	 * @param ip URL (or file name) of the media container to decode
	 */
	public MasterCamPlayer(String ip) {
		cameras = new ArrayList<AbstractCamera>();

		AbstractCamera cam;

		// adding cameras: each IPCam gets a world position, a look-at
		// vector, its host address, a display name and an image width.
		cam = new IPCam(new Vector3f(350, 0, 5), new Vector3f(320, 0, 300),
				"192.168.1.6", "cam1", 320);// 9
		cameras.add(cam);
		cam = new IPCam(new Vector3f(-40, 0, 260), new Vector3f(-10, 0, 10),
				"192.168.1.7", "cam2", 320);
		cameras.add(cam);
		cam = new IPCam(new Vector3f(360, 0, 260), new Vector3f(340, 0, 10),
				"192.168.1.9", "cam3", 320);// 7
		cameras.add(cam);
		cam = new IPCam(new Vector3f(-30, 0, 5), new Vector3f(-10, 0, 232),
				"192.168.1.8", "cam4", 320);// 6
		cameras.add(cam);
		timer = new Timer(timerDelay, this);
		env = new Environment1();

		minBlobSizeSlider = new JSlider(JSlider.VERTICAL, 0, 2000, 300);
		thresholdSlider = new JSlider(JSlider.VERTICAL, 0, 200, 20);

		frameProcessor = new FrameProcessor(env, cameras, minBlobSizeSlider,
				thresholdSlider);

		frame = new MainFrame();
		frame.setVisible(true);
		iplus = new ImagePlus();
		filename = ip;
		vc = new VideoCreator();

		moveRight = new JButton("Right");
		moveLeft = new JButton("Left");
		moveUp = new JButton("Move Up");
		moveDown = new JButton("Move Down");
		resetCameras = new JButton("Reset");
		moveRight.setBounds(150, 500, 70, 30);
		moveLeft.setBounds(20, 500, 70, 30);
		moveUp.setBounds(70, 430, 100, 30);
		moveDown.setBounds(70, 570, 100, 30);
		resetCameras.setBounds(70, 630, 100, 30);

		runTracking = new JCheckBox();
		runTracking.setBounds(110, 500, 20, 20);

		/* minimum blob size slider */
		minBlobSizeSlider.setMajorTickSpacing(400);
		minBlobSizeSlider.setMinorTickSpacing(100);
		minBlobSizeSlider.setPaintTicks(true);
		minBlobSizeSlider.setPaintLabels(true);
		minBlobSizeSlider.setBounds(10, 10, 200, 150);

		/* Threshold slider */
		thresholdSlider.setMajorTickSpacing(20);
		thresholdSlider.setMinorTickSpacing(5);
		thresholdSlider.setPaintTicks(true);
		thresholdSlider.setPaintLabels(true);
		thresholdSlider.setBounds(10, 170, 200, 200);

		Font font = new Font("Serif", Font.ITALIC, 15);
		minBlobSizeSlider.setFont(font);
		thresholdSlider.setFont(font);
		frame.mp.controlPanel.add(minBlobSizeSlider);
		frame.mp.controlPanel.add(thresholdSlider);
		frame.mp.controlPanel.add(moveRight);
		frame.mp.controlPanel.add(moveLeft);
		frame.mp.controlPanel.add(moveUp);
		frame.mp.controlPanel.add(moveDown);
		frame.mp.controlPanel.add(runTracking);
		frame.mp.controlPanel.add(resetCameras);

		// Selection list showing the display name of every camera.
		String[] camsNams = new String[cameras.size()];
		for (int i = 0; i < cameras.size(); i++) {
			camsNams[i] = cameras.get(i).getName();
		}
		camerasList = new JList<>(camsNams);
		camerasList.setBounds(230, 170, 100, 200);
		frame.mp.controlPanel.add(camerasList);

		camerasList.addMouseListener(new MouseListener() {

			@Override
			public void mouseReleased(MouseEvent arg0) {
				// not used
			}

			@Override
			public void mousePressed(MouseEvent arg0) {
				// not used
			}

			@Override
			public void mouseExited(MouseEvent arg0) {
				// not used
			}

			@Override
			public void mouseEntered(MouseEvent arg0) {
				// not used
			}

			@Override
			public void mouseClicked(MouseEvent e) {
				// debug output of the selected camera index
				System.out.println(camerasList.getSelectedIndex());
			}
		});

		// Manual nudge buttons: shift the tracker's test blob by 5 px to
		// exercise the position-estimation code without a moving target.
		moveRight.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				frameProcessor.tracker.testingBlob.bounds.x += 5;
			}
		});
		moveLeft.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				frameProcessor.tracker.testingBlob.bounds.x -= 5;
			}
		});
		moveUp.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				frameProcessor.tracker.testingBlob.bounds.y += 5;
			}
		});
		moveDown.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				frameProcessor.tracker.testingBlob.bounds.y -= 5;
			}
		});

		// Toggles the tracker's testing-positions mode on every click.
		runTracking.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				frameProcessor.tracker.testingPositions = !frameProcessor.tracker.testingPositions;
			}
		});

		// Resets every camera to its initial state.
		resetCameras.addActionListener(new ActionListener() {

			@Override
			public void actionPerformed(ActionEvent arg0) {
				for (AbstractCamera cam : cameras)
					cam.reset();
			}
		});

		timer.start();
	}

	/**
	 * Opens the media container (camera URL), finds and opens the first
	 * video stream, then decodes packet by packet. Decoded frames are
	 * paced to <i>roughly</i> the stream's own timing; whenever the Swing
	 * timer has raised {@link #capture}, the current frame is converted to
	 * gray scale, run through the {@link FrameProcessor}, drawn on the
	 * window and dumped to {@code img_<n>.png} for test purposes.
	 *
	 * @throws RuntimeException if Xuggler lacks color-space conversion, or
	 *                          the stream cannot be opened, decoded or
	 *                          resampled
	 * @throws IllegalArgumentException if the container cannot be opened
	 */
	@Override
	public void run() {
		// The constructor already started the timer; restarting is harmless
		// and keeps run() self-sufficient.
		timer.start();
		// Let's make sure that we can actually convert video pixel formats.
		if (!IVideoResampler
				.isSupported(IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION)) {
			throw new RuntimeException("you must install the GPL version"
					+ " of Xuggler (with IVideoResampler support) for "
					+ "this demo to work");
		}

		// Create a Xuggler container object
		IContainer container = IContainer.make();

		// Open up the container
		if (container.open(filename, IContainer.Type.READ, null) < 0) {
			throw new IllegalArgumentException("could not open file: "
					+ filename);
		}

		// query how many streams the call to open found
		int numStreams = container.getNumStreams();

		// and iterate through the streams to find the first video stream
		int videoStreamId = -1;
		IStreamCoder videoCoder = null;
		for (int i = 0; i < numStreams; i++) {
			// Find the stream object
			IStream stream = container.getStream(i);
			// Get the pre-configured decoder that can decode this stream;
			IStreamCoder coder = stream.getStreamCoder();

			if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
				videoStreamId = i;
				videoCoder = coder;
				break;
			}
		}
		if (videoStreamId == -1) {
			throw new RuntimeException(
					"could not find video stream in container: " + filename);
		}

		/*
		 * Now we have found the video stream in this file. Let's open up our
		 * decoder so it can do work.
		 */
		if (videoCoder.open() < 0) {
			throw new RuntimeException(
					"could not open video decoder for container: " + filename);
		}

		IVideoResampler resampler = null;
		if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
			// if this stream is not in BGR24, we're going to need to
			// convert it. The VideoResampler does that for us.
			resampler = IVideoResampler.make(videoCoder.getWidth(),
					videoCoder.getHeight(), IPixelFormat.Type.BGR24,
					videoCoder.getWidth(), videoCoder.getHeight(),
					videoCoder.getPixelType());
			if (resampler == null) {
				throw new RuntimeException("could not create color space "
						+ "resampler for: " + filename);
			}
		}

		/*
		 * Now, we start walking through the container looking at each packet.
		 */
		IPacket packet = IPacket.make();
		long firstTimestampInStream = Global.NO_PTS;
		long systemClockStartTime = 0;
		while (container.readNextPacket(packet) >= 0) {

			/*
			 * Now we have a packet, let's see if it belongs to our video stream
			 */
			if (packet.getStreamIndex() == videoStreamId) {
				/*
				 * We allocate a new picture to get the data out of Xuggler
				 */
				IVideoPicture picture = IVideoPicture.make(
						videoCoder.getPixelType(), videoCoder.getWidth(),
						videoCoder.getHeight());

				int offset = 0;
				while (offset < packet.getSize()) {
					/*
					 * Now, we decode the video, checking for any errors.
					 */
					int bytesDecoded = videoCoder.decodeVideo(picture, packet,
							offset);
					if (bytesDecoded < 0) {
						throw new RuntimeException(
								"got error decoding video in: " + filename);
					}
					offset += bytesDecoded;

					/*
					 * Some decoders will consume data in a packet, but will not
					 * be able to construct a full video picture yet. Therefore
					 * you should always check if you got a complete picture
					 * from the decoder
					 */
					if (picture.isComplete()) {
						IVideoPicture newPic = picture;

						/*
						 * If the resampler is not null, that means we didn't
						 * get the video in BGR24 format and need to convert it
						 * into BGR24 format.
						 */
						if (resampler != null) {
							// we must resample
							newPic = IVideoPicture.make(
									resampler.getOutputPixelFormat(),
									picture.getWidth(), picture.getHeight());

							if (resampler.resample(newPic, picture) < 0) {
								throw new RuntimeException(
										"could not resample video from: "
												+ filename);
							}
						}
						if (newPic.getPixelType() != IPixelFormat.Type.BGR24) {
							throw new RuntimeException("could not decode video"
									+ " as BGR 24 bit data in: " + filename);
						}

						/**
						 * We could just display the images as quickly as we
						 * decode them, but it turns out we can decode a lot
						 * faster than you think.
						 *
						 * So instead, the following code does a poor-man's
						 * version of trying to match up the frame-rate
						 * requested for each IVideoPicture with the system
						 * clock time on your computer.
						 *
						 * Remember that all Xuggler IAudioSamples and
						 * IVideoPicture objects always give timestamps in
						 * Microseconds, relative to the first decoded item. If
						 * instead you used the packet timestamps, they can be
						 * in different units depending on your IContainer, and
						 * IStream and things can get hairy quickly.
						 */
						if (firstTimestampInStream == Global.NO_PTS) {
							// This is our first time through
							firstTimestampInStream = picture.getTimeStamp();
							// get the starting clock time so we can hold up
							// frames until the right time.
							systemClockStartTime = System.currentTimeMillis();
						} else {
							long systemClockCurrentTime = System
									.currentTimeMillis();
							long millisecondsClockTimeSinceStartofVideo = systemClockCurrentTime
									- systemClockStartTime;
							// compute how long for this frame since the first
							// frame in the stream.
							// remember that IVideoPicture and IAudioSamples
							// timestamps are always in MICROSECONDS,
							// so we divide by 1000 to get milliseconds.
							long millisecondsStreamTimeSinceStartOfVideo = (picture
									.getTimeStamp() - firstTimestampInStream) / 1000;
							// no tolerance: frames are never shown early
							final long millisecondsTolerance = 0;
							final long millisecondsToSleep = (millisecondsStreamTimeSinceStartOfVideo - (millisecondsClockTimeSinceStartofVideo + millisecondsTolerance));
							if (millisecondsToSleep > 0) {
								try {
									Thread.sleep(millisecondsToSleep);
								} catch (InterruptedException e) {
									// we might get this when the user closes
									// the dialog box; restore the interrupt
									// status, release the native resources
									// (the original code leaked them here)
									// and return from the method.
									Thread.currentThread().interrupt();
									videoCoder.close();
									container.close();
									return;
								}
							}
						}

						// And finally, convert the BGR24 to a Java buffered
						// image — but only when the Swing timer has asked for
						// a frame (capture) since the last one we processed.
						if (capture) {
							capture = false;
							if (!start) {
								// warm-up phase: discard the first
								// STARTING_DELAY frames.
								numberToStart++;
								if (numberToStart > STARTING_DELAY) {
									start = true;
									// NOTE(review): this only updates the
									// field; the running Timer keeps its
									// original delay (setDelay(...) would be
									// needed to actually change it).
									timerDelay = 200;
									System.out.println("start");
								}
							} else {
								BufferedImage javaImage = Utils
										.videoPictureToImage(newPic);
								long st = System.currentTimeMillis();
								iplus.setImage(javaImage);
								iprocessor = iplus.getProcessor();
								// keep a colored copy for display/processing
								ImageProcessor cip = iprocessor.duplicate();
								BufferedImage coloredImage = cip
										.getBufferedImage();

								if (RgbVideoEffect.testing)
									RgbVideoEffect.coloredInputImage = iprocessor
											.duplicate();
								// convert to gray scale image
								iprocessor = iprocessor.convertToByte(true);
								// apply median filter to help in removing
								// camera noise.
								// iprocessor.medianFilter();
								javaImage = iprocessor.getBufferedImage();

								// process input frame (gray for detection,
								// colored for drawing overlays)
								frameProcessor.processFrame(javaImage,
										coloredImage);
								num++;

								// draw the frame on the jframe
								frame.drawImage(coloredImage);
								long end = System.currentTimeMillis();

								/** write the input frame for test purposes */
								try {
									ImageIO.write(coloredImage, "png",
											new File("img_" + num + ".png"));
								} catch (IOException e) {
									e.printStackTrace();
								}
							}
						}
					}
				}
			}
			// Packets that are not part of our video stream are silently
			// dropped.

		}
		/*
		 * Technically since we're exiting anyway, these will be cleaned up by
		 * the garbage collector... but because we're nice people and want to be
		 * invited places for Christmas, we're going to show how to clean up.
		 */
		if (videoCoder != null) {
			videoCoder.close();
			videoCoder = null;
		}
		if (container != null) {
			container.close();
			container = null;
		}
		closeJavaWindow();

	}

	/**
	 * Entry point: builds the player for the hard-coded camera URL and runs
	 * the decode loop on its own thread.
	 *
	 * @param args ignored
	 * @throws MalformedURLException never thrown here; kept for source
	 *                               compatibility
	 */
	public static void main(String[] args) throws MalformedURLException {
		String filename = "http://192.168.1.101/video.mjpg";
		MasterCamPlayer cap = new MasterCamPlayer(filename);
		// start() runs the decode loop on this Thread subclass's own thread;
		// the previous cap.run() executed it synchronously on the main
		// thread, defeating the purpose of extending Thread.
		cap.start();
	}

	/**
	 * Forces the swing thread to terminate; I'm sure there is a right way to do
	 * this in swing, but this works too.
	 */
	private static void closeJavaWindow() {
		System.exit(0);
	}

	/**
	 * Fired on the Event Dispatch Thread by the Swing {@link #timer}: raises
	 * the {@link #capture} flag so the decode loop processes the next frame.
	 * (The previous try/catch for NullPointerException was dead code — a
	 * boolean assignment cannot throw.)
	 */
	@Override
	public void actionPerformed(ActionEvent e) {
		if (e.getSource() == timer) {
			capture = true;
		}
	}
}