package misc;

/*******************************************************************************
 * Copyright (c) 2008, 2010 Xuggle Inc.  All rights reserved.
 *  
 * This file is part of Xuggle-Xuggler-Main.
 *
 * Xuggle-Xuggler-Main is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Xuggle-Xuggler-Main is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Xuggle-Xuggler-Main.  If not, see <http://www.gnu.org/licenses/>.
 *******************************************************************************/
import gui.MainFrame;
import ij.ImagePlus;
import ij.process.ImageProcessor;

import java.awt.Component;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;

import javax.imageio.ImageIO;
import javax.swing.BorderFactory;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.Timer;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import trackingAlgorithms.FrameProcessor;

import com.xuggle.xuggler.Global;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IPixelFormat;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IVideoPicture;
import com.xuggle.xuggler.IVideoResampler;
import com.xuggle.xuggler.Utils;

import environment.AbstractEnvironment;
import environment.Environment1;
import environment.Environment2;

/**
 * Takes a media container (here a recorded AVI file; a camera URL also
 * works), finds the first video stream, decodes it, and displays the frames
 * in a window at <i>roughly</i> the frame rate specified by the container.
 * Captured frames are additionally converted to grayscale, median-filtered,
 * and saved to disk as numbered PNG files.
 * @author aclarke
 *
 */
public class CameraPlayer implements ActionListener {

    /** Set true on each timer tick; the decode loop consumes one frame per tick. */
    private boolean capture = true;
    /** Milliseconds between capture ticks; lowered once processing starts. */
    private int timerDelay = 200;
    private MainFrame frame;
    private Timer timer;
    /**
     * Frame-processing pipeline. NOTE(review): never initialized in this class
     * (the wiring line in the constructor is a TODO), so the call in run() is
     * guarded against null.
     */
    private FrameProcessor frameProcessor;
    /** True once the initial STARTING_DELAY captured frames have been skipped. */
    private boolean start;
    /** Number of frames processed so far; used to name the saved PNG files. */
    int num = 0;
    private JSlider minBlobSizeSlider;
    private JSlider thresholdSlider;
    private int numberToStart = 0;  // frames counted during the starting delay
    private final int STARTING_DELAY = 50;
    private AbstractEnvironment env;
    private ImagePlus iplus;
    private ImageProcessor iprocessor;

    /**
     * Builds the UI (main frame plus two vertical tuning sliders) and starts
     * the capture timer.
     */
    public CameraPlayer() {
        timer = new Timer(timerDelay, this);
        env = new Environment1();
        minBlobSizeSlider = new JSlider(JSlider.VERTICAL, 0, 2000, 300);
        thresholdSlider = new JSlider(JSlider.VERTICAL, 0, 200, 20);
        // TODO(review): enable the tracking pipeline, e.g.:
        // frameProcessor = new FrameProcessor(env, minBlobSizeSlider, thresholdSlider);
        frame = new MainFrame();
        frame.setVisible(true);
        iplus = new ImagePlus();

        // Tick marks and labels for the minimum-blob-size slider.
        minBlobSizeSlider.setMajorTickSpacing(400);
        minBlobSizeSlider.setMinorTickSpacing(100);
        minBlobSizeSlider.setPaintTicks(true);
        minBlobSizeSlider.setPaintLabels(true);
        minBlobSizeSlider.setBounds(10, 10, 200, 250);

        // Tick marks and labels for the threshold slider.
        thresholdSlider.setMajorTickSpacing(20);
        thresholdSlider.setMinorTickSpacing(5);
        thresholdSlider.setPaintTicks(true);
        thresholdSlider.setPaintLabels(true);
        thresholdSlider.setBounds(10, 270, 200, 200);

        Font font = new Font("Serif", Font.ITALIC, 15);
        minBlobSizeSlider.setFont(font);
        thresholdSlider.setFont(font);
        frame.mp.controlPanel.add(minBlobSizeSlider);
        frame.mp.controlPanel.add(thresholdSlider);
        timer.start();
    }

    /**
     * Opens the media container (a recorded AVI file; a camera MJPEG URL such
     * as {@code http://192.168.1.101/video.mjpg} also works), decodes the
     * first video stream, and displays/saves frames with <i>roughly</i> the
     * right timing.
     *
     * @throws IllegalArgumentException if the input file cannot be opened
     * @throws RuntimeException if no video stream exists or decoding fails
     */
    public void run() {
        String filename = "D:\\Project\\Experiment 2\\master\\will be used\\2012021005312300.avi";

        // Let's make sure that we can actually convert video pixel formats.
        if (!IVideoResampler.isSupported(
                IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION)) {
            throw new RuntimeException("you must install the GPL version"
                    + " of Xuggler (with IVideoResampler support) for "
                    + "this demo to work");
        }

        // Create a Xuggler container object and open the input.
        IContainer container = IContainer.make();
        if (container.open(filename, IContainer.Type.READ, null) < 0) {
            throw new IllegalArgumentException("could not open file: " + filename);
        }

        // Iterate through the streams to find the first video stream.
        int numStreams = container.getNumStreams();
        int videoStreamId = -1;
        IStreamCoder videoCoder = null;
        for (int i = 0; i < numStreams; i++) {
            IStream stream = container.getStream(i);
            // Get the pre-configured decoder that can decode this stream.
            IStreamCoder coder = stream.getStreamCoder();
            if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
                videoStreamId = i;
                videoCoder = coder;
                break;
            }
        }
        if (videoStreamId == -1) {
            throw new RuntimeException("could not find video stream in container: "
                    + filename);
        }

        // Open our decoder so it can do work.
        if (videoCoder.open() < 0) {
            throw new RuntimeException("could not open video decoder for container: "
                    + filename);
        }

        // If this stream is not in BGR24 we need a resampler to convert it.
        IVideoResampler resampler = null;
        if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
            resampler = IVideoResampler.make(videoCoder.getWidth(),
                    videoCoder.getHeight(), IPixelFormat.Type.BGR24,
                    videoCoder.getWidth(), videoCoder.getHeight(), videoCoder.getPixelType());
            if (resampler == null) {
                throw new RuntimeException("could not create color space "
                        + "resampler for: " + filename);
            }
        }

        // Now we start walking through the container looking at each packet.
        IPacket packet = IPacket.make();
        long firstTimestampInStream = Global.NO_PTS;
        long systemClockStartTime = 0;
        while (container.readNextPacket(packet) >= 0) {

            // Packets that aren't part of our video stream are silently dropped.
            if (packet.getStreamIndex() != videoStreamId) {
                continue;
            }

            // Allocate a new picture to get the data out of Xuggler.
            IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(),
                    videoCoder.getWidth(), videoCoder.getHeight());

            int offset = 0;
            while (offset < packet.getSize()) {
                // Decode the video, checking for any errors.
                int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
                if (bytesDecoded < 0) {
                    throw new RuntimeException("got error decoding video in: "
                            + filename);
                }
                offset += bytesDecoded;

                // Some decoders consume data from a packet without yet producing
                // a full video picture, so always check for completeness.
                if (!picture.isComplete()) {
                    continue;
                }

                IVideoPicture newPic = picture;

                // A non-null resampler means the decoded picture is not BGR24
                // and needs converting.
                if (resampler != null) {
                    newPic = IVideoPicture.make(resampler.getOutputPixelFormat(),
                            picture.getWidth(), picture.getHeight());
                    if (resampler.resample(newPic, picture) < 0) {
                        throw new RuntimeException("could not resample video from: "
                                + filename);
                    }
                }
                if (newPic.getPixelType() != IPixelFormat.Type.BGR24) {
                    throw new RuntimeException("could not decode video"
                            + " as BGR 24 bit data in: " + filename);
                }

                /*
                 * We can decode much faster than real time, so do a poor-man's
                 * match of each picture's timestamp against the system clock.
                 * Xuggler IVideoPicture timestamps are always in MICROSECONDS,
                 * relative to the first decoded item (packet timestamps, by
                 * contrast, vary in units per container/stream).
                 */
                if (firstTimestampInStream == Global.NO_PTS) {
                    // First frame: remember both clocks so later frames can be
                    // held back until the right time.
                    firstTimestampInStream = picture.getTimeStamp();
                    systemClockStartTime = System.currentTimeMillis();
                } else {
                    long systemClockCurrentTime = System.currentTimeMillis();
                    long millisecondsClockTimeSinceStartofVideo =
                            systemClockCurrentTime - systemClockStartTime;
                    // Picture timestamps are microseconds; divide by 1000 to
                    // get milliseconds since the first frame.
                    long millisecondsStreamTimeSinceStartOfVideo =
                            (picture.getTimeStamp() - firstTimestampInStream) / 1000;
                    final long millisecondsTolerance = 0; // no tolerance: sleep the full difference
                    final long millisecondsToSleep =
                            (millisecondsStreamTimeSinceStartOfVideo
                            - (millisecondsClockTimeSinceStartofVideo
                            + millisecondsTolerance));
                    if (millisecondsToSleep > 0) {
                        try {
                            Thread.sleep(millisecondsToSleep);
                        } catch (InterruptedException e) {
                            // We might get this when the user closes the window;
                            // restore the interrupt flag and stop decoding.
                            Thread.currentThread().interrupt();
                            return;
                        }
                    }
                }

                // Only act on frames released by a timer tick.
                if (!capture) {
                    continue;
                }
                capture = false;

                if (!start) {
                    // Skip the first STARTING_DELAY captured frames before any
                    // processing begins.
                    numberToStart++;
                    if (numberToStart > STARTING_DELAY) {
                        start = true;
                        timerDelay = 100;
                        // BUGFIX: the new delay was previously assigned but
                        // never applied to the running timer.
                        timer.setDelay(timerDelay);
                        System.out.println("start");
                    }
                    continue;
                }

                // Convert BGR24 to a Java BufferedImage, derive a grayscale,
                // median-filtered copy, and display/save both.
                BufferedImage javaImage = Utils.videoPictureToImage(newPic);

                long st = System.currentTimeMillis();

                iplus.setImage(javaImage);
                iprocessor = iplus.getProcessor();
                // Keep an untouched copy of the original frame for display.
                ImageProcessor ip2 = iprocessor.duplicate();

                // Scaled grayscale conversion, then a median filter to suppress
                // salt-and-pepper noise.
                iprocessor = iprocessor.convertToByte(true);
                iprocessor.medianFilter();
                javaImage = iprocessor.getBufferedImage();

                long end = System.currentTimeMillis();
                System.out.println(end - st);  // per-frame processing time, ms

                // BUGFIX: frameProcessor is never initialized in this class, so
                // an unconditional call crashed with an NPE as soon as the
                // starting delay elapsed; guard until the pipeline is wired up.
                if (frameProcessor != null) {
                    frameProcessor.processFrame(javaImage, ip2.getBufferedImage());
                }

                num++;
                frame.drawImage(ip2.getBufferedImage());

                try {
                    ImageIO.write(javaImage, "png", new File("img_" + num + ".png"));
                } catch (IOException e) {
                    // Saving a frame is best-effort; log and keep decoding.
                    e.printStackTrace();
                }
            }
        }

        /*
         * Technically since we're exiting anyway, these will be cleaned up by
         * the garbage collector... but close them explicitly anyway.
         */
        if (videoCoder != null) {
            videoCoder.close();
            videoCoder = null;
        }
        if (container != null) {
            container.close();
            container = null;
        }
        closeJavaWindow();
    }

    /**
     * Blacks out every pixel that is not near-white: any pixel whose blue,
     * green, or red channel value is below 250 is set to 0. Pixels are
     * modified in place. (Currently unused; kept for experimentation with the
     * processing pipeline.)
     *
     * @param javaImage processor whose packed-RGB pixels are modified in place
     */
    private void removeNoise(ImageProcessor javaImage) {
        for (int i = 0; i < javaImage.getWidth(); i++) {
            for (int j = 0; j < javaImage.getHeight(); j++) {
                int c = javaImage.get(i, j);
                int bb = (c & 0xff);
                int gg = ((c >> 8) & 0xff);
                int rr = ((c >> 16) & 0xff);
                if (bb < 250 || gg < 250 || rr < 250) {
                    javaImage.set(i, j, 0);
                }
            }
        }
    }

    /**
     * Entry point: builds the player UI and starts decoding.
     */
    public static void main(String[] args) throws MalformedURLException {
        CameraPlayer cap = new CameraPlayer();
        cap.run();
    }

    /**
     * Forces the swing thread to terminate; I'm sure there is a right
     * way to do this in swing, but this works too.
     */
    private static void closeJavaWindow() {
        System.exit(0);
    }

    /**
     * Timer callback: releases the decode loop to capture one more frame.
     * (The previous try/catch around this assignment caught a
     * NullPointerException that a boolean field assignment can never throw;
     * removed as dead code.)
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        if (e.getSource() == timer) {
            capture = true;
        }
    }
}