import java.awt.image.BufferedImage;
import java.io.DataInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import com.xuggle.xuggler.Global;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IContainer.Type;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainerFormat;
import com.xuggle.xuggler.IError;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IPixelFormat;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IVideoPicture;
import com.xuggle.xuggler.IVideoResampler;
import com.xuggle.xuggler.Utils;
import com.xuggle.xuggler.demos.VideoImage;
import com.xuggle.xuggler.io.IURLProtocolHandler;

/**
 * Plays a network video stream in a Swing window using Xuggler.
 *
 * <p>Opens the container at the given URL, finds the first video stream,
 * decodes each packet, converts frames to BGR24 (resampling when needed),
 * and paints them into a {@link VideoImage} window, pacing playback against
 * the system clock.
 */
public class PlayNetworkStream {

	/** URL (or local path) of the container to play. */
	String in;

	/**
	 * Creates a player for the given stream location.
	 *
	 * @param url the URL Xuggler should open (e.g. an RTSP/HTTP address or a
	 *            local file path)
	 */
	public PlayNetworkStream(String url) {
		in = url;
	}

	/**
	 * Opens the container, decodes the first video stream it contains, and
	 * displays the frames until the container is exhausted. Terminates the
	 * JVM via {@link #closeJavaWindow()} when playback finishes.
	 *
	 * @throws RuntimeException if the container cannot be opened, contains no
	 *         video stream, the decoder cannot be opened, or decoding or
	 *         resampling fails
	 */
	public void exec() {

		System.out.println("exec started...");

		IContainer container = IContainer.make();
		// If IContainer cannot guess the format, replace null with an
		// explicit IContainerFormat. Check the return code instead of
		// silently continuing with an unopened container.
		if (container.open(in, IContainer.Type.READ, null) < 0)
			throw new RuntimeException("could not open container: " + in);
		System.out.println("container1.isopened: " + container.isOpened());

		int numStreams = container.getNumStreams();
		System.out.println("numStreams: " + numStreams);

		// Iterate through the streams to find the first video stream.
		int videoStreamId = -1;
		IStreamCoder videoCoder = null;
		for (int i = 0; i < numStreams; i++) {
			// Find the stream object.
			IStream stream = container.getStream(i);
			// Get the pre-configured decoder that can decode this stream.
			IStreamCoder coder = stream.getStreamCoder();
			System.out.println(coder);
			// NOTE(review): only the first video stream is used; any
			// additional video streams in the container are ignored.
			if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
				videoStreamId = i;
				videoCoder = coder;
				break;
			}
		}

		if (videoStreamId == -1)
			throw new RuntimeException(
					"could not find video stream in container: " + in);

		/*
		 * Now we have found the video stream in this file. Open the decoder
		 * so it can do work.
		 */
		if (videoCoder.open() < 0)
			throw new RuntimeException("could not open video decoder for container: "
					+ in);

		IVideoResampler resampler = null;
		if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
			// The stream is not BGR24; a resampler converts each decoded
			// picture into BGR24 so it can become a BufferedImage.
			resampler = IVideoResampler.make(videoCoder.getWidth(),
					videoCoder.getHeight(), IPixelFormat.Type.BGR24,
					videoCoder.getWidth(), videoCoder.getHeight(), videoCoder.getPixelType());
			if (resampler == null)
				throw new RuntimeException("could not create color space " +
						"resampler for: " + in);
		}

		// Open the Swing window before decoding starts.
		System.out.println("adu3");
		openJavaWindow();
		System.out.println("adu4");

		// Walk through the container looking at each packet.
		IPacket packet = IPacket.make();
		long firstTimestampInStream = Global.NO_PTS;
		long systemClockStartTime = 0;
		while (container.readNextPacket(packet) >= 0) {
			// Only packets that belong to our video stream are decoded.
			if (packet.getStreamIndex() == videoStreamId) {
				// Allocate a new picture to get the data out of Xuggler.
				IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(),
						videoCoder.getWidth(), videoCoder.getHeight());

				int offset = 0;
				while (offset < packet.getSize()) {
					// Decode the video, checking for any errors.
					int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
					if (bytesDecoded < 0)
						throw new RuntimeException("Error decoding video in: " + in);
					if (bytesDecoded == 0)
						break; // decoder made no progress; avoid an infinite loop
					offset += bytesDecoded;

					/*
					 * Some decoders will consume data in a packet but not yet
					 * be able to construct a full video picture, so always
					 * check if we got a complete picture from the decoder.
					 */
					if (picture.isComplete()) {
						IVideoPicture newPic = picture;
						/*
						 * A non-null resampler means the decoded picture is
						 * not in BGR24 format and must be converted.
						 */
						if (resampler != null) {
							// we must resample
							newPic = IVideoPicture.make(resampler.getOutputPixelFormat(),
									picture.getWidth(), picture.getHeight());
							if (resampler.resample(newPic, picture) < 0)
								throw new RuntimeException("could not resample video from: "
										+ in);
						}
						if (newPic.getPixelType() != IPixelFormat.Type.BGR24)
							throw new RuntimeException("could not decode video" +
									" as BGR 24 bit data in: " + in);

						/**
						 * We could display images as fast as we decode them,
						 * but decoding is typically much faster than real
						 * time. So the following does a poor-man's pacing of
						 * each IVideoPicture against the system clock.
						 *
						 * Xuggler IVideoPicture timestamps are always in
						 * MICROSECONDS relative to the first decoded item;
						 * packet timestamps may use container/stream-specific
						 * units and are harder to work with.
						 */
						if (firstTimestampInStream == Global.NO_PTS) {
							// First frame: record both reference points so we
							// can hold up later frames until the right time.
							firstTimestampInStream = picture.getTimeStamp();
							systemClockStartTime = System.currentTimeMillis();
						} else {
							long systemClockCurrentTime = System.currentTimeMillis();
							long millisecondsClockTimeSinceStartofVideo =
									systemClockCurrentTime - systemClockStartTime;
							// Picture timestamps are in MICROSECONDS, so we
							// divide by 1000 to get milliseconds.
							long millisecondsStreamTimeSinceStartOfVideo =
									(picture.getTimeStamp() - firstTimestampInStream) / 1000;
							final long millisecondsTolerance = 50; // 50 ms of tolerance
							final long millisecondsToSleep =
									(millisecondsStreamTimeSinceStartOfVideo -
											(millisecondsClockTimeSinceStartofVideo +
													millisecondsTolerance));
							if (millisecondsToSleep > 0) {
								try {
									Thread.sleep(millisecondsToSleep);
								} catch (InterruptedException e) {
									// Restore the interrupt flag and stop
									// playback (this can happen when the user
									// closes the window).
									Thread.currentThread().interrupt();
									return;
								}
							}
						}

						// Convert the BGR24 picture to a Java BufferedImage.
						// Using newPic here is essential: when a resampler was
						// needed, `picture` is still in the source pixel
						// format and would not convert correctly.
						BufferedImage javaImage = Utils.videoPictureToImage(newPic);

						// and display it on the Java Swing window
						updateJavaWindow(javaImage);
					}
				}
			}
			// Packets from other streams are silently dropped.
		}

		/*
		 * Technically since we're exiting anyway these would be cleaned up by
		 * the garbage collector, but we release the native resources
		 * explicitly to be good citizens.
		 */
		if (videoCoder != null) {
			videoCoder.close();
			videoCoder = null;
		}
		if (container != null) {
			container.close();
			container = null;
		}
		closeJavaWindow();
	}

	/**
	 * The window we'll draw the video on.
	 */
	private static VideoImage mScreen = null;

	/** Pushes the next decoded frame into the video window. */
	private static void updateJavaWindow(BufferedImage javaImage) {
		mScreen.setImage(javaImage);
	}

	/**
	 * Opens a Swing window on screen.
	 */
	private static void openJavaWindow() {
		mScreen = new VideoImage();
	}

	/**
	 * Forces the Swing thread to terminate; there may be a cleaner way to do
	 * this in Swing, but this works too.
	 */
	private static void closeJavaWindow() {
		System.exit(0);
	}

}
