package publish;

import java.awt.*;
import java.io.*;
import java.net.InetAddress;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.protocol.DataSource;
import javax.media.format.*;
import javax.media.control.TrackControl;
import javax.media.control.QualityControl;
import javax.media.rtp.*;
import javax.media.rtp.rtcp.*;
import java.util.Vector;

public class AVTransmit_Capture {

	// Destination (unicast/multicast/broadcast) IP address for the transmission.
	private String ipAddress;
	// Base RTP port; track i is sent on portBase + 2*i (the odd port is RTCP).
	private int portBase;
	// The Processor converts the captured media into the DataSource to be sent.
	private Processor processor = null;
	// One RTPManager (RTP session) per output stream of the processor.
	private RTPManager rtpMgrs[];
	// Output DataSource of the processor; fed to the RTP send streams.
	private DataSource dataOutput = null;

	/**
	 * Creates a transmitter targeting the given address.
	 *
	 * @param ipAddress destination IP address (unicast, multicast or broadcast)
	 * @param pb        base port number, as a decimal string
	 * @param format    unused; retained for source compatibility with callers
	 * @throws NumberFormatException if {@code pb} is not a parseable integer
	 */
	public AVTransmit_Capture(String ipAddress, String pb, Format format)
	{
		this.ipAddress = ipAddress;
		// FIX: Integer.valueOf(String) never returns null -- it throws
		// NumberFormatException on bad input -- so the old null check was dead.
		this.portBase = Integer.parseInt(pb);
	}

	/**
	 * Starts the transmission. Returns null if transmission started ok.
	 * Otherwise it returns a string with the reason why the setup failed.
	 */
	public synchronized String start()
	{
		// Create a processor for the capture devices and program it to
		// output RTP-ready formats. A non-null result is an error message.
		String result = createProcessor();
		if (result != null)
			return result;

		// Create an RTP session per stream to transmit the output of the
		// processor to the specified IP address and port numbers.
		result = createTransmitter();
		if (result != null)
		{
			processor.close();
			processor = null;
			return result;
		}

		// Start the transmission.
		processor.start();

		return null;
	}

	/**
	 * Stops the transmission if already started.
	 */
	public void stop()
	{
		synchronized (this)
		{
			if (processor != null)
			{
				processor.stop();
				processor.close();
				processor = null;
				// FIX: rtpMgrs is null when start() failed before the
				// transmitter was created; guard to avoid an NPE here.
				if (rtpMgrs != null)
				{
					// Close every RTP session cleanly.
					for (int i = 0; i < rtpMgrs.length; i++)
					{
						rtpMgrs[i].removeTargets("Session ended.");
						rtpMgrs[i].dispose();
					}
					rtpMgrs = null;
				}
			}
		}
	}

	/**
	 * Locates the capture devices, builds a Processor around a merged
	 * audio+video DataSource, and programs every enabled track to its
	 * first supported RTP format.
	 *
	 * @return null on success, otherwise an error description
	 */
	private String createProcessor()
	{
		Vector devices = CaptureDeviceManager.getDeviceList(null);

		// NOTE(review): this assumes device index 2 is the video capture
		// device and index 0 the audio device. The device order is machine
		// dependent -- verify against CaptureDeviceManager on the target host.
		if (devices.size() <= 2)
		{
			// FIX: the original fell through with processor == null and
			// crashed with a NullPointerException in waitForState below.
			return "Couldn't find the expected capture devices (need at least 3 registered)";
		}

		CaptureDeviceInfo diVideo = (CaptureDeviceInfo) devices.elementAt(2);
		System.out.println("Capture Video Device Name: " + diVideo.getName());

		CaptureDeviceInfo diAudio = (CaptureDeviceInfo) devices.elementAt(0);
		System.out.println("Capture Audio Device Name: " + diAudio.getName());

		try
		{
			DataSource[] dataSources = new DataSource[2];
			dataSources[0] = Manager.createDataSource(diVideo.getLocator());
			dataSources[1] = Manager.createDataSource(diAudio.getLocator());
			// Merge audio and video into a single source for one processor.
			processor = Manager.createProcessor(Manager.createMergingDataSource(dataSources));
		}
		catch (NoDataSourceException e)
		{
			return "NoDataSourceException" + e;
		}
		catch (IOException e)
		{
			return "IOException creating processor" + e;
		}
		catch (IncompatibleSourceException e)
		{
			return "IncompatibleSourceException" + e;
		}
		catch (NoProcessorException e)
		{
			return "Couldn't create processor" + e;
		}

		// Wait for the processor to reach the Configured state; track
		// controls are only available once configuration has completed.
		boolean result = waitForState(processor, Processor.Configured);
		if (result == false)
			return "Couldn't configure processor";

		TrackControl[] tracks = processor.getTrackControls();

		// Do we have at least one track?
		if (tracks == null || tracks.length < 1)
			return "Couldn't find tracks in processor";

		// Set the output content descriptor to RAW_RTP. This limits the
		// formats reported by TrackControl.getSupportedFormats to only
		// valid RTP formats.
		ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
		processor.setContentDescriptor(cd);

		Format supported[];
		Format chosen;
		boolean atLeastOneTrack = false;

		System.out.println("tracks length is: " + tracks.length);
		// Program the tracks: pick the first supported RTP format of each.
		for (int i = 0; i < tracks.length; i++)
		{
			if (tracks[i].isEnabled())
			{
				supported = tracks[i].getSupportedFormats();
				// FIX: the original printed tracks.length under this label.
				System.out.println("getSupportedFormats length is: " + supported.length);
				for (int j = 0; j < supported.length; j++)
				{
					System.out.println(" ========= " + j + " is: " + supported[j].toString());
				}

				// We've set the output content to RAW_RTP, so all the
				// supported formats should work with RTP. Pick the first.
				if (supported.length > 0)
				{
					// FIX: the original indexed supported[1], which throws
					// ArrayIndexOutOfBoundsException when exactly one format
					// is supported and contradicts the "first one" intent.
					if (supported[0] instanceof VideoFormat)
					{
						// For video formats, double-check the sizes since
						// not all formats work in all sizes.
						chosen = checkForVideoSizes(tracks[i].getFormat(), supported[0]);
					}
					else
						chosen = supported[0];
					tracks[i].setFormat(chosen);
					System.err.println("Track " + i + " is set to transmit as:");
					System.err.println("  " + chosen);
					atLeastOneTrack = true;
				}
				else
					tracks[i].setEnabled(false);
			}
			else
				tracks[i].setEnabled(false);
		}

		if (!atLeastOneTrack)
			return "Couldn't set any of the tracks to a valid RTP format";

		// Realize the processor. This internally creates a flow graph and
		// attempts to create an output DataSource for the RTP frames.
		result = waitForState(processor, Controller.Realized);
		if (result == false)
			return "Couldn't realize processor";

		// Set the JPEG quality to .5.
		setJPEGQuality(processor, 0.5f);

		// Grab the output DataSource that the RTP sessions will transmit.
		dataOutput = processor.getDataOutput();

		return null;
	}


	/**
	 * Uses the RTPManager API to create one RTP session per media stream
	 * produced by the processor, and starts a SendStream on each.
	 *
	 * @return null on success, otherwise an error description
	 */
	private String createTransmitter()
	{
		// NOTE(review): assumes the RAW_RTP output is a push-buffer source;
		// a defensive instanceof check would be safer here.
		PushBufferDataSource pbds = (PushBufferDataSource) dataOutput;
		PushBufferStream pbss[] = pbds.getStreams();

		System.out.println("pbss.length is: " + pbss.length);

		rtpMgrs = new RTPManager[pbss.length];
		SessionAddress localAddr, destAddr;
		InetAddress ipAddr;
		SendStream sendStream;
		int port;

		for (int i = 0; i < pbss.length; i++)
		{
			try
			{
				rtpMgrs[i] = RTPManager.newInstance();

				// The local session address is created on the same port as
				// the target port. This is necessary if you use
				// AVTransmit_Capture in conjunction with JMStudio: JMStudio
				// assumes -- in a unicast session -- that the transmitter
				// transmits from the same port it is receiving on and sends
				// RTCP Receiver Reports back to this port of the
				// transmitting host.
				port = portBase + 2 * i;

				// Destination (client) IP address.
				ipAddr = InetAddress.getByName(ipAddress);

				localAddr = new SessionAddress(InetAddress.getLocalHost(), port);

				destAddr = new SessionAddress(ipAddr, port);

				rtpMgrs[i].initialize(localAddr);

				rtpMgrs[i].addTarget(destAddr);

				System.err.println("Created RTP session: " + ipAddress + " " + port);

				// Create and start the send stream for the i-th output stream.
				sendStream = rtpMgrs[i].createSendStream(dataOutput, i);
				sendStream.start();
			}
			catch (Exception e)
			{
				// FIX: getMessage() can be null, which the caller would
				// mistake for success; toString() is always non-null.
				return e.toString();
			}
		}

		return null;
	}


	/**
	 * For JPEG and H263, we know that they only work for particular
	 * sizes. So we'll perform extra checking here to make sure they
	 * are of the right sizes.
	 *
	 * @param original  the track's current (capture) format, used for its size
	 * @param supported the RTP format candidate to size-correct
	 * @return a size-corrected format intersected with {@code supported},
	 *         or {@code supported} unchanged for unrecognized formats
	 */
	Format checkForVideoSizes(Format original, Format supported)
	{
		int width, height;
		Dimension size = ((VideoFormat) original).getSize();
		Format jpegFmt = new Format(VideoFormat.JPEG_RTP);
		Format h263Fmt = new Format(VideoFormat.H263_RTP);

		if (supported.matches(jpegFmt))
		{
			// For JPEG, make sure width and height are divisible by 8.
			width = (size.width % 8 == 0 ? size.width :
					(size.width / 8) * 8);
			height = (size.height % 8 == 0 ? size.height :
					(size.height / 8) * 8);
		}
		else if (supported.matches(h263Fmt))
		{
			// For H.263, we only support some specific sizes
			// (SQCIF, QCIF, CIF).
			if (size.width < 128)
			{
				width = 128;
				height = 96;
			}
			else if (size.width < 176)
			{
				width = 176;
				height = 144;
			}
			else
			{
				width = 352;
				height = 288;
			}
		}
		else
		{
			// We don't know this particular format. We'll just
			// leave it alone then.
			return supported;
		}

		return (new VideoFormat(null,
				new Dimension(width, height),
				Format.NOT_SPECIFIED,
				null,
				Format.NOT_SPECIFIED)).intersects(supported);
	}


	/**
	 * Sets the encoding quality to the specified value on the JPEG encoder.
	 * 0.5 is a good default.
	 *
	 * @param p   the player/processor whose controls are searched
	 * @param val quality in [0, 1]; higher is better quality, more bandwidth
	 */
	void setJPEGQuality(Player p, float val)
	{
		Control cs[] = p.getControls();
		QualityControl qc = null;
		VideoFormat jpegFmt = new VideoFormat(VideoFormat.JPEG);

		// Loop through the controls to find the Quality control that is
		// owned by the JPEG encoder codec.
		for (int i = 0; i < cs.length; i++)
		{
			if (cs[i] instanceof QualityControl && cs[i] instanceof Owned)
			{
				Object owner = ((Owned) cs[i]).getOwner();

				// Check to see if the owner is a Codec.
				// Then check for the output format.
				if (owner instanceof Codec)
				{
					Format fmts[] = ((Codec) owner).getSupportedOutputFormats(null);
					for (int j = 0; j < fmts.length; j++)
					{
						if (fmts[j].matches(jpegFmt))
						{
							qc = (QualityControl) cs[i];
							qc.setQuality(val);
							System.err.println("- Setting quality to " +
									val + " on " + qc);
							break;
						}
					}
				}
				if (qc != null)
					break;
			}
		}
	}


	/****************************************************************
	 * Convenience methods to handle processor's state changes.
	 ****************************************************************/

	// Deliberately a fresh instance rather than Integer.valueOf/autoboxing:
	// small Integer values are cached JVM-wide, and synchronizing on a
	// shared cached instance could contend with unrelated code.
	private Integer stateLock = new Integer(0);
	private boolean failed = false;

	/** Monitor the StateListener notifies on every controller event. */
	Integer getStateLock()
	{
		return stateLock;
	}

	/** Marks the pending state transition as failed (processor closed). */
	void setFailed()
	{
		failed = true;
	}

	/**
	 * Synchronously drives the processor toward the given state.
	 *
	 * @param p     the processor to transition
	 * @param state target state (Processor.Configured or Controller.Realized)
	 * @return true if the state was reached; false on failure or interrupt
	 */
	private synchronized boolean waitForState(Processor p, int state)
	{
		p.addControllerListener(new StateListener());
		failed = false;

		// Kick off the asynchronous transition.
		if (state == Processor.Configured)
		{
			p.configure();
		}
		else if (state == Processor.Realized)
		{
			p.realize();
		}

		// Wait until an event confirms success, or a failure event arrives.
		// FIX: the state/failed check must run while holding the lock;
		// previously a notification arriving between the unsynchronized
		// check and wait() was lost and this method could block forever.
		synchronized (getStateLock())
		{
			while (p.getState() < state && !failed)
			{
				try
				{
					getStateLock().wait();
				}
				catch (InterruptedException ie)
				{
					// Restore the interrupt flag for the caller.
					Thread.currentThread().interrupt();
					return false;
				}
			}
		}

		return !failed;
	}

	/****************************************************************
	 * Inner Classes
	 ****************************************************************/

	class StateListener implements ControllerListener {

		public void controllerUpdate(ControllerEvent ce) {

			// If there was an error during configure or realize,
			// the processor will be closed.
			if (ce instanceof ControllerClosedEvent)
				setFailed();

			// On every controller event, wake the thread waiting in
			// waitForState. (The old "instanceof ControllerEvent" test
			// was always true and has been removed.)
			synchronized (getStateLock()) {
				getStateLock().notifyAll();
			}
		}
	}


	/****************************************************************
	 * Sample Usage for AVTransmit_Capture class
	 ****************************************************************/

	public static void main(String[] args)
	{
		// We need two parameters to do the transmission. For example:
		//   java AVTransmit_Capture 129.130.131.132 42050
		// To reach all hosts on 129.130.131.xxx, use the broadcast
		// address 129.130.131.255 as the destination.
		// FIX: the original overwrote args with a hard-coded address,
		// which made the usage check below dead code.
		if (args.length < 2)
		{
			prUsage();
		}

		// Create a transmit object with the specified params.
		AVTransmit_Capture at = new AVTransmit_Capture(args[0], args[1], null);
		// Start the transmission.
		String result = at.start();

		// result will be non-null if there was an error. The return
		// value is a String describing the possible error. Print it.
		if (result != null)
		{
			System.err.println("Error : " + result);
			// FIX: exit with a non-zero status on failure.
			System.exit(1);
		}

		System.err.println("Start transmission for 60 seconds...");

		// Transmit for 60 seconds and then close the processor.
		// This is a safeguard when using a capture data source
		// so that the capture device will be properly released
		// before quitting.
		// The right thing to do would be to have a GUI with a
		// "Stop" button that would call stop on AVTransmit_Capture.
		try
		{
			// FIX: the original slept 600000 ms (10 minutes) while the
			// comments and the log message both promise 60 seconds; also
			// use the static call instead of Thread.currentThread().sleep.
			Thread.sleep(60000);
		}
		catch (InterruptedException ie)
		{
			Thread.currentThread().interrupt();
		}

		// Stop the transmission.
		at.stop();

		System.err.println("...transmission ended.");

		System.exit(0);
	}

	/** Prints command-line usage and exits. */
	static void prUsage()
	{
		System.err.println("Usage: AVTransmit_Capture <destIP> <destPortBase>");
		System.err.println("     <destIP>: multicast, broadcast or unicast IP address for the transmission");
		System.err.println("     <destPortBase>: network port numbers for the transmission.");
		System.err.println("                     The first track will use the destPortBase.");
		System.err.println("                     The next track will use destPortBase + 2 and so on.\n");
		System.exit(0);
	}
}

