package edu.thu.thss.yxy.media;

import java.io.*;
import java.net.*;
import java.util.*;

import javax.sdp.*;
import javax.media.*;
import javax.media.control.BufferControl;
import javax.media.control.TrackControl;
import javax.media.format.AudioFormat;
import javax.media.format.VideoFormat;
import javax.media.protocol.*;
import javax.media.rtp.RTPManager;
import javax.media.rtp.SessionAddress;

import edu.thu.thss.yxy.media.event.MediaListener;
import edu.thu.thss.yxy.media.event.MediaEvent;
import edu.thu.thss.yxy.media.util.JMFInit;
import edu.thu.thss.yxy.sip.sdp.SdpUtils;
import edu.thu.thss.yxy.util.*;

/**
 * A wrapper of media libraries such as JMF. It takes care of all media play
 * and capture, as well as media transport (e.g. over RTP).
 * 
 * @author Andy Gao
 * 
 */
public class MediaManager {
	/**
	 * Our logger.
	 */
	private final static Logger logger = Logger.getLogger(MediaManager.class);

	// Receiver for the incoming audio/video RTP stream (null when idle).
	private RtpReceiver avReceiver;
	// Active transmitters sending local media to the remote party.
	private Vector<RtpTransmitter> avTransmitters = new Vector<RtpTransmitter>();
	// Listeners notified of media events (stream received, status changed).
	private ArrayList<MediaListener> mediaListeners = new ArrayList<MediaListener>();
	/**
	 * IP address of the remote endpoint this flow sends media to, extracted
	 * from the incoming SDP.
	 */
	private String remoteAddress = null;

	/**
	 * The local ports used; -1 until lazily allocated by
	 * {@link #getAudioPort()} / {@link #getVideoPort()}.
	 */
	private int localAudioPort = -1;
	private int localVideoPort = -1;
	/**
	 * Ports of the remote endpoint, extracted from the incoming SDP; -1 until
	 * negotiated.
	 */
	private int remoteAudioPort = -1;
	@SuppressWarnings("unused")
	private int remoteVideoPort = -1;

	/**
	 * Codecs chosen during negotiation (SDP payload type codes as Strings).
	 */
	private String negotiatedAudioCodec;
	private String negotiatedVideoCodec;

	/**
	 * The SdpFactory instance that we use for construction of all sdp
	 * descriptions.
	 */
	private SdpFactory sdpFactory = null;

	/**
	 * A flag indicating whether the media service implementation is ready to be
	 * used.
	 */
	private boolean isStarted = false;

	/**
	 * Utility used to wait for the source processor to reach a target state.
	 */
	private final ProcessorUtility processorUtility = new ProcessorUtility();

	/**
	 * The processor that will be handling content coming from our capture data
	 * sources.
	 */
	private Processor sourceProcessor = null;

	/**
	 * SDP codes of all video formats that JMF supports for the current
	 * datasource.
	 */
	private String[] supportedVideoEncodings = new String[0];

	/**
	 * SDP codes of all audio formats that JMF supports for the current
	 * datasource.
	 */
	private String[] supportedAudioEncodings = new String[0];

	// SDP codes of all video formats that we can receive.
	private String[] receivableVideoFormats = new String[] {
	// sdp format // corresponding JMF Format
			Integer.toString(SdpConstants.H263), // javax.media.format.VideoFormat.H263_RTP
			Integer.toString(SdpConstants.JPEG), // javax.media.format.VideoFormat.JPEG_RTP
			Integer.toString(SdpConstants.H261) // javax.media.format.VideoFormat.H261_RTP
	};

	// SDP codes of all audio formats that we can receive.
	private String[] receivableAudioFormats = new String[] {
	// sdp format // corresponding JMF Format
			Integer.toString(SdpConstants.PCMU), // javax.media.format.AudioFormat.ULAW_RTP;
			Integer.toString(SdpConstants.GSM), // javax.media.format.AudioFormat.GSM_RTP;
			Integer.toString(SdpConstants.G723), // javax.media.format.AudioFormat.G723_RTP
			Integer.toString(SdpConstants.DVI4_8000), // javax.media.format.AudioFormat.DVI_RTP;
			Integer.toString(SdpConstants.DVI4_16000), // javax.media.format.AudioFormat.DVI_RTP;
			Integer.toString(SdpConstants.PCMA), // javax.media.format.AudioFormat.ALAW;
			Integer.toString(SdpConstants.G728), // javax.media.format.AudioFormat.G728_RTP;
			Integer.toString(SdpConstants.G729) // javax.media.format.AudioFormat.G729_RTP
	};

	/**
	 * The indicator which determines whether {@link #supportedAudioEncodings}
	 * and {@link #supportedVideoEncodings} are already calculated to be
	 * up-to-date with the current {@link #sourceProcessor} and the lock to
	 * synchronize the access to the mentioned calculation.
	 */
	private final boolean[] supportedEncodingsAreCalculated = new boolean[1];

	/**
	 * A data source merging our audio and video data sources.
	 */
	private DataSource avDataSource = null;

	/**
	 * A list of currently active RTPManagers mapped against local session
	 * addresses. The list is used by transmitters and receivers so that
	 * receiving and transmitting from the same port simultaneously is possible.
	 */
	Hashtable<SessionAddress, RTPManager> activeRtpManagers = new Hashtable<SessionAddress, RTPManager>();

	/**
	 * Initializes the media service: obtains an SdpFactory, runs JMF device
	 * detection, then prepares either the configured media source or the
	 * detected capture devices for transmission.
	 *
	 * @throws MediaException
	 *             if any part of the initialization fails.
	 */
	public void start() throws MediaException {
		try {
			sdpFactory = SdpFactory.getInstance();

			setupJMF();
			String mediaSource = PropertiesUtil
					.getProperty("edu.thu.thss.yxy.media.MEDIA_SOURCE");
			if (mediaSource == null || "".equals(mediaSource)) {
				// No explicit source configured - capture from local devices.
				initCaptureDevices();
			} else {
				MediaLocator locator = new MediaLocator(mediaSource);
				avDataSource = Manager.createDataSource(locator);
				if (avDataSource != null)
					initProcessor(avDataSource);
				else
					sourceProcessor = null;
			}

			isStarted = true;
		} catch (Exception ex) {
			logger.error("Failed to initialize media control", ex);
			isStarted = false;
			// Propagate the failure instead of swallowing it; previously
			// callers could only discover it later via SERVICE_NOT_STARTED
			// in openMediaStreams().
			if (ex instanceof MediaException)
				throw (MediaException) ex;
			throw new MediaException("Failed to initialize media control",
					MediaException.INTERNAL_ERROR, ex);
		}
	}

	/**
	 * Runs JMFInit the first time the application is started so that capture
	 * devices are properly detected and initialized by JMF.
	 *
	 * @throws MediaException
	 *             if the JMF configuration directory or properties file cannot
	 *             be created.
	 */
	public static void setupJMF() throws MediaException {
		// ~/.jmf is where we store the jmf.properties file used by JMF. If the
		// directory does not exist, does not contain jmf.properties, or the
		// file has 0 length, this is the first run and we must continue with
		// JMFInit.
		String homeDir = System.getProperty("user.home");
		File jmfDir = new File(homeDir, ".jmf");
		// Append the .jmf dir to the class path so JMF can locate its
		// properties file.
		String classpath = System.getProperty("java.class.path");
		classpath += System.getProperty("path.separator")
				+ jmfDir.getAbsolutePath();
		System.setProperty("java.class.path", classpath);

		// Fail explicitly when the directory cannot be created instead of
		// ignoring mkdir()'s return value.
		if (!jmfDir.exists() && !jmfDir.mkdir())
			throw new MediaException("Failed to create directory - "
					+ jmfDir.getAbsolutePath(), MediaException.INTERNAL_ERROR);

		File jmfProperties = new File(jmfDir, "jmf.properties");

		if (!jmfProperties.exists()) {
			try {
				jmfProperties.createNewFile();
			} catch (IOException ex) {
				// Preserve the cause for diagnosis.
				throw new MediaException("Failed to create jmf.properties - "
						+ jmfProperties.getAbsolutePath(),
						MediaException.INTERNAL_ERROR, ex);
			}
		}

		if (jmfProperties.length() == 0)
			JMFInit.start();
	}

	/**
	 * Negotiates the media session described by <tt>sdpData</tt> and starts
	 * receiving media from the remote party.
	 *
	 * @param sdpData
	 *            the SDP body of the remote party.
	 * @throws MediaException
	 *             if this manager has not been successfully started.
	 */
	public void openMediaStreams(String sdpData) throws MediaException {
		if (isStarted) {
			prepareMediaSession(sdpData);
			startReceiver();
//			startTransmitter();
			return;
		}
		throw new MediaException(
				"The MediaManager had not been properly started! "
						+ "Impossible to continue",
				MediaException.SERVICE_NOT_STARTED);
	}

	/**
	 * Shuts down all media activity: closes every open stream, releases the
	 * source processor and its data source, and marks the service stopped.
	 *
	 * @throws MediaException
	 *             declared for API symmetry with {@link #start()}.
	 */
	public void stop() throws MediaException {
		closeStreams();
		closeProcessor();
		isStarted = false;
	}

	/**
	 * Tears down the active RTP session: releases all cached RTP managers,
	 * stops every transmitter and the receiver, then notifies listeners of
	 * the status change.
	 */
	public void closeStreams() {
		removeAllRtpManagers();
		stopTransmitters();
		stopReceiver();
		fireMediaStatusChanged();
	}

	/**
	 * Stops and closes the source processor and disconnects the
	 * capture/merging data source, if any.
	 *
	 * @throws MediaException
	 *             declared for API symmetry; not thrown by the current
	 *             implementation.
	 */
	private void closeProcessor() throws MediaException {
		Processor processor = sourceProcessor;
		if (processor != null) {
			processor.stop();
			processor.close();
		}
		if (avDataSource != null)
			avDataSource.disconnect();
	}

	/**
	 * Starts receiving the RTP stream using the session parameters negotiated
	 * in {@link #prepareMediaSession(String)}.
	 */
	protected void startReceiver() {
		MediaSessionDescription sessionDesc = new MediaSessionDescription();
		sessionDesc.setAddress(remoteAddress);
		sessionDesc.setDestinationPort(remoteAudioPort);
		sessionDesc.setLocalPort(localAudioPort);
		sessionDesc.setTransportProtocol("udp");
		sessionDesc.setAudioFormat(negotiatedAudioCodec);
		sessionDesc.setVideoFormat(negotiatedVideoCodec);

		avReceiver = new RtpReceiver(sessionDesc);
		avReceiver.setMediaManager(this);
		avReceiver.initialize();
	}

	/**
	 * Builds an SDP session description advertising the local audio and video
	 * ports together with all formats this endpoint can receive. Streams we
	 * cannot transmit on are marked "recvonly".
	 *
	 * @return the String form of the generated session description.
	 * @throws MediaException
	 *             if an SDP or network error occurs while generating the
	 *             description.
	 */
	public String generateSdpDescription() throws MediaException {
		try {
			SessionDescription sessDescr = sdpFactory
					.createSessionDescription();
			// "v=0"
			Version v = sdpFactory.createVersion(0);
			InetSocketAddress publicVideoAddress = new InetSocketAddress(
					InetAddress.getLocalHost(), getVideoPort());
			InetSocketAddress publicAudioAddress = new InetSocketAddress(
					InetAddress.getLocalHost(), getAudioPort());
			InetAddress publicIpAddress = publicAudioAddress.getAddress();
			String addrType = publicIpAddress instanceof Inet6Address ? "IP6"
					: "IP4";

			// o= : spaces in the user name mess everything up, so replace
			// them. Fall back to the JVM system property so a missing
			// application property cannot cause an NPE here.
			String userName = PropertiesUtil.getProperty("user.name");
			if (userName == null || userName.length() == 0) {
				userName = System.getProperty("user.name", "unknown");
			}
			Origin o = sdpFactory.createOrigin(userName.replace(' ', '_'), 0,
					0, "IN", addrType, publicIpAddress.getHostAddress());
			// "s=-"
			SessionName s = sdpFactory.createSessionName("-");
			// c=
			Connection c = sdpFactory.createConnection("IN", addrType,
					publicIpAddress.getHostAddress());
			// "t=0 0"
			TimeDescription t = sdpFactory.createTimeDescription();
			Vector<TimeDescription> timeDescs = new Vector<TimeDescription>();
			timeDescs.add(t);

			// -------- Audio media description
			String[] formats = receivableAudioFormats;
			MediaDescription am = sdpFactory.createMediaDescription("audio",
					publicAudioAddress.getPort(), 1, "RTP/AVP", formats);
			if (getSupportedAudioEncodings().length == 0) {
				am.setAttribute("recvonly", null);
			}

			// -------- Video media description
			// "m=video 22222 RTP/AVP 34";
			String[] vformats = receivableVideoFormats;
			MediaDescription vm = sdpFactory.createMediaDescription("video",
					publicVideoAddress.getPort(), 1, "RTP/AVP", vformats);
			if (getSupportedVideoEncodings().length == 0) {
				vm.setAttribute("recvonly", null);
			}

			Vector<MediaDescription> mediaDescs = new Vector<MediaDescription>();
			mediaDescs.add(am);
			mediaDescs.add(vm);

			sessDescr.setVersion(v);
			sessDescr.setOrigin(o);
			sessDescr.setConnection(c);
			sessDescr.setSessionName(s);
			sessDescr.setTimeDescriptions(timeDescs);
			if (mediaDescs.size() > 0)
				sessDescr.setMediaDescriptions(mediaDescs);

			return sessDescr.toString();
		} catch (Exception exc) {
			logger.error(
					"An SDP exception occurred while generating local sdp description",
					exc);
			// Preserve the original cause so callers can diagnose the error.
			throw new MediaException(
					"An SDP exception occurred while generating local sdp description",
					MediaException.GENERAL_ERROR, exc);
		}
	}

	/**
	 * Starts transmitting the local audio/video data source to the remote
	 * party over RTP, using the parameters negotiated in
	 * {@link #prepareMediaSession(String)}. Does nothing when no capture data
	 * source is available.
	 *
	 * @throws MediaException
	 *             if the transmission could not be started.
	 */
	protected void startTransmitter() throws MediaException {
		MediaSessionDescription mediaSessionDesc = new MediaSessionDescription();
		mediaSessionDesc.setAddress(remoteAddress);
		mediaSessionDesc.setLocalPort(localAudioPort);
		mediaSessionDesc.setDestinationPort(remoteAudioPort);
		mediaSessionDesc.setTransportProtocol("udp");
		mediaSessionDesc.setAudioFormat(negotiatedAudioCodec);
		mediaSessionDesc.setVideoFormat(negotiatedVideoCodec);
		if (avDataSource == null) {
			// Nothing to transmit - no capture device / media source found.
			return;
		}
		RtpTransmitter avTransmitter = new RtpTransmitter(mediaSessionDesc);
		avTransmitter.setMediaManagerCallback(this);
		avTransmitters.add(avTransmitter);
		// Start the media transmission. start() returns null on success or a
		// String describing the error.
		String result = avTransmitter.start();
		if (result != null) {
			logger.error("Failed to start media transmission: " + result);
		} else {
			logger.info("Media transmission started");
		}
	}

	/**
	 * Stops and removes every active RTP transmitter. A failure to stop one
	 * transmitter is logged and does not prevent the remaining transmitters
	 * from being stopped.
	 */
	protected void stopTransmitters() {
		// Iterate backwards so removal does not shift unvisited elements.
		for (int i = avTransmitters.size() - 1; i >= 0; i--) {
			try {
				avTransmitters.elementAt(i).stop();
			} catch (Exception exc) {
				logger.error("Could not close transmitter " + i, exc);
			}
			avTransmitters.removeElementAt(i);
		}
		// Use the logger rather than System.out, consistent with the rest of
		// the class.
		logger.info("Media transmitters stopped");
	}

	/**
	 * Stops receiving the RTP stream and releases the receiver, if one is
	 * active.
	 */
	protected void stopReceiver() {
		if (avReceiver != null) {
			avReceiver.stop();
			avReceiver = null;
			// Log (after the fact, via the logger) instead of printing to
			// System.out before the receiver has actually been stopped.
			logger.info("Media receiver stopped");
		}
	}

	/**
	 * Extracts from the sdp all the information needed to initiate the media
	 * session: the remote address and ports, and the negotiated audio/video
	 * codecs. If the SDP body cannot be parsed, the error is logged and the
	 * session state is left untouched.
	 *
	 * @param incomingSdpBody
	 *            the sdp body of the incoming call to negotiate the media
	 *            session.
	 */
	@SuppressWarnings("unchecked")
	public void prepareMediaSession(String incomingSdpBody) {
		SessionDescription sessionDescription = null;
		try {
			sessionDescription = sdpFactory
					.createSessionDescription(incomingSdpBody);
			// Get the remote address where the user agent has to connect to
			Connection remoteConnection = sessionDescription.getConnection();
			remoteAddress = remoteConnection.getAddress();
		} catch (SdpParseException spe) {
			// Nothing can be negotiated from an unparsable description; bail
			// out before dereferencing the null sessionDescription below.
			logger.error("Failed to parse the incoming SDP body", spe);
			return;
		}
		localAudioPort = getAudioPort();
		localVideoPort = getVideoPort();
		logger.info("Local listening audio port : " + localAudioPort);
		// Extract the codecs from the sdp session description
		List<Format> audioCodecList = SdpUtils
				.extractAudioCodecs(sessionDescription);
		remoteAudioPort = SdpUtils.getAudioPort(sessionDescription);
		logger.info("Remote listening audio port : " + remoteAudioPort);
		List<Format> videoCodecList = SdpUtils
				.extractVideoCodecs(sessionDescription);
		remoteVideoPort = SdpUtils.getVideoPort(sessionDescription);
		negotiatedAudioCodec = negotiateAudioCodec(audioCodecList);
		negotiatedVideoCodec = negotiateVideoCodec(videoCodecList);
	}

	/**
	 * Opens all detected capture devices making them ready to capture: picks
	 * the first device offering an audio format and the first offering a
	 * video format, creates data sources for them, and merges those into
	 * <code>avDataSource</code> before initializing the processor.
	 *
	 * @throws MediaException
	 *             if opening the devices or merging their data sources fails.
	 */
	@SuppressWarnings("unchecked")
	private void initCaptureDevices() throws MediaException {
		CaptureDeviceInfo audioDeviceInfo = null;
		CaptureDeviceInfo videoDeviceInfo = null;
		DataSource audioDataSource = null;
		DataSource videoDataSource = null;

		// Release any previously opened devices/processor first.
		disposeBeforeInitCaptureDevices();

		Vector<CaptureDeviceInfo> captureDevices = CaptureDeviceManager
				.getDeviceList(null);
		logger.info("- number of capture devices: " + captureDevices.size());

		// Select the first audio-capable and first video-capable devices.
		for (int i = 0; i < captureDevices.size(); i++) {
			CaptureDeviceInfo captureDevice = captureDevices.elementAt(i);
			logger.info("- name of the capture device: "
					+ captureDevice.getName());
			Format[] formatArray = captureDevice.getFormats();
			for (int j = 0; j < formatArray.length; j++) {
				Format format = formatArray[j];
				if (format instanceof VideoFormat) {
					logger.info("- format accepted by this VIDEO device: "
							+ format.toString().trim());
					if (videoDeviceInfo == null) {
						videoDeviceInfo = captureDevice;
					}
				} else if (format instanceof AudioFormat) {
					logger.info("- format accepted by this AUDIO device: "
							+ format.toString().trim());
					if (audioDeviceInfo == null) {
						audioDeviceInfo = captureDevice;
					}
				} else {
					logger.info("- format of type UNKNOWN");
				}
			}
		}

		// Init audio device
		if (audioDeviceInfo != null) {
			audioDataSource = createDataSource(audioDeviceInfo.getLocator());
		}

		// Init video device
		if (videoDeviceInfo != null) {
			videoDataSource = createDataSource(videoDeviceInfo.getLocator());
		}

		// Create the audio/video data source.
		if (audioDataSource != null && videoDataSource != null) {
			try {
				avDataSource = Manager.createMergingDataSource(new DataSource[] {
								audioDataSource, videoDataSource });
			} catch (IncompatibleSourceException exc) {
				logger.fatal("Failed to create a media data source!"
						+ "Media transmission won't be enabled!", exc);
				// Throw the declared checked exception (with its cause)
				// rather than an Error, so callers can actually handle the
				// failure.
				throw new MediaException("Failed to create a media data source!"
						+ "Media transmission won't be enabled!",
						MediaException.INTERNAL_ERROR, exc);
			}
		} else if (audioDataSource != null)
			avDataSource = audioDataSource;
		else if (videoDataSource != null)
			avDataSource = videoDataSource;
		else
			avDataSource = null;

		// avDataSource may be null (Bug report Vince Fourcade)
		if (avDataSource != null)
			initProcessor(avDataSource);
		else
			sourceProcessor = null;
	}

	/**
	 * Disposes of any state that {@link #initCaptureDevices()} reinitializes.
	 * In particular it is vital to invoke {@link Controller#close()} on
	 * <code>sourceProcessor</code> even though it is about to become
	 * unreferenced; otherwise it keeps unnecessary threads alive, which in
	 * turn keep alive just about anything tied to an associated
	 * <code>Call</code>.
	 */
	private void disposeBeforeInitCaptureDevices() {
		if (avDataSource != null) {
			try {
				avDataSource.stop();
			} catch (IOException ex) {
				logger.error("Failed to properly stop avDataSource.", ex);
			}
			avDataSource.disconnect();
		}

		if (sourceProcessor == null) {
			return;
		}
		sourceProcessor.stop();
		if (sourceProcessor.getState() == Processor.Realized) {
			// A realized processor may have a live output data source that
			// must be disconnected before the processor is released.
			DataSource output = sourceProcessor.getDataOutput();
			if (output != null) {
				output.disconnect();
			}
		}
		sourceProcessor.deallocate();
		sourceProcessor.close();
	}

	/**
	 * Detects the audio and video encodings supported by the current
	 * {@link #sourceProcessor} and caches their SDP codes in
	 * {@link #supportedAudioEncodings} / {@link #supportedVideoEncodings}.
	 */
	private void detectSupportedEncodings() {
		// Query the processor's track controls and collect the SDP code of
		// every JMF format it can transmit.
		TrackControl[] trackControls = sourceProcessor.getTrackControls();
		logger.debug("We will be able to transmit in:");

		List<String> audioSdpCodes = new ArrayList<String>();
		List<String> videoSdpCodes = new ArrayList<String>();

		for (TrackControl trackControl : trackControls) {
			Format[] formats = trackControl.getSupportedFormats();
			for (int j = 0; j < formats.length; j++) {
				Format format = formats[j];
				String encoding = format.getEncoding();

				int sdpInt = MediaUtil.jmfToSdpEncoding(encoding);
				if (sdpInt == MediaUtil.UNKNOWN_ENCODING) {
					logger.debug("unknown encoding format " + encoding);
					continue;
				}
				String sdp = String.valueOf(sdpInt);

				if (format instanceof AudioFormat) {
					if (!audioSdpCodes.contains(sdp)) {
						if (logger.isDebugEnabled()) {
							logger.debug("Audio=[" + (j + 1) + "]="
									+ encoding + "; sdp=" + sdp);
						}
						audioSdpCodes.add(sdp);
					}
				} else if (format instanceof VideoFormat) {
					if (!videoSdpCodes.contains(sdp)) {
						if (logger.isDebugEnabled()) {
							logger.debug("Video=[" + (j + 1) + "]="
									+ encoding + "; sdp=" + sdp);
						}
						videoSdpCodes.add(sdp);
					}
				}
			}
		}

		// Only overwrite the cached arrays when something was detected.
		if (!audioSdpCodes.isEmpty()) {
			supportedAudioEncodings = audioSdpCodes
					.toArray(new String[audioSdpCodes.size()]);
		}
		if (!videoSdpCodes.isEmpty()) {
			supportedVideoEncodings = videoSdpCodes
					.toArray(new String[videoSdpCodes.size()]);
		}
	}

	/**
	 * Ensures {@link #supportedAudioEncodings} and
	 * {@link #supportedVideoEncodings} are up-to-date with the current
	 * {@link #sourceProcessor}, running the detection at most once per
	 * processor change.
	 */
	private void ensureSupportedEncodingsAreDetected() {
		synchronized (supportedEncodingsAreCalculated) {
			if (supportedEncodingsAreCalculated[0]) {
				return;
			}
			if (sourceProcessor != null) {
				detectSupportedEncodings();
			}
			supportedEncodingsAreCalculated[0] = true;
		}
	}
	
	 /**
     * Removes all rtp managers from the rtp manager cache.
     */
	synchronized void removeAllRtpManagers() {
		Enumeration<SessionAddress> rtpManages = activeRtpManagers.keys();
		while (rtpManages.hasMoreElements()) {
			SessionAddress item = rtpManages.nextElement();
			activeRtpManagers.remove(item);
		}
	}

   /**
    * Moves formats with the specified encoding to the top of the array list
    * so that they are the ones chosen for transmission (if supported by the
    * remote party) (feature request by Vince Fourcade)
    */
	protected void surfacePreferredEncodings(String[] formats) {
		String preferredAudioEncoding = PropertiesUtil
				.getProperty("edu.thu.thss.yxy.media.PREFERRED_AUDIO_ENCODING");
		String preferredVideoEncoding = PropertiesUtil
				.getProperty("edu.thu.thss.yxy.media.PREFERRED_VIDEO_ENCODING");
		if (preferredAudioEncoding == null && preferredVideoEncoding == null) {
			return;
		}
		for (int i = 0; i < formats.length; i++) {
			String encoding = formats[i];
			if ((preferredAudioEncoding != null && encoding
					.equalsIgnoreCase(preferredAudioEncoding))
					|| (preferredVideoEncoding != null && encoding
							.equalsIgnoreCase(preferredVideoEncoding))) {
				formats[i] = formats[0];
				formats[0] = encoding;
				break;
			}
		}

	}

	/**
	 * Initialize the processor that we will be using for transmission. The
	 * method also marks the cached lists of supported formats as stale so
	 * that they are recalculated for the new <tt>dataSource</tt>.
	 *
	 * @param dataSource
	 *            the source to use for our source processor.
	 * @throws MediaException
	 *             if connecting the data source or initializing the processor
	 *             fails.
	 */
	private void initProcessor(DataSource dataSource) throws MediaException {
		try {
			try {
				dataSource.connect();
			}
			// Thrown when the operation is not supported by the OS
			catch (NullPointerException ex) {
				logger.error("An internal error occurred while"
						+ " trying to connect to the datasource!", ex);
				throw new MediaException("An internal error occurred while"
						+ " trying to connect to the datasource!",
						MediaException.INTERNAL_ERROR, ex);
			}

			// Enlarge the jitter buffer when the source supports it; guard
			// with instanceof instead of a blind cast.
			Object ctl = dataSource
					.getControl("javax.media.control.BufferControl");
			if (ctl instanceof BufferControl) {
				((BufferControl) ctl).setBufferLength(60);// buffers in ms
			}

			sourceProcessor = Manager.createProcessor(dataSource);

			if (!processorUtility.waitForState(sourceProcessor,
					Processor.Configured)) {
				throw new MediaException(
						"Media manager could not configure processor\n"
								+ "for the specified data source",
						MediaException.INTERNAL_ERROR);
			}

		} catch (NoProcessorException ex) {
			logger.error("Media manager could not create a processor\n"
					+ "for the specified data source", ex);
			throw new MediaException(
					"Media manager could not create a processor\n"
							+ "for the specified data source",
					MediaException.INTERNAL_ERROR, ex);
		} catch (IOException ex) {
			logger.error("Media manager could not connect "
					+ "to the specified data source", ex);
			throw new MediaException("Media manager could not connect "
					+ "to the specified data source",
					MediaException.INTERNAL_ERROR, ex);
		}
		sourceProcessor.setContentDescriptor(new ContentDescriptor(
				ContentDescriptor.RAW_RTP));

		/*
		 * The lists of the supported audio and video encodings will have to be
		 * calculated again in order to get them up-to-date with the current
		 * sourceProcessor.
		 */
		synchronized (supportedEncodingsAreCalculated) {
			supportedEncodingsAreCalculated[0] = false;
		}
	}

	/**
	 * Returns a JMF DataSource object over the device that <tt>locator</tt>
	 * points to.
	 *
	 * @param locator
	 *            the MediaLocator of the device/movie that we'd like to
	 *            transmit from.
	 * @return a connected <tt>DataSource</tt> for the media specified by the
	 *         locator, or <tt>null</tt> if it could not be created.
	 */
	public static DataSource createDataSource(MediaLocator locator) {
		String locatorName = (locator != null) ? locator.toExternalForm()
				: "null";
		logger.info("Creating datasource for:" + locatorName);
		try {
			return Manager.createDataSource(locator);
		} catch (NoDataSourceException ex) {
			// The failure only concerns us, so log it and return null.
			logger.error("Could not create data source for " + locatorName, ex);
			return null;
		} catch (IOException ex) {
			// The failure only concerns us, so log it and return null.
			logger.error("Could not create data source for " + locatorName, ex);
			return null;
		}
	}

	/**
	 * Returns the duration of the output data source. Usually this will be
	 * DURATION_UNKNOWN, but if the current data source is set to an audio
	 * file, then this value will be of some use.
	 *
	 * @return the output duration
	 */
	public javax.media.Time getOutputDuration() {
		if (sourceProcessor == null) {
			return Duration.DURATION_UNKNOWN;
		}
		return sourceProcessor.getDuration();
	}

	/**
	 * Implements <tt>getSupportedAudioEncodings</tt> from interface
	 * <tt>MediaService</tt>, recalculating the cached encodings first if they
	 * are stale.
	 *
	 * @return an array of Strings containing audio formats in the order of
	 *         preference.
	 */
	public String[] getSupportedAudioEncodings() {
		ensureSupportedEncodingsAreDetected();
		return supportedAudioEncodings;
	}

	/**
	 * Implements <tt>getSupportedVideoEncodings</tt> from interface
	 * <tt>MediaService</tt>, recalculating the cached encodings first if they
	 * are stale.
	 *
	 * @return an array of Strings containing video formats in the order of
	 *         preference.
	 */
	public String[] getSupportedVideoEncodings() {
		ensureSupportedEncodingsAreDetected();
		return supportedVideoEncodings;
	}

	/**
	 * Finds the best codec between our own supported codecs and the remote
	 * supported codecs to initiate the media session. Currently "best" simply
	 * means the first common SDP code, in our order of preference.
	 *
	 * @param audioCodecList
	 *            the list of the remote audio supported codecs.
	 * @return the negotiated audio codec (SDP code), or <tt>null</tt> when
	 *         there is no common codec.
	 */
	@SuppressWarnings("unchecked")
	public String negotiateAudioCodec(List<Format> audioCodecList) {
		// SDP payload codes we can transmit, in order of preference.
		List<String> supportedSdpCodes = new ArrayList<String>();
		for (String format : getSupportedAudioEncodings()) {
			int sdpInt = Integer.parseInt(format);
			if (sdpInt != MediaUtil.UNKNOWN_ENCODING) {
				supportedSdpCodes.add(String.valueOf(sdpInt));
			}
		}
		// SDP payload codes offered by the remote party (compared by their
		// String form, as before).
		List<String> remoteSdpCodes = new ArrayList<String>();
		for (Object remoteCodec : audioCodecList) {
			remoteSdpCodes.add(remoteCodec.toString());
		}
		// Return the first supported code the remote party also offers.
		for (String supportedCodec : supportedSdpCodes) {
			if (remoteSdpCodes.contains(supportedCodec)) {
				return supportedCodec;
			}
		}
		return null;
	}

	/**
	 * Finds the best codec between our own supported codecs and the remote
	 * supported codecs to initiate the media session. Currently "best" simply
	 * means the first common SDP code, in our order of preference.
	 *
	 * @param videoCodecList
	 *            the list of the remote video supported codecs.
	 * @return the negotiated video codec (SDP code), or <tt>null</tt> when
	 *         there is no common codec.
	 */
	@SuppressWarnings("unchecked")
	public String negotiateVideoCodec(List<Format> videoCodecList) {
		// SDP payload codes we can transmit, in order of preference.
		List<String> supportedSdpCodes = new ArrayList<String>();
		for (String format : getSupportedVideoEncodings()) {
			int sdpInt = Integer.parseInt(format);
			if (sdpInt != MediaUtil.UNKNOWN_ENCODING) {
				supportedSdpCodes.add(String.valueOf(sdpInt));
			}
		}
		// SDP payload codes offered by the remote party (compared by their
		// String form, as before).
		List<String> remoteSdpCodes = new ArrayList<String>();
		for (Object remoteCodec : videoCodecList) {
			remoteSdpCodes.add(remoteCodec.toString());
		}
		// Return the first supported code the remote party also offers.
		for (String supportedCodec : supportedSdpCodes) {
			if (remoteSdpCodes.contains(supportedCodec)) {
				return supportedCodec;
			}
		}
		return null;
	}

	/**
	 * Retrieves the audio port for this media session; if no audio port has
	 * been allocated yet, it chooses one (configured or random) and caches
	 * it.
	 *
	 * @return the audio port for this media session
	 */
	public int getAudioPort() {
		if (localAudioPort != -1) {
			return localAudioPort;
		}
		String audioPort = PropertiesUtil
				.getProperty("edu.thu.thss.yxy.media.AUDIO_PORT");
		if (audioPort != null && !audioPort.equals("")) {
			localAudioPort = Integer.parseInt(audioPort);
		} else {
			// Pick a random port in [1024, 9908]; the adjustment below always
			// yields an even number (even ports are conventionally used for
			// RTP, odd ones for RTCP).
			localAudioPort = new Random().nextInt(8885);
			localAudioPort += (localAudioPort % 2 == 0) ? 1024 : 1025;
		}
		return localAudioPort;
	}

	/**
	 * Retrieves the video port for this media session; if no video port has
	 * been allocated yet, it chooses one (configured or random) and caches
	 * it.
	 *
	 * @return the video port for this media session
	 */
	public int getVideoPort() {
		if (localVideoPort != -1) {
			return localVideoPort;
		}
		String videoPort = PropertiesUtil
				.getProperty("edu.thu.thss.yxy.media.VIDEO_PORT");
		if (videoPort != null && !videoPort.equals("")) {
			localVideoPort = Integer.parseInt(videoPort);
		} else {
			// Pick a random port in [1024, 9908]; the adjustment below always
			// yields an even number (even ports are conventionally used for
			// RTP, odd ones for RTCP).
			localVideoPort = new Random().nextInt(8885);
			localVideoPort += (localVideoPort % 2 == 0) ? 1024 : 1025;
		}
		return localVideoPort;
	}

	/**
	 * Returns the processor handling the local capture/source data, or
	 * <tt>null</tt> if none has been initialized.
	 */
	public Processor getSourceProcessor() {
		return sourceProcessor;
	}

	/**
	 * Registers a listener to be notified of media events.
	 *
	 * @param listener
	 *            the listener to add.
	 */
	public void addMediaListener(MediaListener listener) {
		mediaListeners.add(listener);
	}

	/**
	 * Notifies every registered listener that a media stream has been
	 * received, wrapped in the given player.
	 *
	 * @param player
	 *            the player rendering the received stream.
	 */
	public void fireMediaStreamReceived(Player player) {
		MediaEvent evt = new MediaEvent(player);
		// Iterate backwards over the listener list, as elsewhere in this
		// class.
		for (int i = mediaListeners.size(); --i >= 0;) {
			mediaListeners.get(i).receivedMediaStream(evt);
		}
	}

	/**
	 * Notifies every registered listener that the media status has changed.
	 */
	public void fireMediaStatusChanged() {
		// Iterate backwards over the listener list, as elsewhere in this
		// class.
		for (int i = mediaListeners.size(); --i >= 0;) {
			mediaListeners.get(i).mediaStatusChanged();
		}
	}
}
