package conference.view;

import org.gstreamer.Caps;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.StateChangeReturn;
import org.gstreamer.elements.Queue;
import org.gstreamer.elements.Tee;

import conference.model.Client;
import conference.model.ClientList;
import conference.model.ConferenceStateClientSide;
import conference.model.ConferenceStateClientSide.CALL_STATE;


/**
 * Captures local audio and video with GStreamer and sends them as two UDP
 * streams (one multiudpsink per media type), while also rendering a local
 * video preview.
 *
 * <p>Video path: autovideosrc -> ffmpegcolorspace -> videoscale -> videorate
 * -> capsfilter -> tee; one tee branch goes (raw) to the local display sink,
 * the other is Theora-encoded and sent to the video multiudpsink.
 * Audio path: autoaudiosrc -> audioconvert -> vorbisenc -> audio multiudpsink.
 *
 * <p>Destinations default to localhost and are later replaced via
 * {@link #setBroadcastAndPrivateDestinationHosts(ClientList, Client)}.
 */
public class SenderStreamUDP {

	// Fallback destinations used until setBroadcastAndPrivateDestinationHosts() is called.
	private static final int DEFAULT_VIDEO_OUT_PORT = 6666;
	private static final int DEFAULT_AUDIO_OUT_PORT = 7777;
	private static final String DEFAULT_HOST_IP = "127.0.0.1";

	/** Shared conference state: roles, call state and destination ports. */
	private final ConferenceStateClientSide conference;

	private final Pipeline videoSenderPipeline;
	private final Element videoSrc;
	private final Element videoColorSpace;
	private final Element videoScale;
	private final Element videoRate;
	private final Element videoFilter;
	private final Element videoEnc;
	private final Element videoDisplaySink;
	private final Element videoUdpSink;
	private final Tee videoTee;
	private final Queue videoDisplayQueue;
	private final Queue videoUdpQueue;

	private final Pipeline audioSenderPipeline;
	private final Element audioSrc;
	private final Element audioConvert;
	private final Element audioEnc;
	private final Element audioSink;


	/**
	 * Builds the audio and video sender pipelines and starts them immediately.
	 *
	 * @param videoComponentElement GStreamer sink element used to render the
	 *        local video preview (owned by the caller's UI component)
	 * @param conference shared conference state providing destination ports,
	 *        the broadcaster role and the private-call state
	 */
	public SenderStreamUDP(Element videoComponentElement, ConferenceStateClientSide conference) {
		this.conference = conference;

		// ---- video pipeline elements ----
		videoSrc = ElementFactory.make("autovideosrc", "VideoSrc");
		videoColorSpace = ElementFactory.make("ffmpegcolorspace", "VideoColorSpace");
		videoScale = ElementFactory.make("videoscale", "VideoScale");
		videoScale.set("method", 1); // non-default scaling method (1 = bilinear in videoscale 0.10 — TODO confirm)
		videoRate = ElementFactory.make("videorate", "VideoRate");
		videoFilter = ElementFactory.make("capsfilter", "VideoFilter");
		// Constrain the stream to a small, low-framerate raw format to keep bandwidth down.
		videoFilter.setCaps(Caps.fromString("video/x-raw-yuv,width=(int)80,height=(int)60,framerate=(fraction)15/1"));
		videoEnc = ElementFactory.make("theoraenc", "VideoEnc");
		videoEnc.set("bitrate", 150);
		videoDisplaySink = videoComponentElement;
		videoUdpSink = ElementFactory.make("multiudpsink", "VideoSink");
		videoUdpSink.set("clients", DEFAULT_HOST_IP + ":" + DEFAULT_VIDEO_OUT_PORT);
		videoTee = new Tee("VideoTee");
		videoDisplayQueue = new Queue("VideoDisplayQueue");
		videoUdpQueue = new Queue("VideoUdpQueue");

		// ---- video pipeline assembly ----
		videoSenderPipeline = new Pipeline("VideoSenderPipeline");
		videoSenderPipeline.addMany(videoSrc, videoColorSpace, videoScale, videoRate, videoFilter,
				videoEnc, videoDisplaySink, videoUdpSink, videoTee, videoDisplayQueue, videoUdpQueue);
		videoSrc.link(videoColorSpace);
		videoColorSpace.link(videoScale);
		videoScale.link(videoRate);
		videoRate.link(videoFilter);
		videoFilter.link(videoTee);
		// Branch 1: raw preview on the local display sink.
		videoTee.link(videoDisplayQueue);
		videoDisplayQueue.link(videoDisplaySink);
		// Branch 2: Theora-encoded stream to the UDP sink.
		videoTee.link(videoUdpQueue);
		videoUdpQueue.link(videoEnc);
		videoEnc.link(videoUdpSink);

		// ---- audio pipeline elements ----
		audioSrc = ElementFactory.make("autoaudiosrc", "AudioSrc");
		audioConvert = ElementFactory.make("audioconvert", "AudioConvert");
		audioEnc = ElementFactory.make("vorbisenc", "AudioEnc");
		audioEnc.set("quality", 0.1); // low Vorbis quality to limit bandwidth
		audioSink = ElementFactory.make("multiudpsink", "AudioSink");
		audioSink.set("clients", DEFAULT_HOST_IP + ":" + DEFAULT_AUDIO_OUT_PORT);

		// ---- audio pipeline assembly ----
		audioSenderPipeline = new Pipeline("AudioSenderPipeline");
		audioSenderPipeline.addMany(audioSrc, audioConvert, audioEnc, audioSink);
		audioSrc.link(audioConvert);
		audioConvert.link(audioEnc);
		audioEnc.link(audioSink);

		startPipelines();
	}

	/**
	 * Starts (sets to PLAYING) the audio and video pipelines of this
	 * SenderStreamUDP and logs the state-change results and current clients.
	 */
	public void startPipelines() {
		StateChangeReturn videoStartResult = videoSenderPipeline.setState(State.PLAYING);
		StateChangeReturn audioStartResult = audioSenderPipeline.setState(State.PLAYING);
		System.out.println("SenderStreamUDP: Pipelines starting... video "+videoStartResult+" with clients "+videoUdpSink.get("clients")+" - audio "+audioStartResult+" with clients "+audioSink.get("clients"));
	}

	/**
	 * Stops (sets to NULL) the audio and video pipelines of this
	 * SenderStreamUDP and logs the state-change results.
	 */
	public void stopPipelines() {
		StateChangeReturn videoStopResult = videoSenderPipeline.setState(State.NULL);
		StateChangeReturn audioStopResult = audioSenderPipeline.setState(State.NULL);
		System.out.println("SenderStreamUDP: Pipeline stopping... video "+videoStopResult+" - audio "+audioStopResult);
	}

	/**
	 * Stops then restarts the audio and video pipelines of this SenderStreamUDP.
	 */
	public void restartPipelines() {
		stopPipelines();
		startPipelines();
	}

	/**
	 * Sets the pipelines' UDP destinations from a client list (broadcast) and
	 * an optional single client (private chat). The pipelines are stopped
	 * before the change and restarted afterwards.
	 *
	 * <p>If this user is the current broadcaster and {@code clientList} is
	 * non-empty, every listed client becomes a destination; the private peer
	 * is appended only while a call is in progress. Otherwise only the
	 * private peer (if any) is targeted.
	 *
	 * @param clientList the list of all the clients (for the broadcast)
	 * @param privateClient the client with whom we do the private chat
	 *        ({@code null} if no private chat)
	 */
	public void setBroadcastAndPrivateDestinationHosts(ClientList clientList, Client privateClient) {
		stopPipelines();
		if (clientList != null && conference.userIsCurrentBroadcaster()) {
			if (clientList.size() > 0) {
				String videoHosts = buildHostList(clientList, conference.getReceivingPortBroadcastVideo());
				String audioHosts = buildHostList(clientList, conference.getReceivingPortBroadcastAudio());
				// Append the private-chat peer while an accepted call is running.
				if (privateClient != null && conference.callState == CALL_STATE.IN_CALL) {
					videoHosts = videoHosts + "," + privateClient.getipAddr() + ":" + privateVideoPort();
					audioHosts = audioHosts + "," + privateClient.getipAddr() + ":" + privateAudioPort();
				}

				videoUdpSink.set("clients", videoHosts);
				audioSink.set("clients", audioHosts);
				System.out.println("SenderStreamUDP: videoHosts = " + videoHosts);
				System.out.println("SenderStreamUDP: audioHosts = " + audioHosts);

				System.out.println("SenderStreamUDP: New broadcast targets: "+clientList.toTransferString());
			}
		} else {
			// Not broadcasting: target only the private chat client, if any.
			// NOTE(review): unlike the broadcast branch above, this path does not
			// check callState == IN_CALL — preserved from the original; verify
			// that this asymmetry is intentional.
			if (privateClient != null) {
				videoUdpSink.set("clients", privateClient.getipAddr() + ":" + privateVideoPort());
				audioSink.set("clients", privateClient.getipAddr() + ":" + privateAudioPort());
			}
		}

		startPipelines();
	}

	/** Builds a comma-separated "ip:port" destination list for every client in the list. */
	private String buildHostList(ClientList clientList, int port) {
		StringBuilder hosts = new StringBuilder();
		for (int i = 0; i < clientList.size(); i++) {
			if (i > 0) {
				hosts.append(",");
			}
			hosts.append(clientList.get(i).getipAddr()).append(":").append(port);
		}
		return hosts.toString();
	}

	/** Video port the private peer listens on, depending on which side initiated the call. */
	private int privateVideoPort() {
		return conference.clientIsInitiator
				? conference.getReceivingPortPrivateAcceptorVideo()
				: conference.getReceivingPortPrivateInitiatorVideo();
	}

	/** Audio port the private peer listens on, depending on which side initiated the call. */
	private int privateAudioPort() {
		return conference.clientIsInitiator
				? conference.getReceivingPortPrivateAcceptorAudio()
				: conference.getReceivingPortPrivateInitiatorAudio();
	}

	/**
	 * Returns true if the video pipeline is playing.
	 *
	 * @return true if the video pipeline state is PLAYING, false otherwise
	 *         (including when querying the state throws)
	 */
	public boolean isVideoPlaying() {
		try {
			return this.videoSenderPipeline.getState() == State.PLAYING;
		} catch (Exception e) {
			System.out.println("SenderStreamUDP : Exception when trying to get the state of the videoSenderPipeline !");
			return false;
		}
	}

	/**
	 * Returns true if the audio pipeline is playing.
	 *
	 * @return true if the audio pipeline state is PLAYING, false otherwise
	 *         (including when querying the state throws)
	 */
	public boolean isAudioPlaying() {
		try {
			return this.audioSenderPipeline.getState() == State.PLAYING;
		} catch (Exception e) {
			System.out.println("SenderStreamUDP : Exception when trying to get the state of the audioSenderPipeline !");
			return false;
		}
	}
}
