package viper.sender;

import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.Random;
import java.util.Vector;

import javax.media.Buffer;
import javax.media.CannotRealizeException;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.Control;
import javax.media.DataSink;
import javax.media.Format;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.NoDataSourceException;
import javax.media.NoProcessorException;
import javax.media.Processor;
import javax.media.ProcessorModel;
import javax.media.control.BufferControl;
import javax.media.format.AudioFormat;
import javax.media.protocol.BufferTransferHandler;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.DataSource;
import javax.media.protocol.PushBufferDataSource;
import javax.media.protocol.PushBufferStream;

import viper.call.common.LiveStream;
import viper.main.StateManager;
import viper.main.ViperHeader;

public class CallSender implements Runnable {
	// Captures audio from the microphone, encodes it with the codec in
	// FORMATS, and transmits the packetized stream over UDP to the callee.

	/** Singleton; construction locates the first available capture device. */
	public final static CallSender INSTANCE = new CallSender();

	private CallSender(){

		// Audio format expected from the capture device:
		// 44.1 kHz, 16-bit, stereo, linear PCM.
		AudioFormat format = new AudioFormat(AudioFormat.LINEAR, //encoding type
				44100, //sampling frequency
				16, //sample depth
				2); //channels

		// Pick the first connected capture device matching the format.
		// NOTE(review): if no device matches, elementAt(0) throws and the class
		// fails to load (ExceptionInInitializerError) — confirm whether a
		// friendlier failure is wanted here.
		Vector<?> devices = CaptureDeviceManager.getDeviceList(format);
		System.out.println("::Retrieving list of available capture devices...");
		cdi = (CaptureDeviceInfo) devices.elementAt(0);
	}

	public MediaLocator mediaLocator = null;
	public Processor mediaProcessor = null;
	public Extractor extractor = null;

	private DataSource source = null;
	private DataSource digitizedAudioSource;

	// Capture device chosen once in the constructor; reused by run().
	private CaptureDeviceInfo cdi;

	// Codec the Processor encodes the raw capture stream into.
	private static final Format[] FORMATS = new Format[]{
		new AudioFormat(AudioFormat.MPEG,
				44100,
				16,
				2,
				AudioFormat.BIG_ENDIAN,
				AudioFormat.SIGNED,
				8,
				8000.0,
				Format.byteArray)
	};
	// Processor output content type: raw data buffers (no container format).
	private static final ContentDescriptor CONTENT_DESCRIPTOR =
		new ContentDescriptor(ContentDescriptor.RAW); //we just want raw data buffers

	/**
	 * Connects the capture device, starts the encoder, and begins
	 * transmitting packets to the first callee registered in StateManager.
	 */
	public void run() {
		String ipArg = (String)StateManager.INSTANCE.getCalleeArray()[0];
		InetAddress destIP = UDPSender.ipBytesToInetAddress(
				UDPSender.ipStringToBytes(ipArg)
				);

		int destPort = StateManager.VIPER_PORT;

		//Create DataSource from capture device
		try {
			// FIX: reuse the capture device already located in the
			// constructor; the old code re-queried CaptureDeviceManager here
			// with a raw Vector and shadowed the 'cdi' field with an
			// identically named local.
			source = Manager.createDataSource(cdi.getLocator());
			// Set up the processor to encode audio digitally to defined format.
			setDataSource(source);
			System.out.println("::Capture Device connected!");

			System.out.println("::DataSource Initialized!");
		} catch (NoDataSourceException e) {
			e.printStackTrace();
			return; // FIX: without a source/processor, the start-up below would NPE
		} catch (IOException e) {
			e.printStackTrace();
			return;
		} catch (NoProcessorException e) {
			e.printStackTrace();
			return;
		} catch (CannotRealizeException e) {
			e.printStackTrace();
			return;
		}

		try {
			//Begin encoding audio stream.
			mediaProcessor.start(); //Start the Processor: encode audio data by defined codec
			digitizedAudioSource.start();
			System.out.println("::Audio Encoding Started...");

			//Extract data stream from DataSource object
			//Extractor also doubles as the transmitter that sends each individual packetized data.
			extractor = new Extractor(digitizedAudioSource, destIP, destPort);
			extractor.setDropRate(0.3);
			extractor.start();

			System.out.println("::Transmitting to " + destIP + " on port " + destPort +"...");

		} catch (Throwable t) {
			t.printStackTrace();
		}

		System.out.println("CallSender started");
	}

	/** Stops transmission, the encoded stream, and the Processor. */
	public void end()
	{
		// FIX: null-guard each handle so end() is safe even when run()
		// never completed (e.g. no capture device, realize failure).
		if (extractor != null) {
			extractor.stop();
		}
		if (digitizedAudioSource != null) {
			try {
				digitizedAudioSource.stop();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		if (mediaProcessor != null) {
			mediaProcessor.stop();
		}
		System.out.println("stop CallSender");
	}

	/**
	 * Builds a realized Processor that encodes {@code ds} with FORMATS,
	 * shortens its buffering for low latency, and publishes the encoded
	 * stream via {@code digitizedAudioSource}.
	 *
	 * @param ds raw DataSource from the capture device
	 * @throws IOException if the Processor cannot be created
	 * @throws NoProcessorException if no Processor handles the source/format
	 * @throws CannotRealizeException if the Processor fails to realize
	 */
	public void setDataSource(DataSource ds) throws IOException,
	NoProcessorException, CannotRealizeException {

		//Construct Processor that will handle encoding with specified codec.
		mediaProcessor = Manager.createRealizedProcessor(
				new ProcessorModel(ds, FORMATS, CONTENT_DESCRIPTOR));

		// FIX: the old code blindly cast getControls()[1] to BufferControl;
		// the ordering of getControls() is implementation-defined. Locate the
		// BufferControl explicitly and skip the tuning if it is absent.
		BufferControl bcon = null;
		for (Control c : mediaProcessor.getControls()) {
			if (c instanceof BufferControl) {
				bcon = (BufferControl) c;
				break;
			}
		}
		if (bcon != null) {
			bcon.setBufferLength(16); //in ms — keep latency low
			System.out.println("::DEBUG, Buffer Length: "+bcon.getBufferLength());
		}

		// Get the digitized and encoded data stream as a DataSource
		digitizedAudioSource = mediaProcessor.getDataOutput();
	}

	/**
	 * Pulls each encoded Buffer pushed by the PushBufferDataSource and
	 * transmits it as a Viper UDP packet to the destination host.
	 */
	class Extractor implements BufferTransferHandler {

		DataSource ds;
		PushBufferStream streams[];
		Buffer buff;
		LiveStream ls;
		Buffer bufftemp; // scratch buffer, only used by disabled loopback tests
		DatagramSocket sock = StateManager.INSTANCE.getDatagramSocket();
		InetAddress destIP;
		double drate; // simulated packet-loss rate (set via setDropRate; used only by disabled test code)
		int destPort;
		// Scratch packets reserved for the (2,3,2) diversity-encoding scheme
		// prototyped (and currently disabled) in transmit().
		byte[] one;
		byte[] two;
		byte[] three;

		Random rgen; // randomness source for simulated packet drops

		/**
		 * @param src push-style DataSource carrying the encoded audio
		 * @param ip destination host
		 * @param port destination UDP port
		 */
		public Extractor(DataSource src, InetAddress ip, int port){
			//DataSource containing the digitized audio stream is a push-type stream
			streams = ((PushBufferDataSource)src).getStreams();

			// There's only a single audio stream in the DataSource
			streams[0].setTransferHandler(this);

			ds = src;
			buff = new Buffer();
			bufftemp = new Buffer();

			ls = new LiveStream();

			destIP = ip;
			destPort = port;

			drate = 0.0;
			rgen = new Random();

			System.out.println("::Destination Host identified as: " + destIP + "/" + port);
		}

		public LiveStream getLiveStream(){
			return ls;
		}

		public PushBufferStream getStream(){
			return streams[0];
		}

		/** Starts the underlying DataSource so buffer pushes begin. */
		public void start() {
			try {
				ds.start();
			} catch (IOException e) {
				System.err.println(e);
			}
		}

		/** Stops the underlying DataSource. */
		public void stop() {
			try {
				ds.stop();
			} catch (IOException e) {
				System.err.println(e);
			}
		}

		/** This will get called when there's data pushed from the PushBufferDataSource. **/
		public void transferData(PushBufferStream stream) {
			try {
				stream.read(buff);
			} catch (IOException e) {
				System.err.println(e);
				return;
			}
			transmit(buff);
		}

		/** Sets the simulated packet loss rate as a fraction (0.3 = 30%). */
		public void setDropRate(double rate){
			drate = rate;
			System.out.println("::Set Packet Loss Rate to: " + drate);
		}

		/**
		 * Sends one extracted Buffer as a Viper UDP packet. Executes every
		 * time data is available in the PushBufferStream.
		 *
		 * NOTE(review): this sends the Buffer's entire backing array;
		 * buffer.getLength()/getOffset() are ignored — confirm receivers
		 * tolerate trailing stale bytes.
		 */
		public void transmit(Buffer buffer) {
			// NON-ENCODED TRANSMISSION: every buffer goes out tagged as packet 1.
			byte[] toSend = (byte[])buffer.getData();
			int counter = 1;
			byte[] packetBytes = ViperHeader.makeCallReceivePacket((byte)counter, toSend);
			UDPSender.sendUDP(destIP.getAddress(), packetBytes);

			// NOTE(review): a (2,3,2) diversity-encoded transmission (XOR
			// parity of consecutive packets stored in one/two/three) was
			// prototyped here but is disabled. When re-enabling it, 'counter'
			// must be promoted to a field — as a local it resets to 1 on every
			// call, so the old draft's counter++ alternation could never fire.
		}

	}
}
