package viper.send;

import viper.main.StateManager;

import java.io.*;
import java.util.Vector;
import javax.media.*;
import javax.media.protocol.*;
import javax.media.format.*;
import viper.receive.*;
import java.net.*;

public class CallSender implements Runnable {
	// Captures audio from the microphone, encodes it via JMF, and hands the
	// encoded stream to an Extractor that packetizes and transmits it.

	/** Locator for the capture device. NOTE(review): never assigned in this class. */
	public MediaLocator mediaLocator = null;
	/** JMF processor that transcodes the raw capture stream to the codec in FORMATS. */
	public Processor mediaProcessor = null;
	/** Output of mediaProcessor: the digitized, codec-encoded audio stream. */
	private DataSource digitizedAudioSource;
	// Destination / relay addresses for transmission.
	// NOTE(review): neither field is assigned anywhere in this class, so both
	// remain null unless populated elsewhere — confirm how they are set.
	private InetAddress destIP = null;
	private InetAddress[] relayIP_Array = null;
	/** DataSource pulled directly from the capture device. */
	private DataSource source = null;
	/** Destination port, taken from the application-wide constant. */
	private final int destPort = StateManager.VIPER_PORT;

	// Global packet sequence counter.
	// NOTE(review): public, static, and unsynchronized — if multiple threads
	// increment this, updates can be lost; consider AtomicInteger.
	public static int SEQ_NUM = 0;

	/** Target codec for the outgoing stream. */
	private static final Format[] FORMATS = new Format[]{
		new AudioFormat(AudioFormat.MPEG)
	};
	/** Processor output type: raw data buffers (we packetize them ourselves). */
	private static final ContentDescriptor CONTENT_DESCRIPTOR =
		new ContentDescriptor(ContentDescriptor.RAW);

	/**
	 * Worker entry point: locates a capture device, starts encoding, and
	 * launches the Extractor/transmitter. All failures are logged; run()
	 * cannot propagate checked exceptions.
	 */
	public void run() {
		try {
			// Audio format expected from the capture device:
			// 44.1 kHz, 16-bit, stereo linear PCM.
			AudioFormat format = new AudioFormat(AudioFormat.LINEAR, //encoding type
					44100, //sampling frequency (Hz)
					16,    //sample depth (bits)
					2);    //channels (stereo)

			// Pick the first connected capture device supporting the format.
			System.out.println("::Retrieving list of available capture devices...");
			Vector devices = CaptureDeviceManager.getDeviceList(format);
			if (devices == null || devices.isEmpty()) {
				// Fail fast with a clear message instead of the raw
				// ArrayIndexOutOfBoundsException elementAt(0) would throw.
				throw new IllegalStateException(
						"No capture device supports format: " + format);
			}
			CaptureDeviceInfo cdi = (CaptureDeviceInfo) devices.elementAt(0);
			System.out.println("::Capture device set with the following properties: ");
			System.out.println(cdi);

			// Wrap the capture device in a DataSource.
			source = Manager.createDataSource(cdi.getLocator());
			System.out.println("::Capture Device connected!");

			// Build a realized Processor that encodes the capture stream with
			// the codec in FORMATS and emits raw buffers.
			mediaProcessor = Manager.createRealizedProcessor(
					new ProcessorModel(source, FORMATS, CONTENT_DESCRIPTOR));

			// The Processor's output is the encoded audio stream.
			digitizedAudioSource = mediaProcessor.getDataOutput();
			System.out.println("::DataSource Initialized!");

			// Begin encoding the audio stream.
			mediaProcessor.start();
			digitizedAudioSource.start();
			System.out.println("::Audio Encoding Started...");

			// The Extractor pulls buffers from the encoded stream and doubles
			// as the transmitter for each packetized chunk. (The returned
			// LiveStream was previously stored in an unused local; dropped.)
			extract(destIP, destPort);
			// Fixed message: original was missing the space before "on port".
			System.out.println("::Transmitting to " + destIP + " on port " + destPort + "...");
		} catch (Throwable t) {
			// Thread entry point: log everything so a failure in this worker
			// is not silently lost. TODO(review): use a logger, not stderr.
			t.printStackTrace();
		}
	}

	/**
	 * Starts an Extractor on the encoded audio stream and returns its
	 * LiveStream handle.
	 *
	 * NOTE(review): the {@code ip} and {@code port} parameters are currently
	 * ignored — the Extractor is constructed from {@code relayIP_Array} only.
	 * Confirm whether that is intentional.
	 *
	 * @param ip   intended destination address (currently unused)
	 * @param port intended destination port (currently unused)
	 * @return the Extractor's live stream
	 * @throws IOException declared for callers; not thrown by the visible code
	 */
	public LiveStream extract(InetAddress ip, int port) throws IOException {
		// Data transmission takes place within the Extractor.
		Extractor ex = new Extractor(digitizedAudioSource, relayIP_Array);
		ex.start();
		return ex.getLiveStream();
	}

}
