package com.guohang.ESProject08;

//changes in this version:
//clean up code
//new entropy filter
//new energy / entropy threshold
//window = 10mins

import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ArffSaver;
import android.app.Service;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.util.Log;


public class ContextService extends Service {
	
	public static final int RECORDER_SAMPLERATE = 8000;
	public boolean heard = false;
	
	public final static int FRAME_SIZE = 1024;
	public int sampleRate;
	private RecordAmplitudeTask recordTask;
	
	//communication with activity by messenger
	public Messenger mMessenger;
	private String emotion;
	private String arousal;
	
    //weka stuff
	private Instances mDataset;
	
    @Override
    public void onCreate() {
        // No service-specific initialization; recording starts in onStartCommand().
        super.onCreate();
    }
    
    @Override
    public void onDestroy(){
    	// Stop the background recording task so the microphone is released.
    	// Guard against the case where onStartCommand() never ran (e.g. the
    	// service was destroyed immediately) and recordTask is still null,
    	// which would otherwise throw a NullPointerException here.
    	if (recordTask != null) {
    		recordTask.cancel(true);
    	}
    	super.onDestroy();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId){

        // With START_STICKY the system may restart this service with a NULL
        // intent after the process is killed; the original code would have
        // crashed on intent.getExtras() in that case.
        if (intent != null) {
            Bundle extras = intent.getExtras();

            // Get the Messenger handed over by the Activity so classification
            // results can be sent back to the UI.
            if (extras != null) {
                mMessenger = (Messenger) extras.get("MESSENGER");
            }
        }

        // Start the AsyncTask that records audio and extracts features.
        recordTask = new RecordAmplitudeTask();
        recordTask.execute();

        return START_STICKY;
    }
   
   //Create Audio Recorder Manager Class
    public class AudioClipRecorder{

    	private static final String TAG = "AudioClipRecorder";
    	// Extra headroom multiplier for the internal recording buffer to
    	// reduce the chance of overflow while frames are being processed.
    	private static final int DEFAULT_BUFFER_INCREASE_FACTOR = 3;

    	private AudioRecord recorder;
    	private FrameAdmissionControl frameControl = new FrameAdmissionControl();
    	// The owning AsyncTask; polled each loop iteration for cancellation.
    	private AsyncTask task;

    	/**
    	 * @param task the AsyncTask driving this recorder; checked for
    	 *             cancellation between buffer reads so recording can stop.
    	 */
    	public AudioClipRecorder(AsyncTask task)
    	{
    	  this.task = task;
    	}

    	/** Records with the default sample rate (8 kHz) and 16-bit PCM encoding. */
    	public void startRecording()
    	{
    	  startRecording(RECORDER_SAMPLERATE, AudioFormat.ENCODING_PCM_16BIT);
    	}

    	/** Records mono audio at the given sample rate/encoding until cancelled. */
    	public void startRecording(final int sampleRate, int encoding)
    	{
    	  int bufferSize = determineMinimumBufferSize(sampleRate, encoding);
    	  doRecording(sampleRate, encoding, bufferSize, bufferSize, DEFAULT_BUFFER_INCREASE_FACTOR);
    	}

    	/** Minimum hardware buffer size for mono recording, in bytes (or an AudioRecord error code). */
    	private int determineMinimumBufferSize(final int sampleRate, int encoding)
    	{
    	  return AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, encoding);
    	}

    	/** Buffer size holding numSamplesInBuffer samples, never below the hardware minimum. */
    	private int determineCalculatedBufferSize(final int sampleRate, int encoding, int numSamplesInBuffer)
    	{
    	  int minBufferSize = determineMinimumBufferSize(sampleRate, encoding);
    	  // each 16-bit sample takes two bytes, so the byte buffer must be doubled
    	  int bufferSize = (encoding == AudioFormat.ENCODING_PCM_16BIT)
    	      ? numSamplesInBuffer * 2
    	      : numSamplesInBuffer;

    	  if (bufferSize < minBufferSize)
    	  {
    	    Log.w(TAG, "Increasing buffer to hold enough samples " + minBufferSize + " was: " + bufferSize);
    	    bufferSize = minBufferSize;
    	  }
    	  return bufferSize;
    	}

    	/**
    	 * Main recording loop: reads fixed-size frames from the microphone,
    	 * accumulates a window of frames, and hands each full window to
    	 * FrameAdmissionControl for feature extraction / classification.
    	 * Runs until the owning task is cancelled.
    	 */
    	private void doRecording(final int sampleRate, int encoding, int recordingBufferSize, int readBufferSize, int bufferIncreaseFactor)
    	{
    	  if (recordingBufferSize == AudioRecord.ERROR_BAD_VALUE)
    	  {
    	    Log.e(TAG, "Bad encoding value, see logcat");
    	  }
    	  else if (recordingBufferSize == AudioRecord.ERROR)
    	  {
    	    Log.e(TAG, "Error creating buffer size");
    	  }

    	  // give it extra space to prevent overflow
    	  int increasedRecordingBufferSize = recordingBufferSize * bufferIncreaseFactor;
    	  recorder = new AudioRecord(AudioSource.MIC, sampleRate, AudioFormat.CHANNEL_IN_MONO, encoding, increasedRecordingBufferSize);

    	  short[] readBuffer = new short[readBufferSize];
    	  // accumulate a sliding window of 40 frames before classifying
    	  ArrayList<short[]> frameWindow = new ArrayList<short[]>();
    	  final int windowSize = 40;
    	  int frameCount = 0;

    	  Log.d(TAG, "start recording, " + "recording bufferSize: "
    	      + increasedRecordingBufferSize
    	      + " read buffer size: " + readBufferSize);

    	  recorder.startRecording();

    	  while (true)
    	  {
    	    int bufferResult = recorder.read(readBuffer, 0, readBufferSize);

    	    // BUGFIX: the original condition was (task == null) && task.isCancelled(),
    	    // which throws an NPE when task is null and never detects cancellation
    	    // when task is set. Cancellation now actually stops the loop.
    	    if (task != null && task.isCancelled())
    	    {
    	      done();
    	      return;
    	    }

    	    // check for error conditions
    	    if (bufferResult == AudioRecord.ERROR_INVALID_OPERATION)
    	    {
    	      Log.e(TAG, "error reading: ERROR_INVALID_OPERATION");
    	    }
    	    else if (bufferResult == AudioRecord.ERROR_BAD_VALUE)
    	    {
    	      Log.e(TAG, "error reading: ERROR_BAD_VALUE");
    	    }
    	    else
    	    {
    	      // no errors, do processing
    	      if (frameCount < windowSize)
    	      {
    	        frameWindow.add(readBuffer);
    	        // allocate a fresh buffer so the stored frame is not overwritten
    	        readBuffer = new short[readBufferSize];
    	        frameCount++;
    	      }
    	      else
    	      {
    	        // NOTE(review): the frame read in this iteration is discarded —
    	        // only the 40 buffered frames are classified. Confirm whether
    	        // dropping one frame per window is intended.
    	        emotion = frameControl.bufferListener(frameWindow);

    	        // send the (currently placeholder) classification back to the Activity
    	        Message backMsg = new Message();
    	        try {
    	          backMsg.arg1 = -1;  // unused; the payload is in obj
    	          backMsg.obj = emotion;
    	          mMessenger.send(backMsg);
    	        } catch (android.os.RemoteException e1) {
    	          Log.w(getClass().getName(), "Exception sending message", e1);
    	        }

    	        frameCount = 0;
    	        frameWindow.clear();
    	      }
    	    }
    	  }
    	}

    	/** Stops and releases the recorder; safe to call more than once. */
    	public void done()
    	{
    	  Log.d(TAG, "shut down recorder");
    	  if (recorder != null)
    	  {
    	    recorder.stop();
    	    recorder.release();
    	    recorder = null;
    	  }
    	}
    }
    

     
    public class FrameAdmissionControl{

    	// Nominal {speech, nonspeech} class attribute of the Weka dataset.
    	private Attribute mClassAttribute;
    	// Label assigned to every collected instance (training-data collection mode).
    	private String mLabel = "speech";

        public FrameAdmissionControl(){

        	// Build the Weka dataset schema:
        	//   zero-crossing features: mean, std, range
        	//   RMS features: LEFR (low energy frame rate), std, range
        	//   plus one nominal label attribute.
        	// (Removed an unused DecimalFormat local and dead commented-out MFCC code.)
        	ArrayList<Attribute> allAttr = new ArrayList<Attribute>();

        	allAttr.add(new Attribute("meanZC"));
        	allAttr.add(new Attribute("stdZC"));
        	allAttr.add(new Attribute("rangeZC"));

        	allAttr.add(new Attribute("LEFR"));
        	allAttr.add(new Attribute("stdSR"));
        	allAttr.add(new Attribute("rangeSR"));

        	ArrayList<String> labelItem = new ArrayList<String>();
        	labelItem.add("speech");
        	labelItem.add("nonspeech");
        	mClassAttribute = new Attribute("label",labelItem);

        	allAttr.add(mClassAttribute);

        	mDataset = new Instances("speech_features", allAttr, 10000);
        	mDataset.setClassIndex(mDataset.numAttributes() - 1);
        }

        /**
         * Extracts speech/non-speech features from a window of audio frames
         * and appends them (with the fixed label) to the Weka dataset.
         *
         * Features: mean/std/range of per-frame zero-crossing counts, and
         * LEFR/std/range of per-frame RMS energy.
         *
         * @param window list of PCM frames (expected: 40 frames of 16-bit samples)
         * @return the classification result — currently always the placeholder
         *         "good"; no classifier is wired up yet.
         */
        public String bufferListener(ArrayList<short[]> window){

        	// TODO(review): no real classification happens yet; this method only
        	// collects labelled feature vectors and returns a constant.
        	String emotion = "good";

        	// Guard: an empty window would make range() throw and mean() return NaN.
        	if (window.isEmpty()) {
        		return emotion;
        	}

        	// step 1. per-frame zero-crossing count and RMS energy
        	double[] zeroCross = new double[window.size()];
        	double[] RMS = new double[window.size()];

        	for(int i=0;i<window.size();i++){
        		short[] _frame = window.get(i);
        		// convert from short to double
        		double[] frame = new double[_frame.length];
        		for(int k=0;k<_frame.length;k++){
        			frame[k] = _frame[k];
        		}

        		RMS[i] = RMS(frame);

        		// zero-crossing: count sign changes between adjacent samples
        		// (a transition away from an exact zero also counts)
        		int count = 0;
        		for (int samp = 0; samp < frame.length - 1; samp++)
        		{
        			if (frame[samp] > 0.0 && frame[samp + 1] < 0.0)
        				count++;
        			else if (frame[samp] < 0.0 && frame[samp + 1] > 0.0)
        				count++;
        			else if (frame[samp] == 0.0 && frame[samp + 1] != 0.0)
        				count++;
        		}
        		zeroCross[i] = count;
        	}

        	// step 2. window-level statistics over the per-frame values
        	double meanZC = mean(zeroCross);
        	double stdZC = std(zeroCross);
        	double rangeZC = range(zeroCross);

        	int    LEFR = LEFR(RMS);
        	double stdRMS = std(RMS);
        	double rangeRMS = range(RMS);

        	// step 3. store the feature vector as a Weka instance
        	// (attribute count taken from the dataset instead of a hard-coded 7,
        	// so the two cannot drift apart)
        	Instance inst = new DenseInstance(mDataset.numAttributes());
        	inst.setDataset(mDataset);

        	inst.setValue(0, meanZC);
        	inst.setValue(1, stdZC);
        	inst.setValue(2, rangeZC);
        	inst.setValue(3, LEFR);
        	inst.setValue(4, stdRMS);
        	inst.setValue(5, rangeRMS);

        	inst.setValue(mClassAttribute, mLabel);
        	mDataset.add(inst);

        	return emotion;
        }

        /** Arithmetic mean of the sample values (NaN for an empty array). */
        private double mean(double[] sample){
        	double sum = 0;
        	for(int i=0;i<sample.length;i++){
        		sum += sample[i];
        	}
        	return sum/sample.length;
        }

        /** Sample standard deviation (n-1 denominator); 0 for fewer than 2 values. */
        private double std(double[] sample){
        	// Guard: the original divided by (length - 1), producing NaN/Infinity
        	// for arrays shorter than 2.
        	if (sample.length < 2) {
        		return 0;
        	}
        	double sum = 0;
        	double mean = mean(sample);
        	for(int i=0;i<sample.length;i++){
        		sum += Math.pow((sample[i] - mean), 2);
        	}
        	return Math.sqrt( sum / ( sample.length - 1 ) );
        }

        /** Difference between the maximum and minimum sample value. */
        private double range(double[] sample){
        	double max = sample[0];
        	double min = max;
        	for(int i=1;i<sample.length;i++){
        		if(sample[i] > max){
        			max = sample[i];
        		}
        		if(sample[i] < min){
        			min = sample[i];
        		}
        	}
        	return max - min;
        }

        /** Root-mean-square energy of the signal. */
        private double RMS(double[] signal){
        	double sum = 0.0;
        	for(int i=0;i<signal.length;i++){
        		sum += signal[i]*signal[i];
        	}
        	return Math.sqrt(sum / signal.length);
        }

        /**
         * Low energy frame rate: the number of frames in the window whose RMS
         * is below 50% of the window's mean RMS.
         */
        private int LEFR(double[] RMS){
        	int LEFR = 0;
        	double mean = mean(RMS);
        	for(int i=0;i<RMS.length;i++){
        		if(RMS[i]<mean*0.5){LEFR++;}
        	}
        	return LEFR;
        }

    }
    
    
    private class RecordAmplitudeTask extends AsyncTask<Void, Void, Void>
    {

		@Override
		protected Void doInBackground(Void...arg0){
			// Blocks inside AudioClipRecorder.doRecording() until this task
			// is cancelled.
			AudioClipRecorder recorder = new AudioClipRecorder(this);
			recorder.startRecording();
			return null;
		}

		@Override
		protected void onCancelled(){

			// Persist the accumulated Weka feature set as an ARFF file on
			// external storage. Guard against the dataset never having been
			// initialized (recording may not have started).
			if (mDataset != null) {
				ArffSaver saver = new ArffSaver();
				saver.setInstances(mDataset);

				File pPath = Environment.getExternalStorageDirectory();
				// Original ignored the mkdirs() result; log the failure instead.
				if (!pPath.exists() && !pPath.mkdirs()) {
					Log.w(getClass().getName(), "Could not create output directory " + pPath);
				}

				try {
					File featureFile = new File(pPath, "features.arff");
					featureFile.createNewFile();
					saver.setFile(featureFile);
					saver.writeBatch();
				} catch (IOException e) {
					// Original used printStackTrace(); route through logcat with context.
					Log.w(getClass().getName(), "Failed to write features.arff", e);
				}
			}
			super.onCancelled();
		}

    }
    
    /*
    private class BuildModelTask extends AsyncTask<Void, Void, Void>{
 	   
        BinManager binManager = new BinManager(mMessenger, getBaseContext());
    	FeatureModel model = new FeatureModel(FRAME_SIZE, RECORDER_SAMPLERATE, FEATURE_COUNT, dataSource, binManager);
      
 	   @Override
 	   protected Void doInBackground(Void...arg0){
 		  float[] accBlock = new float[FRAME_SIZE];
 		  int blockSize = 0;
		  
 		  while(true){
 		    try {
				accBlock[blockSize++] = mFrameBuffer.take().floatValue();
				
				if(blockSize == FRAME_SIZE){
					model.updateModel(accBlock);
					accBlock = new float[FRAME_SIZE];
					blockSize = 0;
					continue;
				}
			} catch (InterruptedException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
 		  }
       } 
 	   
 	   @Override
 	   protected void onCancelled(){
 		   dataSource.close();
 		   model.taskFinish();
 		   super.onCancelled();
 	   }
 
    } */

	@Override
	public IBinder onBind(Intent arg0) {
		// Binding is not supported; clients communicate via the Messenger
		// passed in the start Intent instead.
		return null;
	}
    

}
