/*
 * SeerMainActivity.java - Implementation for Android Runtime / Dalvik VM of Seer
 *
 * Seer - visual-to-auditory sensory substitution
 *
 * More info at http://www.sensub.org
 *
 * 2008/04/05 - first Android Version (s.strahl@ucl.ac.uk)
 * 2008/04/12 - changed all operations to integer arithmetic (s.strahl@ucl.ac.uk)
 *
 * TODOs
 * - wav at resume of running==false that says "please touch screen or press center key to start"
 * - internationalization
*/

package org.sensub.seer;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.hardware.CameraDevice;
import android.os.Bundle;
import android.media.MediaPlayer;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.io.FileOutputStream;
import java.util.Map;
import java.util.Arrays;

public class SeerMainActivity extends Activity
{
	private static final String TAG = "SeerMainActivity"; // log tag
	// UI widgets, resolved from R.layout.main in onCreate().
	private SeerComponent mSeerComponent; // camera-preview/sonification view
	private TextView mStatusText; // shows msg_stopped
	private TextView mInfoText; // shows msg_info
    private Button mButton; // toggles the running state via switchState()
	
    @Override
    protected void onCreate(Bundle icicle)
    {
        super.onCreate(icicle);

        // Build the UI from the XML layout and wire up its widgets.
        setContentView(R.layout.main);

        mSeerComponent = (SeerComponent) findViewById(R.id.seer);

        mStatusText = (TextView) findViewById(R.id.statustext);
        mStatusText.setText(R.string.msg_stopped);

        mInfoText = (TextView) findViewById(R.id.infotext);
        mInfoText.setText(R.string.msg_info);

        mButton = (Button) findViewById(R.id.button);
        mButton.setOnClickListener(new View.OnClickListener()
        {
            public void onClick(View view)
            {
                switchState(); // button press toggles the conversion thread
            }
        });
    } /* protected void onCreate() */

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent msg)
    {
        // DPAD-center and space both toggle the running state; any other
        // key is reported as unhandled so the framework can process it.
        switch (keyCode)
        {
            case KeyEvent.KEYCODE_DPAD_CENTER:
            case KeyEvent.KEYCODE_SPACE:
                switchState();
                return true;
            default:
                return false;
        }
    } /* public boolean onKeyDown() */

    @Override
    public boolean dispatchTouchEvent(MotionEvent ev)
    {
        // BUGFIX: the original returned false WITHOUT calling super, so the
        // event was never dispatched to the view hierarchy -- the Button's
        // OnClickListener registered in onCreate() could never fire.
        // Let the views handle the event first; only a touch-down that no
        // view consumed toggles the running state.
        boolean handled = super.dispatchTouchEvent(ev);
        if (!handled && ev.getAction() == MotionEvent.ACTION_DOWN) // touch on empty screen area
        {
            switchState(); // then switch running state
        }
        return handled;
    } /* public boolean dispatchTouchEvent() */

    @Override
    protected void onResume()
    {
        super.onResume();
        // Recreate the preview thread (dropped in onPause() because its
        // wavetable is memory-hungry) and restart if the user had it running.
        mSeerComponent.resume();
    }

    @Override
    protected void onPause()
    {
        super.onPause();
        // Release the CameraDevice and tear down the thread; its wavetable
        // has a large memory footprint we must not keep while backgrounded.
        mSeerComponent.pause();
    }

    private boolean switchState()
    {
        // Toggle the visual-to-auditory conversion thread and keep the
        // button label in sync with the new state. Always reports true.
        if (!mSeerComponent.running)
        {
            // Currently stopped -> start it (label updated first, as before).
            mButton.setText(R.string.bt_running);
            mSeerComponent.start();
        }
        else
        {
            // Currently running -> stop it, then adjust the label.
            mSeerComponent.stop();
            mButton.setText(R.string.bt_stopped);
        }
        return true;
    } /* private boolean switchState() */

    public static class SeerComponent extends SurfaceView implements SurfaceHolder.Callback
    {
    	private static final String TAG = "SeerComponent"; // log tag
    	SurfaceHolder  mHolder; // holder of the drawing surface
    	private PreviewThread mPreviewThread; // worker thread; null while paused
    	private boolean mHasSurface; // true between surfaceCreated() and surfaceDestroyed()
    	private final int width = 80; //320; // camera capture width in pixels
    	private final int height = 64; //240; // camera capture height in pixels (= number of carrier frequencies)
    	private final int lowestCF = 1000; // lowest frequency used (Hz)
    	private final int highestCF = 6000; // highest frequency used (Hz)
    	
    	// Temporary WAV file fed to MediaPlayer each second (see PreviewThread).
    	private static final String PATH_TO_FILE_UNTIL_DIRECT_AUDIOBUFFER_POSSIBLE_IN_ANDROID = "/tmp/Seer.wav";
    	public boolean running = false; // user-visible on/off state, set by start()/stop()
    
    	//  we need this constructor for ViewInflate
    	public SeerComponent(Context context, AttributeSet attrs, Map params)
        {
            super(context, attrs, params);
            // Register for surface lifecycle callbacks; the surface is not
            // usable until surfaceCreated() fires.
            mHolder = getHolder();
            mHolder.addCallback(this);
            mHasSurface = false;
            // The camera image is tiny, so display it with a 4x zoom.
            mHolder.setFixedSize(4*width, 4*height);
        }

    	public void resume()
    	{
    		// Lazily (re)create the worker -- it is destroyed in pause().
    		if (mPreviewThread == null)
    		{
    			mPreviewThread = new PreviewThread();
    		}
    		// Only start converting when both the surface exists and the
    		// user has switched us on.
    		if (mHasSurface && running)
    		{
    			mPreviewThread.start();
    		}
    	} /* public void resume() */
    
    	public void pause()
    	{
    		// Tear the worker down completely while paused; its wavetable
    		// holds a lot of memory we should not keep in the background.
    		if (mPreviewThread != null)
    		{
    			mPreviewThread.requestExitAndWait();
    			mPreviewThread = null; // resume() will build a fresh one
    		}
    	} /* public void pause() */

    	public void surfaceCreated(SurfaceHolder holder)
    	{
    		// The drawing surface is now available; resume() may start the thread.
    		mHasSurface = true;
    	}

    	public void surfaceDestroyed(SurfaceHolder holder)
    	{
    		// Surface is gone -- stop the worker so it cannot draw on it.
    		mHasSurface = false;
    		pause();
    	}

    	public void surfaceChanged(SurfaceHolder holder, int format, int w, int h)
    	{
        // Surface size or format has changed. This should not happen here
        // because the constructor pins the size with setFixedSize().
    	}

    	public void start()
    	{
    		// Mark us active, then let resume() create/start the worker.
    		running = true;
    		resume();
    	}
    
    	public void stop()
    	{
    		// Mark us inactive, then let pause() shut the worker down.
    		running = false;
    		pause();
    	}
    
    	// Worker thread performing the capture -> sonify -> play loop; see run().
    	class PreviewThread extends Thread
    	{
    		private boolean mDone; // exit flag, set by requestExitAndWait()
        
    		PreviewThread()
    		{
    			super();
    			mDone = false; // run() loops until this becomes true
    		}
    
    		@Override
    		public void run()
    		{
    			// Main worker loop: grab a camera frame, paint it on the surface,
    			// then sonify it column-by-column into a 1-second 16kHz 16-bit
    			// mono buffer (column position -> time slice, pixel row -> carrier
    			// frequency on an ERB scale, brightness -> carrier amplitude) and
    			// play it back through MediaPlayer via a temporary WAV file.
                Bitmap mCameraBitmap; // bitmap where camera data is stored
                Canvas mCameraCanvas,mSurfaceCanvas; // canvas to get camera data and to draw image
                Rect mRect,mRect2; // camera and UI bitmap sizes
                SurfaceHolder holder;
                CameraDevice camera;
                MediaPlayer mMediaPlayer; // used to output waveform
    			byte[] audioBuffer; // 16bit little-endian PCM computed from video input
    			int[] pixels; // pixels from camera data, one ARGB int each
    			int[] sonification; // waveform accumulator for the current column
    			int[] wavetable; // precomputed sinusoidal carriers, one per image row
    			double freq;
    			double lowestCF_ERBS, highestCF_ERBS, spacingERBS, ERBS; // frequency scale of human auditory filters
    			int samplerate, nsamples;
    			int grayvalue;
    			int temp,i,w,h,pos_src,pos_dest,pos;
                                
    			camera = CameraDevice.open(); // open CameraDevice of mobile device
    			if (camera != null)
    			{
    				// Ask for a downscaled width x height RGB_565 preview frame.
    				CameraDevice.CaptureParams param = new CameraDevice.CaptureParams();
                    param.type 			= 1; // preview
                    param.srcWidth      = 1280;
                    param.srcHeight     = 1024;
                    param.leftPixel     = 0;
                    param.topPixel      = 0;
                    param.outputWidth   = width;
                    param.outputHeight  = height;
                    param.dataFormat    = 2; // RGB_565
                    camera.setCaptureParams(param);
    			}
    			else
    			{
    				Log.d(TAG,"Error: could not open camera!");
    				return; // no camera -> nothing to do
    			}

    			holder = mHolder; // get access to UI Surface
    			samplerate   = 16000; // 44.1kHz would be much nicer, perhaps possible once speed problem is solved
    			nsamples     = (int) Math.floor(samplerate/width); // samples per column (integer division: 16000/80 = 200)
    			audioBuffer  = new byte[2*samplerate]; // 1s of 16bit samples, 2 bytes each
    			wavetable    = new int[height*nsamples]; // one nsamples-long carrier per row -> 'height' frequencies
    			sonification = new int[nsamples]; // waveform of one column 
    			pixels       = new int[height*width]; // data from camera as integer array
    			
				// Place the 'height' carrier frequencies equidistantly on the ERB
				// scale between lowestCF and highestCF.
				lowestCF_ERBS  = 21.4 * Math.log10(0.00437 * lowestCF + 1.0); // ERB Scale := number of ERBs below each frequency
				highestCF_ERBS = 21.4 * Math.log10(0.00437 * highestCF + 1.0); // Moore & Glasberg "A revision of Zwicker's loudness  model" Acta Acustica, vol. 82, pp. 335-345 (1996)
				spacingERBS    = (highestCF_ERBS-lowestCF_ERBS)/(height-1); // distance of filters on human ERB Scale
    			for (pos=0,h=0; h<height; h++) // prepare wavetable
        		{
    				ERBS  = lowestCF_ERBS + h * spacingERBS; // freq on ERB scale
    				freq = (Math.pow(10.0, (ERBS / 21.4)) - 1.0) / 0.00437; // inverse of the ERB mapping above -> frequency in Hz
    				for (i=0; i<nsamples; i++)
        			{
        				wavetable[pos++] = (int) (32767 * Math.sin(2*Math.PI * freq * ((double)i/samplerate))); // values are -32767...32767
        			}    				
        		}
    			
    			mMediaPlayer = new MediaPlayer(); // get new MediaPlayer for audio output
    			mCameraBitmap = Bitmap.createBitmap(width, height, true); // prepare bitmap where camera input is stored
            	mCameraCanvas = new Canvas(mCameraBitmap);
    			mRect = new Rect(0,0,width,height); // this is the size of the camera input
    			// NOTE(review): 8x here disagrees with the 4x passed to
    			// setFixedSize() in the constructor -- confirm the intended zoom.
    			mRect2 = new Rect(0,0,8*width,8*height); // this is the size on the mobile display
    			try
            	{
                    Arrays.fill(audioBuffer, (byte) 0); // start with 1s of silence
                    interim_solution_until_direct_audiobuffer_possible_in_android(audioBuffer); // create WAV-file
            		mMediaPlayer.setDataSource(PATH_TO_FILE_UNTIL_DIRECT_AUDIOBUFFER_POSSIBLE_IN_ANDROID); // prepare Mediaplayer to play this WAV-file
            	}
            	catch(Exception ex)
            	{
            	    Log.d(TAG,"Error: could not play wav file:"+ex.toString());    		
            	}
    			
    			while (!mDone) // main visual-to-auditory conversion loop
    			{
                    try
                	{
                		// NOTE(review): start() immediately after prepareAsync()
                		// assumes preparation finishes synchronously; on later SDKs
                		// this needs prepare() or an OnPreparedListener -- verify.
                    	mMediaPlayer.prepareAsync(); // prepare to play WAV-file generated last time
                		mMediaPlayer.start(); // play this 1sec WAV while we prepare next sonification
                	}
                	catch(Exception ex)
                	{
                	    Log.d(TAG,"Error: could not play wav file:"+ex.toString());    		
                	}

                	camera.capture(mCameraCanvas); // get new image from camera
                	mCameraBitmap.getPixels(pixels, 0, width, 0, 0, width, height); // convert image into integer array
                	
    				mSurfaceCanvas = holder.lockCanvas(); // lock the surfaces to get the canvas   	
    				mSurfaceCanvas.drawARGB(255, 0, 0, 0); // clear canvas to opaque black
    				mSurfaceCanvas.drawBitmap(mCameraBitmap, mRect, mRect2, null); // draw acquired image scaled up to mRect2
    				holder.unlockCanvasAndPost(mSurfaceCanvas); // we are finished painting
    				
    				for(pos_src=0,pos_dest=0,w=0; w<width; w++) // for every column in image
                	{
                		Arrays.fill(sonification,0); // init waveform for this column
                		for (pos=0, h=0; h<height; h++)	// for every pixel in column
                		{
                			// NOTE(review): pos_src walks the row-major pixel array
                			// sequentially, so each 'column' actually reads a run of
                			// consecutive row-major pixels; a true image column would
                			// be pixels[h*width+w] -- confirm intended scan order.
                			// Average the B, G and R bytes -> brightness 0...255.
                			grayvalue = (int) ( (pixels[pos_src]&0xFF) + ( (pixels[pos_src]>>8) &0xFF) + ( (pixels[pos_src++]>>16) &0xFF) )/3; // 0...255
                			for (i=0; i<nsamples; i++) // add weighted copy from wavetable
                			{
                				sonification[i] += (int) (grayvalue * wavetable[pos++]); // + 0...8355585=32767*255
                			}
                		}
                		for (i=0; i<nsamples; i++) // copy final sound mixture into little-endian 16bit representation
                		{
                			temp = sonification[i] / (255*height); // rescale back to -32767...32767
                			audioBuffer[pos_dest++] = (byte) (temp & 0xFF);
                			audioBuffer[pos_dest++] = (byte) ((temp>>8) & 0xFF);
                		}
                	} /* for (w=0; w<width; w++) */
                		
                	while (mMediaPlayer.isPlaying()) // did we generate new sonification below one second?
                	{
                		try
                		{
                			Thread.sleep(10); // poll-wait until playback of the 1s clip ends
                		}
                    	catch(Exception ex) {}                		
                	}
            		mMediaPlayer.stop(); // tell MediaPlayer to stop
            		mMediaPlayer.reset(); // clean up so setDataSource()/prepare can run again
                    interim_solution_until_direct_audiobuffer_possible_in_android(audioBuffer); // save new sonification into WAV-file
    			} /* while (!mDone) */

    			if (camera != null)
    				camera.close();    // be nice and release CameraDevice 
        	} /* public void run() */    		
        
    		// Signals run() to finish its current 1-second cycle and blocks until
    		// the thread has terminated. Must NOT be called from the PreviewThread
    		// itself -- join() on oneself would deadlock.
    		public void requestExitAndWait()
    		{
    			mDone = true; // run() checks this once per loop iteration
    			try
    			{
    				join();
    			}
    			catch (InterruptedException ex)
    			{
    				// BUGFIX: do not swallow the interrupt -- restore the caller's
    				// interrupt status so upstream code can still observe it.
    				Thread.currentThread().interrupt();
    			}
    		} /* public void requestExitAndWait() */
    		
    		// Writes 'buffer' (1s of 16kHz 16bit mono PCM) prefixed with a fixed
    		// WAV header to PATH_TO_FILE_..., so MediaPlayer can play it. Errors
    		// are logged and swallowed (best effort, matches the caller's design).
    		private void interim_solution_until_direct_audiobuffer_possible_in_android(byte[] buffer)
    		{
//    			/* header for a 1sec 44.1kHz 16bit mono WAV-File */
//    			byte[] wavheader = {0x52, 0x49, 0x46, 0x46, -84, 0x58, 0x01, 0x00, 0x57, 0x41, 0x56, 0x45, 0x66, 0x6d, 0x74, 0x20, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x44, -84, 0x00, 0x00, -120, 0x58, 0x01, 0x00, 0x02, 0x00, 0x10, 0x00, 0x64, 0x61, 0x74, 0x61, -120, 0x58, 0x01, 0x00}; 
    			/* header for a 1sec 16kHz 16bit mono WAV-File */
    			byte[] wavheader = {0x52, 0x49, 0x46, 0x46, 0x24, 0x7d, 0x00, 0x00, 0x57, 0x41, 0x56, 0x45, 0x66, 0x6d, 0x74, 0x20, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, -128, 0x3e, 0x00, 0x00, 0x00, 0x7d, 0x00, 0x00, 0x02, 0x00, 0x10, 0x00, 0x64, 0x61, 0x74, 0x61, 0x00, 0x7d, 0x00, 0x00}; 
    			
    			FileOutputStream outFile = null;
    			try
    			{
    				outFile = new FileOutputStream(PATH_TO_FILE_UNTIL_DIRECT_AUDIOBUFFER_POSSIBLE_IN_ANDROID);
    				/* as this is a quick fix just output fixed correct header for a 1sec 16kHz 16bit mono WAV-File */
    				outFile.write(wavheader);
    				outFile.write(buffer);
    			}
    			catch (Exception ex)
    			{
    				Log.d(TAG,"Error: could not open wav file:"+ex.toString());
    			}
    			finally
    			{
    				// BUGFIX: the stream was never closed, leaking one file
    				// descriptor per 1-second sonification cycle.
    				if (outFile != null)
    				{
    					try { outFile.close(); } catch (Exception ignored) { /* best effort */ }
    				}
    			}
    		} /* private void interim_solution_until_direct_audiobuffer_possible_in_android */
        } /* class PreviewThread */
    } /* public static class SeerComponent */
} /* public class SeerMainActivity */ 