package com.intel.factorytest.activity;

import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.widget.Button;
import android.widget.ImageView;

import com.intel.factorytest.R;
import com.intel.factorytest.fft.RealDoubleFFT;

/**
 * Factory-test screen for the microphone.
 *
 * <p>Records mono 16-bit PCM audio at {@link #FREQUENCY} Hz on a background
 * {@link AsyncTask}, runs each {@link #BLOCK_SIZE}-sample block through a real FFT,
 * and draws the spectrum onto an {@link ImageView}-backed bitmap. Once
 * {@link #MAX_DATA_COUNT} spectrum values with magnitude &gt; 10 have been seen,
 * the test is marked as passed and the activity finishes.
 */
public class MicrophoneShow extends BaseActivity {
    private static final String TAG = "MicrophoneShow";

    /** Audio capture sample rate in Hz. */
    protected static final int FREQUENCY = 8000;
    protected static final int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
    protected static final int AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    /** Samples read per iteration; also the FFT transform size. */
    protected static final int BLOCK_SIZE = 256;
    /** Count of "loud" FFT values (|value| > 10) required before the test passes. */
    protected static final int MAX_DATA_COUNT = 200;

    private RealDoubleFFT transformer;

    // Written on the UI thread (onPause/onDestroy/onProgressUpdate) and polled by the
    // background recording loop, so it must be volatile or the loop may never observe
    // the stop request.
    private volatile boolean started = false;

    private RecordAudio recordTask;

    private ImageView mVoiceImageView;
    private Bitmap mVoiceBitmap;
    private Canvas mCanvas;
    private Paint mPaint;

    private int mImageWidth;
    private int mImageHeight;
    // Vertical baseline of the waveform (middle of the bitmap).
    private int mDrawY;
    private int mScreenOrientation;

    // Pixel width available for drawing; used to decide 1px vs 2px column spacing.
    private int iBufferSize;

    // Running count of FFT values whose magnitude exceeded the pass threshold.
    private int iPassCount;


    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.microphone_show);
        super.initView();

        setTitle(R.string.SERVICEMENU_MICROPHONE_TXT);

        // Size the waveform bitmap relative to the screen, depending on orientation.
        DisplayMetrics metric = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(metric);
        Configuration mConfiguration = getResources().getConfiguration();
        mScreenOrientation = mConfiguration.orientation;
        if (mScreenOrientation == Configuration.ORIENTATION_PORTRAIT) {
            mImageWidth = metric.widthPixels / 2;
            mImageHeight = metric.heightPixels / 2;
        } else {
            mImageWidth = metric.widthPixels / 3;
            mImageHeight = metric.heightPixels * 2 / 3;
        }
        mDrawY = mImageHeight / 2;

        mVoiceImageView = (ImageView) this.findViewById(R.id.ImageViewVoice);
        mVoiceBitmap = Bitmap.createBitmap(mImageWidth, mImageHeight,
                Bitmap.Config.ARGB_8888);
        mCanvas = new Canvas(mVoiceBitmap);
        mPaint = new Paint();
        mPaint.setColor(Color.GREEN);
        mVoiceImageView.setImageBitmap(mVoiceBitmap);

        iBufferSize = mImageWidth;

        iPassCount = 0;
        started = true;
        recordTask = new RecordAudio();
        transformer = new RealDoubleFFT(BLOCK_SIZE);
        recordTask.execute();
    }

    /**
     * Background task that continuously reads microphone audio, FFTs each block,
     * and publishes the spectrum to the UI thread for drawing.
     */
    private class RecordAudio extends AsyncTask<Void, double[], Void> {
        @Override
        protected Void doInBackground(Void... params) {
            AudioRecord audioRecord = null;
            try {
                int bufferSize = AudioRecord.getMinBufferSize(FREQUENCY,
                        channelConfiguration, AUDIO_ENCODING);

                audioRecord = new AudioRecord(
                        MediaRecorder.AudioSource.MIC, FREQUENCY,
                        channelConfiguration, AUDIO_ENCODING, bufferSize);

                short[] buffer = new short[BLOCK_SIZE];
                double[] toTransform = new double[BLOCK_SIZE];

                audioRecord.startRecording();

                while (started) {
                    int bufferReadResult = audioRecord.read(buffer, 0,
                            BLOCK_SIZE);

                    // Normalize signed 16-bit PCM samples to [-1.0, 1.0).
                    for (int i = 0; i < BLOCK_SIZE && i < bufferReadResult; i++) {
                        toTransform[i] = (double) buffer[i] / 32768.0;
                    }
                    // In-place real FFT; result is handed to onProgressUpdate.
                    transformer.ft(toTransform);
                    publishProgress(toTransform);
                }
            } catch (Throwable t) {
                Log.e(TAG, "Recording Failed", t);
            } finally {
                // Always stop and release the native capture resource, even if the
                // loop exits via an exception; otherwise the mic stays locked.
                if (audioRecord != null) {
                    try {
                        audioRecord.stop();
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "AudioRecord stop failed", e);
                    }
                    audioRecord.release();
                }
            }

            return null;
        }

        @Override
        protected void onProgressUpdate(double[]... toTransform) {
            // Redraw the spectrum from scratch on a black background.
            mCanvas.drawColor(Color.BLACK);
            int x;
            int downy;
            int upy;

            for (int i = 0; i < toTransform[0].length; i++) {
                // Use 1px columns when the spectrum is wider than the bitmap,
                // otherwise spread columns out with 2px spacing.
                if (toTransform[0].length * 2 > iBufferSize) {
                    x = i;
                } else {
                    x = i * 2;
                }
                downy = (int) (mDrawY - (toTransform[0][i] * 10));
                upy = mDrawY;
                mCanvas.drawLine(x, downy, x, upy, mPaint);

                if (Math.abs(toTransform[0][i]) > 10) {
                    iPassCount++;
                    // Enough loud samples seen: mark the test passed and leave.
                    if (iPassCount > MAX_DATA_COUNT) {
                        iPassCount = 0;
                        started = false;
                        SetResult(true);
                        recordTask.cancel(true);
                        finish();
                    }
                }
            }
            mVoiceImageView.invalidate();
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onDestroy() {
        Log.d(TAG, "onDestroy");
        super.onDestroy();
        started = false;
        recordTask.cancel(true);
    }

    @Override
    public void finish() {
        setTestResult();
        super.finish();
    }

    @Override
    protected void onPause() {
        Log.d(TAG, "onPause");
        super.onPause();
        // Stop the recording loop whenever the screen leaves the foreground.
        started = false;
        recordTask.cancel(true);
    }

}
