/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package gl.live.danceshow.ui.camera;

import gl.live.danceshow.ui.widget.CustomAlertDialog;
import gl.live.danceshow.ui.widget.FixedLyricView;
import gl.live.mediamux.sdk.AudioRecordCallback;
import gl.live.mediamux.sdk.DecodedVideoFrameListener;
import gl.live.mediamux.sdk.AVRecorderSDK;
import gl.live.mediamux.sdk.SoxProcesser;
import gl.live.mediamux.sdk.event.PlayVideoEvent;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.StringTokenizer;

import com.android.app.mvmerged.R;

import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.AnimationDrawable;
import android.graphics.drawable.Drawable;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaRecorder.AudioSource;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.provider.Settings;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.Toast;

// Need the following import to get access to the app resources, since this
// class is in a sub-package.

// ----------------------------------------------------------------------

/**
 *
 */
@SuppressLint("NewApi")
/**
 * Base activity for the camera-record screen. Owns the {@link Camera} device, the
 * {@link AVRecorderSDK} A/V muxer and a dedicated {@link HandlerThread} on which all
 * camera open/close/switch operations run (the Camera API must not be driven from
 * the UI thread). Subclasses provide the concrete UI callbacks
 * ({@link #updateTorchView}, {@link #updateFocus}, {@link #onCaptureStarted},
 * {@link #onCaptureStopped}, {@link #onCaptureFileReady}).
 *
 * NOTE(review): threading here is best-effort — onPause posts the release work onto
 * the camera thread while the activity continues tearing down on the UI thread;
 * confirm ordering assumptions before reworking the lifecycle.
 */
@SuppressLint("NewApi")
public abstract class AbsCameraPreviewActivity extends FragmentActivity
		implements AutoFocusCallback, AudioRecordCallback {
	protected CenteredPreview mPreview;
	protected AVRecorderSDK mMediaMux;
	protected ImageView mPlayVideo;
	protected int mAvator = 1;
	Camera mCamera;
	int numberOfCameras;
	int cameraCurrentlyLocked = -1;

	private String mediaFilePath; // path of the recorded output file
	protected String musicFile;
	protected String mMergingVideoPath;

	// Id of the first rear-facing camera (used as the initial camera).
	int defaultCameraId;
	protected boolean preparingRecorder = false;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		// Hide the window title.
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		// All Camera device operations are serialized on this background thread.
		HandlerThread camerastartupthread = new HandlerThread("cameradeviceopt");
		camerastartupthread.start();
		camerastartuphandler = new Handler(camerastartupthread.getLooper());

		setContentView(R.layout.camera_activity);
		mPreview = (CenteredPreview) findViewById(R.id.surfaceViewContainer);
		mPreview.setAvator(mAvator);

		mPlayVideo = (ImageView) findViewById(R.id.play_video);

		// Find the total number of cameras available.
		numberOfCameras = Camera.getNumberOfCameras();

		// Find the ID of the default (rear-facing) camera.
		CameraInfo cameraInfo = new CameraInfo();
		for (int i = 0; i < numberOfCameras; i++) {
			Camera.getCameraInfo(i, cameraInfo);
			if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
				defaultCameraId = i;
			}
		}

		mProcesser = new SoxProcesser();
		mProcesser.setAudioRcordCallback(this);
	}

	/** Whether there is more than one camera, i.e. front/back switching is possible. */
	protected boolean isSupportFlip() {
		return numberOfCameras != 1;
	}

	protected Handler camerastartuphandler;
	// Set true once the camera has been opened successfully on the camera thread.
	protected boolean startok = false;
	// Tracks whether the activity is between onResume and onPause; camera work
	// scheduled after onPause bails out via this flag.
	private boolean bResume = false;

	@Override
	protected void onResume() {
		super.onResume();
		bResume = true;
		mMediaMux = new AVRecorderSDK(this, mMergingVideoPath);
		mMediaMux.onResume();

		openCamera(false);
	}

	/**
	 * Opens (or, when {@code bswitch} is true, cycles to the next) camera on the
	 * camera thread. On failure a dialog offers to open system settings or finish.
	 *
	 * @param bswitch true to advance to the next camera id before opening.
	 */
	protected void openCamera(final boolean bswitch) {
		camerastartuphandler.post(new Runnable() {
			@Override
			public void run() {
				try {
					startok = false;
					if (bswitch) {
						defaultCameraId++;
						defaultCameraId = defaultCameraId % numberOfCameras;
					}
					switchCameraTo(defaultCameraId);
					runOnUiThread(new Runnable() {

						@Override
						public void run() {
							// NOTE(review): assumes camera id 1 is the front
							// camera (mirror its preview) — true on most
							// devices, but not guaranteed by the API.
							if (defaultCameraId == 1)
								mPreview.getCameraView().setPreviewScaleX(-1);
							else {
								mPreview.getCameraView().setPreviewScaleX(1);
							}
						}
					});
					startok = true;
				} catch (RuntimeException e) {
					// Camera.open and friends throw RuntimeException when the
					// device is unavailable or permission is missing.
					runOnUiThread(new Runnable() {
						@Override
						public void run() {
							CustomAlertDialog mCustomDialog = new CustomAlertDialog(
									AbsCameraPreviewActivity.this);
							mCustomDialog
									.builder(R.style.DialogTVAnimWindowAnim);
							mCustomDialog.setTitle("检查相机");
							mCustomDialog.setMsg("可能你的相机无法使用，或者没有使用相机的权限！");
							mCustomDialog
									.setPositiveButton(
											getResources().getString(
													R.string.dialog_ok),
											new OnClickListener() {
												@Override
												public void onClick(View v) {
													Intent intent = new Intent();
													intent.setAction(Settings.ACTION_SETTINGS);
													startActivityForResult(
															intent, 0);
												}
											})
									.setNegativeButton(
											getResources().getString(
													R.string.dialog_cancel),
											new OnClickListener() {
												@Override
												public void onClick(View v) {
													finish();
												}
											}).show();
						}
					});
				}
			}
		});
	}

	// Currently unused fallback error dialog; kept for subclass/debug use.
	private void handleCameraException(Exception e) {
		new AlertDialog.Builder(this).setMessage(e.getLocalizedMessage())
				.setOnCancelListener(new DialogInterface.OnCancelListener() {
					@Override
					public void onCancel(DialogInterface dialog) {
						finish();
					}
				}).setNegativeButton("cancel", null).show();
	}

	/** Event payload carrying the SurfaceTexture the preview should render into. */
	public static class CameraEvent {
		private SurfaceTexture mTexture;

		public CameraEvent(SurfaceTexture text) {
			mTexture = text;
		}
	}

	/** EventBus entry point: runs an arbitrary task on the camera thread. */
	public void onEventMainThread(Runnable event) {
		if (camerastartuphandler == null)
			return;
		camerastartuphandler.post(event);
	}

	// Debug helper: logs every camera parameter key=value pair.
	private void dumpParameters(Parameters parameters) {
		String flattened = parameters.flatten();
		StringTokenizer tokenizer = new StringTokenizer(flattened, ";");
		Log.d(TAG, "Dump all camera parameters:");
		while (tokenizer.hasMoreElements()) {
			Log.d(TAG, tokenizer.nextToken());
		}
	}

	/**
	 * EventBus entry point: rebinds the camera preview to a (re)created
	 * SurfaceTexture on the camera thread.
	 */
	public void onEventMainThread(CameraEvent event) {
		final CameraEvent e = event;
		if (camerastartuphandler == null)
			return;
		camerastartuphandler.post(new Runnable() {

			@Override
			public void run() {
				if (mCamera == null) {
					Log.d(TAG, "CameraEvent mCamera == null");
					return;
				}
				try {
					Log.d(TAG, "CameraEvent stopPreview setPreviewTexture");
					mCamera.stopPreview();
					mCamera.setPreviewTexture(e.mTexture);
					mCamera.startPreview();
					mMediaMux.setCamera(mCamera);
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		});
	}

	@Override
	protected void onPause() {
		Log.d(TAG, "onPause");
		bResume = false;

		// Release recorder and camera on the camera thread; the Camera object is
		// a shared system resource and must be freed when the activity pauses.
		camerastartuphandler.post(new Runnable() {
			@Override
			public void run() {
				releaseMediaRecorder();
				mPreview.setCamera(null, 0);
				mMediaMux.onPause();
				if (mCamera != null) {
					mCamera.stopPreview();
					mCamera.release();
					mCamera = null;
				}
			}
		});

		super.onPause();
	}

	/**
	 * Releases the currently held camera (if any) and opens {@code cameraId},
	 * wiring it into the preview and the recorder. Must run on the camera thread.
	 */
	private void switchCameraTo(int cameraId) {
		// Release the camera we currently hold (cameraCurrentlyLocked).
		if (mCamera != null) {
			mCamera.stopPreview();
			mPreview.setCamera(null, defaultCameraId);
			mMediaMux.setCamera(null);
			mCamera.release();
			mCamera = null;
		}

		// If the activity paused while this task was queued, do not reacquire.
		if (!bResume)
			return;
		mCamera = Camera.open(cameraId);
		enableBeautyModeIfNeed();
		mMediaMux.setCamera(mCamera);
		cameraCurrentlyLocked = cameraId;
		mPreview.switchCamera(mCamera, cameraCurrentlyLocked);
		mCamera.startPreview();
		checkTorchMode();
	}

	/**
	 * Enables the vendor (Huawei "hw-*" keys) beauty mode at half strength when
	 * the driver advertises support for it; no-op otherwise.
	 */
	private void enableBeautyModeIfNeed() {
		Parameters p = mCamera.getParameters();
		String bsupported = p.get("hw_beauty_mode_support");
		if (!"true".equalsIgnoreCase(bsupported)) {
			return;
		}
		int max = p.getInt("hw-max-beauty-level");
		p.set("hw-beauty-level", max / 2);
		mCamera.setParameters(p);
	}

	/**
	 * Decides whether the torch/exposure UI should be shown (the device exposes a
	 * non-trivial exposure-compensation range) and notifies the subclass on the
	 * UI thread.
	 */
	private void checkTorchMode() {
		if (mCamera == null)
			return;
		Parameters parameters = mCamera.getParameters();
		int max = parameters.getMaxExposureCompensation();
		int min = parameters.getMinExposureCompensation();
		final boolean bshowtorch = !(max == min);
		runOnUiThread(new Runnable() {

			@Override
			public void run() {
				Log.d(TAG, "updateTorchView1");
				updateTorchView(bshowtorch);
				Log.d(TAG, "updateTorchView2");
			}
		});
	}

	// Pushes decoded frames of the accompaniment video into mPlayVideo.
	private DecodedVideoFrameListener mDecodedVideoFrameListener = new DecodedVideoFrameListener() {

		@Override
		public void updateFrame(final Bitmap bm) {
			runOnUiThread(new Runnable() {

				@Override
				public void run() {
					Log.d(TAG, "DecodedVideoFrameListener updateFrame");
					mPlayVideo.setImageBitmap(bm);
				}
			});

		}
	};

	/** Show or hide the torch control. Called on the UI thread. */
	protected abstract void updateTorchView(boolean bshow);

	private boolean isRecording = false;

	protected static final String TAG = "CameraPreview";

	public synchronized boolean isRecording() {
		return isRecording;
	}

	/**
	 * Configures the recorder (camera, audio tracks, output file, preview size)
	 * and calls {@code prepare()}.
	 *
	 * @return true on success; false when audio init or prepare fails.
	 */
	private boolean prepareVideoRecorder() {
		try {
			synchronized (this) {
				mMediaMux.setCamera(mCamera);
				mMediaMux
						.setDecodeVideoFrameListener(mDecodedVideoFrameListener);
				// Set audio sources (backing track plus two mix-in tracks).
				if (!mMediaMux.initAudio(musicFile,
						VideoReviewActivity.TEST_WAV_AUDIO1,
						VideoReviewActivity.TEST_WAV_AUDIO2)) {
					return false;
				}
				mediaFilePath = VideoReviewActivity.TEST_VIDEO_OUTPUT;
				Log.d(TAG, "mediaFilePath " + mediaFilePath);
				// Remove any stale output from a previous run.
				File f = new File(mediaFilePath);
				if (f.exists()) {
					f.delete();
				}
				mMediaMux.initOutputVideo(mediaFilePath, "video/avc", 720, 720);
				mMediaMux.setCameraPreviewWH(PreviewManager.PREVIEW_W,
						PreviewManager.PREVIEW_H);
				mMediaMux.prepare();
			}
		} catch (Exception e) {
			// Don't swallow silently: the caller only sees "false", so log why.
			Log.e(TAG, "prepareVideoRecorder failed", e);
			return false;
		}
		return true;
	}

	/**
	 * Detaches the camera from the recorder, stops it and releases it.
	 * Safe to call when the recorder is already null or already stopped.
	 */
	protected void releaseMediaRecorder() {
		// Capture the reference first: locking on mMediaMux before the null
		// check would NPE when the recorder has not been created yet.
		final AVRecorderSDK mux = mMediaMux;
		if (mux == null) {
			return;
		}
		synchronized (mux) {
			mux.setCamera(null);
			mux.stop();
			try {
				mux.release();
			} catch (IllegalStateException e) {
				Log.e(TAG, "mMediaEngine release failed!");
			}
		}
	}

	/**
	 * Stops an in-progress recording.
	 *
	 * @param quit true when leaving the screen: the output file is handed to
	 *            {@link #onCaptureFileReady} and the camera is released;
	 *            false to discard the take and keep previewing.
	 */
	private void stopCapture(boolean quit) {
		if (mMediaMux != null) {
			// stop recording and release camera
			onCaptureStopped();
			try {
				mMediaMux.stop(); // stop the recording
			} catch (IllegalStateException e) {
				// The camera produced no data; notify the user.
				Toast.makeText(this, "相机录像失败", Toast.LENGTH_LONG).show();
			}
			releaseMediaRecorder(); // release the recorder object
		}
		// inform the user that recording has stopped
		isRecording = false;
		if (quit) {
			onCaptureFileReady(mediaFilePath);
			if (mCamera != null) {
				mCamera.stopPreview();
				mCamera.release();
				mCamera = null;
			}
		} else {
			mMediaMux.setCamera(mCamera);
			// Discard the aborted take; the path is null if prepare never ran.
			if (mediaFilePath != null) {
				new File(mediaFilePath).delete();
			}
		}
	}

	@Override
	public void onBackPressed() {
		if (isRecording)
			stopCapture(true);
		super.onBackPressed();
	}

	/**
	 * Toggles recording: stops it when active, otherwise schedules recorder
	 * preparation on the camera thread (at most one prepare in flight).
	 */
	public void toggleCapture(boolean quit) {
		if (!startok)
			return;
		if (isRecording) {
			stopCapture(quit);
		} else {
			if (!preparingRecorder) {
				preparingRecorder = true;
				camerastartuphandler.post(new MediaPrepareTask());
			}
		}
	}

	protected enum FocusState {
		NORMAL, SUCCESS, FAIL
	};

	/** Reflect the given auto-focus state in the UI. */
	abstract protected void updateFocus(FocusState state);

	/** Triggers an auto-focus cycle on the camera thread. */
	protected void sendAutoFocus() {
		updateFocus(FocusState.NORMAL);
		camerastartuphandler.post(new Runnable() {

			@Override
			public void run() {
				if (mCamera != null)
					mCamera.autoFocus(AbsCameraPreviewActivity.this);
			}
		});
	}

	/**
	 * Prepares the recorder and starts recording. Runs on the camera thread
	 * because prepare is a long blocking operation.
	 */
	protected class MediaPrepareTask implements Runnable {

		@Override
		public void run() {
			if (mCamera == null) {
				preparingRecorder = false;
				return;
			}
			// Local flag; intentionally distinct from the camera-open field
			// "startok" declared on the activity.
			boolean recorderStarted = false;
			if (mPreview.isSupportAutoFocus()) {
				sendAutoFocus();
			}
			if (prepareVideoRecorder()) {
				try {
					// Camera is available and unlocked and the recorder is
					// prepared, so recording can start now.
					mMediaMux.start();
					new MyAsyncTask().execute();
					recorderStarted = true;
				} catch (Exception e) {
					releaseMediaRecorder();
				}
			} else {
				// prepare didn't work, release the recorder
				releaseMediaRecorder();
			}
			preparingRecorder = false;
			if (recorderStarted) {
				runOnUiThread(new Runnable() {
					@Override
					public void run() {
						isRecording = true;
						// inform the user that recording has started
						onCaptureStarted();
					}
				});
			} else {
				runOnUiThread(new Runnable() {
					@Override
					public void run() {
						isRecording = false;
						Toast.makeText(getApplicationContext(), "无法启动摄像机",
								Toast.LENGTH_SHORT).show();
						AbsCameraPreviewActivity.this.finish();
					}
				});
			}
		}

	}

	/** Recording has started; update the UI. Called on the UI thread. */
	protected abstract void onCaptureStarted();

	/** Recording is about to stop; update the UI. */
	protected abstract void onCaptureStopped();

	/** The finished recording is available at {@code mediaFilePath}. */
	protected abstract void onCaptureFileReady(String mediaFilePath);

	@Override
	protected void onDestroy() {
		super.onDestroy();
		// Stop the camera HandlerThread started in onCreate; without this the
		// thread (and its Looper) leaks across activity instances.
		if (camerastartuphandler != null) {
			camerastartuphandler.getLooper().quit();
		}
		// Free any audio capture resources still held (no-op if already freed).
		releaseAudioRecord();
	}

	private short[] mAudioRecordBuffer;
	protected SoxProcesser mProcesser;
	protected AudioRecord mAudioRecord;

	/**
	 * Creates the AudioRecord and initializes the Sox effect chain.
	 *
	 * NOTE(review): the short[] buffer is sized with a byte count, which
	 * over-allocates by 2x for 16-bit PCM — harmless but worth confirming
	 * against the SDK's expectations.
	 */
	private void configAudioRecoder(int sr, int channelConfig, int audioFormat,
			int bufferSizeInBytes) {
		try {
			Log.d(TAG, "configAudioRecoder + bufferSizeInBytes " + bufferSizeInBytes);
			mAudioRecordBuffer = new short[bufferSizeInBytes];
			mAudioRecord = new AudioRecord(AudioSource.MIC, sr, channelConfig,
					audioFormat, bufferSizeInBytes);
			mProcesser.init();
			mProcesser.setEffect();
		} catch (IllegalArgumentException e) {
			Log.w(TAG, "mAudioRecord create fail....");
		}
	}

	private void startAudioRecord() {
		Log.d(TAG, "startAudioRecord");
		if (mAudioRecord != null) {
			mAudioRecord.startRecording();
		}
	}

	private void stopAudioRecord() {
		Log.d(TAG, "stopAudioRecord");
		if (mAudioRecord != null) {
			mAudioRecord.stop();
		}
	}

	private void releaseAudioRecord() {
		if (mAudioRecord != null) {
			mAudioRecord.release();
			mProcesser.release();
		}
		Log.d(TAG, "releaseAudioRecord");
		mAudioRecord = null;
	}

	/**
	 * {@link AudioRecordCallback}: pulls one buffer of microphone PCM for the
	 * muxer.
	 *
	 * @return the shared capture buffer, or null when capture is not running or
	 *         the read failed.
	 */
	@Override
	public short[] readAudioRecordBuffers() {
		if (mAudioRecord == null || !mMediaMux.isStarted() || mAudioRecordBuffer == null) {
			Log.e(TAG, "readAudioRecordBuffers has erros");
			return null;
		}
		Log.d(TAG, "readAudioRecordBuffers " + mAudioRecordBuffer.length);
		int read = mAudioRecord.read(mAudioRecordBuffer, 0,
				mAudioRecordBuffer.length);
		if (read < 0) {
			// ERROR_INVALID_OPERATION / ERROR_BAD_VALUE: don't hand the muxer
			// a buffer of stale samples.
			Log.e(TAG, "AudioRecord.read failed: " + read);
			return null;
		}
		return mAudioRecordBuffer;
	}

	protected void testAudioRecord() {
		new MyAsyncTask().execute();
	}

	/**
	 * Background task that records microphone audio and runs it through the Sox
	 * effect processor to /sdcard/test_effect.wav.
	 *
	 * NOTE(review): mProcesser.init()/setEffect() are invoked both here and in
	 * configAudioRecoder — presumably idempotent in the SDK; confirm before
	 * removing either call.
	 */
	private class MyAsyncTask extends AsyncTask<Void, Void, Void> {

		@Override
		protected Void doInBackground(Void... params) {
			Log.d("guolei", "addEffect begin...");
			int size = AudioRecord.getMinBufferSize(
					44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
					AudioFormat.ENCODING_PCM_16BIT);
			mAudioRecordBuffer = new short[size];
			Log.d(TAG, "configAudioRecoder min size " + size);
			configAudioRecoder(44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
					AudioFormat.ENCODING_PCM_16BIT, size);
			startAudioRecord();
			mProcesser.init();
			// Example: apply the configured effect chain to the capture.
			mProcesser.setEffect();
			mProcesser.processEffect(InitApplication.WAV_HEADER, "/sdcard/test_effect.wav");
			mProcesser.release();
			stopAudioRecord();
			releaseAudioRecord();
			Log.d("guolei", "addEffect end...");
			return null;
		}

		@Override
		protected void onProgressUpdate(Void... values) {
		}

		@Override
		protected void onPostExecute(Void result) {
		}
	}
}
