package com.ingenic.glass.camera.live;

import android.app.Activity;
import android.hardware.Camera;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.BatteryManager;
import android.os.Environment;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.GestureDetector;
import android.widget.Toast;
import android.widget.GestureDetector.SimpleOnGestureListener;
import android.provider.Settings;
import android.provider.Settings.SettingNotFoundException;

import java.io.IOException;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import com.ingenic.glass.voicerecognizer.api.VoiceRecognizer;
import com.ingenic.glass.voicerecognizer.api.VoiceRecognizerListener;
import com.ingenic.glass.camera.*;
import com.ingenic.glass.camera.util.Util;
import com.ingenic.glass.camera.util.StorageSpaceUtil;

/**
 * Activity that runs a live video stream from the glass camera.  Video (and
 * optionally audio, per the "live_audio" system setting) is fed through
 * MediaRecorder using custom live output formats; a local copy of the stream
 * may also be kept when the "live_record" setting is on.
 */
public class CameraLive extends Activity implements SurfaceHolder.Callback,
    MediaRecorder.OnErrorListener, 
						    MediaRecorder.OnInfoListener,PictureCallback{    
    private final String TAG = "CameraLive";
    private final boolean NO_SCREEN_DEVICE = CameraAppImpl.NO_SCREEN_DEVICE;
    // Send message to GlassSync.LiveModule
    private final String CAMERA_ACTION_SEND_URL = "com.ingenic.glass.camera.live.SEND_URL";
    private final String CAMERA_ACTION_STATUS = "com.ingenic.glass.camera.live.STATUS";
    // JNI message, consistent with definition in frameworks/av/media/libstagefright/LiveWriter.cpp
    private final int MEDIA_RECORDER_TRACK_INFO_LIVE_SERVER_START = 1900;
    private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();
    // Custom live output formats; values must match media/mediarecorder.h.
    private final int OUTPUT_FORMAT_LIVE_VIDEO_ONLY = 9; // see media/mediarecorder.h
    private final int OUTPUT_FORMAT_LIVE_VIDEO_AND_AUDIO = 10;
    // Running-instance handle so other components can reach the live activity;
    // accessed only through the synchronized set/getMInstance methods.
    private static CameraLive mInstance = null;
    private int mLiveMode = CameraAppImpl.LIVE_VIDEO_ONLY;//default
    private ComboPreferences mPreferences;
    private SurfaceHolder mSurfaceHolder = null;
    private boolean mOpenCameraFail = false;
    private MediaRecorder mMediaRecorder;
    private boolean mMediaRecorderRecording = false;
    // True while the recorder is also persisting the stream to a local file.
    private boolean mIsSavingFile = false;
    private boolean mStorageEnough = false;
    // The video file that the hardware camera is about to record into
    // (or is recording into.)
    private CamcorderProfile mProfile;
    private boolean mCaptureTimeLapse = false;
    // Default 0. If it is larger than 0, the camcorder is in time lapse mode.
    private int mTimeBetweenTimeLapseFrameCaptureMs = 0;
    private int mDesiredPreviewWidth;
    private int mDesiredPreviewHeight;
    private Parameters mParameters;
    private int mCameraId;
    private GestureDetector mGestureDetector;
    // Guards mFinished so finish() performs its teardown exactly once.
    private Object mLock = new Object();
    private boolean mFinished = false;
    private Camera mCameraDevice;
    // check incall state and add tts
    private AudioManager mAudioManager;
    private VoiceRecognizer mVoiceRecognizer = null;
    private CameraAppImpl mApplication;
    // Human-readable status broadcast to GlassSync when the activity ends.
    private String mCurrentErrorState;
    // Value of the "live_record" system setting: 1 = also keep a local copy.
    private int mLiveRecord;
    private String mFileName;
    // Receives camera error notifications posted by CameraErrorCallback.
    private Handler mHandler = new Handler() {
	    @Override
	    public void handleMessage(Message msg) {
		if (CameraAppImpl.DEBUG) Log.d(TAG,"handleMessage in msg.what="+msg.what);
		switch (msg.what) {
		case CameraAppImpl.CAMERA_ERROR_PREVIEW:{
		    mCurrentErrorState = getString(R.string.video_record_error);
		    finish();
		    break;
		}
		}
	    }
	};
    
    private BroadcastReceiver mReceiver = null;

    /**
     * Watches for battery drops and SD-card ejection while the live session
     * runs; either condition ends the session.
     */
    private class MyBroadcastReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            final String action = intent.getAction();
            if (action.equals(Intent.ACTION_BATTERY_CHANGED)) {
                // A missing voltage extra defaults to the threshold, i.e. is
                // treated as "low battery".
                final int voltage = intent.getIntExtra(
                        BatteryManager.EXTRA_VOLTAGE, CameraAppImpl.LOWEST_BATTERY_VOLTAGE);
                if (CameraAppImpl.DEBUG)
                    Log.d(TAG, "battery has change :: currentBatteryVoltage= " + voltage);
                if (voltage <= CameraAppImpl.LOWEST_BATTERY_VOLTAGE) {
                    mCurrentErrorState = getString(R.string.live_error_low_battery);
                    finish();
                }
            } else if (action.equals(Intent.ACTION_MEDIA_EJECT)) {
                // Storage is going away: stop writing immediately, then exit.
                stopVideoRecording();
                finish();
            }
        }
    }
    
    /**
     * Sets up the live session: registers voice recognition, validates the
     * environment (battery level, in-call state), opens the camera, and takes
     * audio focus.  Recording itself starts later, from surfaceChanged().
     */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
	if (CameraAppImpl.DEBUG) Log.d(TAG,"onCreate in");
	CameraLive.setMInstance(this);
	mAudioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
	mCurrentErrorState = getString(R.string.live_no_error);
	mVoiceRecognizer = new VoiceRecognizer(VoiceRecognizer.REC_TYPE_COMMAND, new VoiceRecognizerListener());
	mVoiceRecognizer.setAppName(getString(R.string.camera_live_label));
	mVoiceRecognizer.register();
	// Abort immediately on low battery, or audio-live during a phone call.
	if (!checkEnvironmentState()){
	    finish();
	    return;
	}
	setContentView(R.layout.camera_live);
	SurfaceView sv = (SurfaceView)findViewById(R.id.surfaceview);
        SurfaceHolder holder = sv.getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
	mGestureDetector = new GestureDetector(this, new MySimpleGestureDetectorListener());
        mMediaRecorder = new MediaRecorder();
        mMediaRecorder.setOnErrorListener(this);
        mMediaRecorder.setOnInfoListener(this);
	mApplication = (CameraAppImpl)getApplication();
	// init() opens the camera (on a worker thread) and registers receivers.
	init();
	if (mOpenCameraFail) {
	    mCurrentErrorState = getString(R.string.video_record_error); 
	    finish();
	    return;
	}
	// Publish the camera state so other components know live mode is active.
	Settings.System.putString(this.getContentResolver(), "camera_state","live");
	if (CameraAppImpl.USE_TTS && mAudioManager.getMode() != AudioManager.MODE_IN_CALL) {
		mVoiceRecognizer.playTTS(getString(R.string.tts_live_video_record_start));
	}

	mAudioManager.requestAudioFocus(mAudioFocusListener, AudioManager.STREAM_MUSIC,
					AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
	mVoiceRecognizer.lockRecognizer();
	try {
	    mLiveRecord = Settings.System.getInt(getContentResolver(), "live_record");
	} catch(SettingNotFoundException  e) {
	    e.printStackTrace();
	}
    }
	
    /**
     * Verifies that a live session may start: the battery voltage must be above
     * the minimum threshold, and audio-enabled live mode is refused while a
     * phone call is active.  Sets mCurrentErrorState with the failure reason.
     *
     * @return true when the environment allows live streaming
     */
    private boolean checkEnvironmentState(){
	int voltage = 0;
	try {
	    voltage = Settings.System.getInt(getContentResolver(), "batteryVoltage");
	} catch (SettingNotFoundException e) {
	    Log.e(TAG,"batteryVoltage -- SettingNotFound");
	}

	if (voltage <= CameraAppImpl.LOWEST_BATTERY_VOLTAGE) {
	    mCurrentErrorState = getString(R.string.live_error_low_battery);
	    return false;
	}
	boolean inCall = mAudioManager.getMode() == AudioManager.MODE_IN_CALL;
	if (inCall && getLiveAudioMode() == 1) {
	    mCurrentErrorState = getString(R.string.not_support_live_with_audio);
	    return false;
	}
	return true;
    }

    /**
     * Reads camera preferences, registers the battery/SD-eject receiver, and
     * opens the camera on a worker thread.  The thread is joined before this
     * method returns, so the camera is ready -- or mOpenCameraFail is set --
     * when init() exits.
     */
    private void init(){
        Util.initializeScreenBrightness(getWindow(), getContentResolver());
        mPreferences = new ComboPreferences(this);
        CameraSettings.upgradeGlobalPreferences(mPreferences.getGlobal());
        mCameraId = CameraSettings.readPreferredCameraId(mPreferences);
	if (CameraAppImpl.DEBUG) Log.d(TAG,"---mCameraId="+mCameraId);
        //Testing purpose. Launch a specific camera through the intent extras.
        int intentCameraId = Util.getCameraFacingIntentExtras(this);
        if (intentCameraId != -1) {
            mCameraId = intentCameraId;
        }

        mPreferences.setLocalId(this, mCameraId);
        CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());

	// Register BroadcastReceiver to handle some event
        IntentFilter intentFilter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED);
	intentFilter.addAction(Intent.ACTION_MEDIA_EJECT);
        mReceiver = new MyBroadcastReceiver();
        registerReceiver(mReceiver, intentFilter);

        /*
         * To reduce startup time, we start the preview in another thread.
         * We make sure the preview is started at the end of onCreate.
         */
	Thread startPreviewThread = new Thread(new Runnable() {
		public void run() {
		    try {
			if (CameraAppImpl.DEBUG) Log.d(TAG,"---opencamera mCameraId="+mCameraId);
			mCameraDevice = Util.openCamera(CameraLive.this, mCameraId);
			if (mCameraDevice == null) {
			    mOpenCameraFail = true;
			} else {
			    mCameraDevice.setErrorCallback(mErrorCallback);
			    mErrorCallback.setHandler(mHandler);
			}
		    } catch (Exception e) {
			// Any failure to open is treated the same as a null camera.
			mOpenCameraFail = true;
		    }
		}
	    });
	startPreviewThread.start();
        Util.enterLightsOutMode(getWindow());       
        // Make sure preview is started.
        try {
            startPreviewThread.join();
        } catch (InterruptedException ex) {
            // ignore
        }
    }

    /**
     * Reads the video-quality preference and builds the CamcorderProfile used
     * for the live stream.
     *
     * @param hasAudio true when audio should be captured with the video; when
     *                 false the recorder runs in time-lapse mode at 25 fps
     *                 (40 ms per frame), which disables the audio track
     */
    private void readVideoPreferences(boolean hasAudio) {
        // The preference stores values from ListPreference and is thus string type for all values.
        // We need to convert it to int manually.
        String defaultQuality = CameraSettings.getDefaultVideoQuality(mCameraId,
								      getResources().getString(R.string.pref_video_quality_default));
        String videoQuality = mPreferences.getString(CameraSettings.KEY_VIDEO_QUALITY, defaultQuality);
        // parseInt avoids the needless Integer boxing of Integer.valueOf().
        int quality = Integer.parseInt(videoQuality);
       	if (!hasAudio) {
	    mTimeBetweenTimeLapseFrameCaptureMs = 40;
	    mCaptureTimeLapse = true;
        } else {
	    mTimeBetweenTimeLapseFrameCaptureMs = 0;
	    mCaptureTimeLapse = false;
	}
        // Time-lapse profile ids are offset by 1000 from the normal ones.
        // TODO: This should be checked instead directly +1000.
        if (mCaptureTimeLapse) quality += 1000;
        mProfile = CamcorderProfile.get(mCameraId, quality);
	// Select the custom live container: with or without an audio track.
	if (getLiveAudioMode() == 1) {
	    mLiveMode = CameraAppImpl.LIVE_VIDEO_AND_AUDIO;
	    Log.d("wdu","mProfile.fileFormat = OUTPUT_FORMAT_LIVE_VIDEO_AND_AUDIO");
	    mProfile.fileFormat = OUTPUT_FORMAT_LIVE_VIDEO_AND_AUDIO;
	}else {
	    mProfile.fileFormat = OUTPUT_FORMAT_LIVE_VIDEO_ONLY;
	}
        getDesiredPreviewSize();
    }

    /**
     * Caches the camera parameters and fixes the live preview size at 768x432.
     * The profile's videoFrameWidth/Height are deliberately not used here.
     */
    private void getDesiredPreviewSize() {
	if (mCameraDevice == null) {
	    return;
	}
	mParameters = mCameraDevice.getParameters();
	// Live streaming always uses this fixed size.
	mDesiredPreviewWidth = 768;
	mDesiredPreviewHeight = 432;
    }

    /** Stops the preview and hands the camera back to the shared CameraHolder. */
    private void closeCamera() {
	if (CameraAppImpl.DEBUG) Log.d(TAG, "closeCamera");
	if (mCameraDevice == null) {
	    return;
	}
	mCameraDevice.stopPreview();
	CameraHolder.instance().release();
	// Detach the error callback before dropping our reference.
	mCameraDevice.setErrorCallback(null);
	mCameraDevice = null;
    }

    /**
     * Stops any in-progress recording and then releases the camera.  Called
     * from finish() so the camera is freed as early as possible.
     */
    private void finishRecorderAndCloseCamera() {
	// Fixed: the old debug message misleadingly said "stopVideoRecording in".
	if (CameraAppImpl.DEBUG) Log.d(TAG,"-------finishRecorderAndCloseCamera in");
        stopVideoRecording();
        closeCamera();
    }

    /**
     * Final teardown: releases the voice recognizer and audio focus, clears the
     * published camera state, and broadcasts the session's final status to
     * GlassSync.  The camera itself was already closed in finish().
     */
    @Override
    protected void onDestroy() {
	super.onDestroy();
	if (CameraAppImpl.DEBUG) Log.d(TAG,"onDestroy in");
	mVoiceRecognizer.unlockRecognizer();
	mAudioManager.abandonAudioFocus(mAudioFocusListener);
	Settings.System.putString(this.getContentResolver(), "camera_state","");
	// Report why the session ended (live_no_error means a normal stop).
	Intent intent = new Intent(CAMERA_ACTION_STATUS);
	intent.putExtra("status", mCurrentErrorState);
	sendBroadcast(intent);
	if (mVoiceRecognizer != null)
	    mVoiceRecognizer.unRegister();

	if(mApplication != null)
	    mApplication.releaseWakeLock();

        if (mReceiver != null) {
            unregisterReceiver(mReceiver);
            mReceiver = null;
        }
        CameraLive.setMInstance(null);
    }

    /**
     * Ensures the output video file exists and is readable/writable/executable
     * by all users (the equivalent of chmod 777).
     *
     * Uses the File permission setters instead of shelling out to "chmod":
     * Runtime.exec(String) tokenizes on whitespace, so the old approach broke
     * for paths containing spaces.  This also matches the permission style
     * already used by creatAndSaveVideoThumbnail().
     *
     * @param fileName absolute path of the video file to create
     */
    private void createAndChangVideoFileMode(String fileName) {
	try {
	    File videoFile = new File(fileName);
	    if (!videoFile.exists()) {
		videoFile.createNewFile();
		// ownerOnly=false => applies to all users (world rwx).
		videoFile.setReadable(true, false);
		videoFile.setWritable(true, false);
		videoFile.setExecutable(true, false);
	    }
	} catch (Exception e) {
	    e.printStackTrace();
	}
    }

    /**
     * Builds the base file name for a live recording, e.g.
     * "LiveRtsp_20240101_120000".
     *
     * @param dateTaken capture time in milliseconds since the epoch
     * @return the formatted name for the given timestamp
     */
    private String createName(long dateTaken) {
        SimpleDateFormat fmt = new SimpleDateFormat("'LiveRtsp'_yyyyMMdd_HHmmss", Locale.US);
        return fmt.format(new Date(dateTaken));
    }

    // Prepares media recorder.
    /**
     * Configures the MediaRecorder for live output and prepares it.
     * NOTE: MediaRecorder calls are order-sensitive (sources before output
     * format, format before encoders) -- do not reorder.
     */
    private void initializeRecorder() {
        // If the mCameraDevice is null, then this activity is going to finish
	if (CameraAppImpl.DEBUG) 
	    Log.d(TAG, "mCameraDevice = " + mCameraDevice + " mSurfaceHolder = " + mSurfaceHolder);
        if (mCameraDevice == null || mSurfaceHolder == null) return;
        // Unlock the camera object before passing it to media recorder.
        mCameraDevice.unlock();
        mMediaRecorder.setCamera(mCameraDevice);
        // Audio is captured only when not in time-lapse (i.e. audio-live) mode.
        if (!mCaptureTimeLapse) {
            mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        }
	  //mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
	  //mMediaRecorder.setProfile(mProfile);

	mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
	// Custom live formats (media/mediarecorder.h): 10 = A/V, 9 = video only.
	if (getLiveAudioMode() == 1) {
	    mMediaRecorder.setOutputFormat(OUTPUT_FORMAT_LIVE_VIDEO_AND_AUDIO);
	}else {
	    mMediaRecorder.setOutputFormat(OUTPUT_FORMAT_LIVE_VIDEO_ONLY);
	}
	// Fixed live encoding: H.264 at 15 fps / 6 Mbps; AMR-NB mono 8 kHz audio.
	mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
	mMediaRecorder.setVideoFrameRate(15);
	mMediaRecorder.setVideoSize(mDesiredPreviewWidth, mDesiredPreviewHeight);
	mMediaRecorder.setVideoEncodingBitRate(6000000);
	mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
	mMediaRecorder.setAudioEncodingBitRate(12200);
	mMediaRecorder.setAudioChannels(1);
	mMediaRecorder.setAudioSamplingRate(8000);

        if (mCaptureTimeLapse) {
            mMediaRecorder.setCaptureRate((1000 / (double) mTimeBetweenTimeLapseFrameCaptureMs));
        }
        // The local copy of the stream goes to a timestamped .mp4 file.
        long dateTaken = System.currentTimeMillis();
        String title = createName(dateTaken);
	mFileName = Storage.DIRECTORY_VIDEO + '/' + title+".mp4";
	createAndChangVideoFileMode(mFileName);
	mMediaRecorder.setOutputFile(mFileName);
        mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
        try {
            mMediaRecorder.prepare();
        } catch (IOException e) {
            releaseMediaRecorder();
            throw new RuntimeException(e);
        }
    }

    /** Resets and releases the MediaRecorder, if one is still held. */
    private void releaseMediaRecorder() {
        if (mMediaRecorder == null) {
            return;
        }
        mMediaRecorder.reset();
        mMediaRecorder.release();
        mMediaRecorder = null;
    }

    // from MediaRecorder.OnErrorListener
    /** Any recorder-level error aborts the whole live session. */
    @Override
    public void onError(MediaRecorder mr, int what, int extra) {
	Log.e(TAG, "MediaRecorder error. what=" + what + " extra=" + extra);
	mCurrentErrorState = getString(R.string.video_record_error);
	finish();
    }

    // from MediaRecorder.OnInfoListener
    /**
     * Stops local recording when a duration/size limit is hit, and relays the
     * native live server's start notification to GlassSync.LiveModule.
     */
    @Override
    public void onInfo(MediaRecorder mr, int what, int extra) {
        boolean limitReached =
                what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED
                || what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED;
        if (limitReached) {
            if (mMediaRecorderRecording) stopVideoRecording();
        } else if (what == MEDIA_RECORDER_TRACK_INFO_LIVE_SERVER_START) {
            // send start message to Glass.LiveModule
            sendBroadcast(new Intent(CAMERA_ACTION_SEND_URL));
        }
    }

    /**
     * Starts the live stream.  Chooses audio vs. video-only based on the
     * current call state, applies camera parameters, prepares the recorder,
     * and -- when local recording is enabled and storage is available -- also
     * begins saving the stream to a file.
     */
    private void startVideoRecording() {
	if (mMediaRecorderRecording || mFinished) 
	    return;
	// During a phone call the stream carries video only.
	readVideoPreferences(mAudioManager.getMode() != AudioManager.MODE_IN_CALL);
	setCameraParameters();
	if (CameraAppImpl.DEBUG) Log.d(TAG, "startVideoRecording");
	initializeRecorder();
	if (mMediaRecorder == null) {
	    Log.e(TAG, "Fail to initialize media recorder");
	    return;
        }
	try {
	    mMediaRecorder.start(); // Recording is now started
	} catch (RuntimeException e) {
	    Log.e(TAG, "Could not start media recorder. ", e);
	    releaseMediaRecorder();
	    // If start fails, frameworks will not lock the camera for us.
	    mCameraDevice.lock();
	    return;
	}
        mMediaRecorderRecording = true;
	mStorageEnough = StorageSpaceUtil.getAvailableSpace() > 0;
	if(mLiveRecord == 1 && mStorageEnough)
	    startLiveRecoding();
    }
    /** Asks the recorder to persist the current live stream to its output file. */
    private void saveFILE(){
	if (mMediaRecorder == null) {
	    return;
	}
	mMediaRecorder.saveLiveFile();
    }
    /**
     * Stops an in-progress recording, keeps or discards the local output file,
     * and releases the MediaRecorder.
     *
     * The local copy of the stream is kept only when the "live_record" setting
     * is on and storage was available; otherwise the file is deleted.
     */
    private void stopVideoRecording() {
	if (CameraAppImpl.DEBUG) Log.d(TAG, "stopVideoRecording");
        if (mMediaRecorderRecording) {
	    if(mLiveRecord == 1 && mStorageEnough)
		stopLiveRecoding();
            try {
		// Detach listeners first so stop() cannot re-enter our callbacks.
		mMediaRecorder.setOnErrorListener(null);
		mMediaRecorder.setOnInfoListener(null);
		mMediaRecorder.stop();
            } catch (RuntimeException e) {
		Log.e(TAG, "stop fail",  e);
            }
	    mMediaRecorderRecording = false;
	    if(mLiveRecord == 1 && mStorageEnough){
		if (CameraAppImpl.IS_CRUISE_BOARD || CameraAppImpl.HANLANG_THUMBNAIL)
		    creatAndSaveVideoThumbnail(mFileName);
	    }
        }
	// Discard the file when local recording is off or storage ran out.
	// mFileName is null when initializeRecorder() never ran (e.g. the
	// activity finished before the surface was ready); the old code threw
	// a NullPointerException from new File(null) in that case.
	if (mFileName != null && (mLiveRecord == 0 || !mStorageEnough)) {
	    File file = new File(mFileName);
	    if (file.exists()) {
		file.delete();
	    }
	}
	releaseMediaRecorder();
    }

    /**
     * Idempotent shutdown entry point: stops recording, closes the camera, and
     * speaks a stop/error TTS prompt before delegating to Activity.finish().
     */
    @Override
    public void finish() {
	synchronized (mLock) {		    
	    if (mFinished)
		return;
	    mFinished = true;
	}	    
	// Close the camera promptly here rather than in onDestroy(): onDestroy()
	// runs late, and if a new recording was started before it ran, the
	// second camera open would fail.  So the close was moved forward into
	// finish().
	finishRecorderAndCloseCamera();
	if (CameraAppImpl.USE_TTS && mAudioManager.getMode() != AudioManager.MODE_IN_CALL) {
	    String tts = null;
	    // A "not finished" status still counts as a normal stop for TTS.
	    if (mCurrentErrorState.equals(getString(R.string.live_no_error)) || mCurrentErrorState.equals(CameraButtonIntentReceiver.STATUS_LIVE_NOT_FINISH)) {
		tts = getString(R.string.tts_live_video_record_stop);
	    } else {
		tts = getString(R.string.tts_live_video_record_error);
	    }
	    if (mVoiceRecognizer != null)
		mVoiceRecognizer.playTTS(tts);
	}

	super.finish();
    }

    /**
     * Applies the live preview mode and the fixed preview size to the camera,
     * then grabs the application wake lock so the device stays awake while
     * streaming.
     */
    private void setCameraParameters(){
	if (CameraAppImpl.DEBUG) Log.d(TAG, "set camera_ipu_direct record and restart preview.");
	if (mCameraDevice == null) {
	    return;
	}
	if (NO_SCREEN_DEVICE) {
	    // Screenless hardware streams without driving a local display.
	    mParameters.set("preview_mode", CameraAppImpl.NO_SCREEN_LIVE);
	} else {
	    mParameters.set("preview_mode", CameraAppImpl.LOW_POWER_DISPLAY_VIDEO_LIVE);
	}
	mParameters.setPreviewSize(mDesiredPreviewWidth, mDesiredPreviewHeight);
	mCameraDevice.setParameters(mParameters);
	mApplication.acquireWakeLock();
    }

    /**
     * Reads the system "live_audio" setting.
     *
     * @return 1 when audio should be streamed along with video, 0 otherwise
     *         (including when the setting is missing)
     */
    private int getLiveAudioMode() {
	try {
	    return Settings.System.getInt(getContentResolver(), "live_audio");
	} catch (SettingNotFoundException e) {
	    Log.e(TAG,"LiveAudioMode SettingNotFound");
	    return 0;
	}
    }

    /**
     * Routes touch events to the gesture detector (slide-down ends the live
     * session; see MySimpleGestureDetectorListener).
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
	// mGestureDetector is null when onCreate() bailed out before creating it
	// (environment check or camera open failed); the old code crashed with a
	// NullPointerException on any touch in that window.
	if (mGestureDetector == null) {
	    return super.onTouchEvent(event);
	}
	return mGestureDetector.onTouchEvent(event);
    }

    /**
     * Surface is ready: remember the holder and kick off the live recording.
     * startVideoRecording() is a no-op if already recording or finishing.
     */
    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
	if (CameraAppImpl.DEBUG)
	    Log.d(TAG, "holder.getSurface() = " + holder.getSurface() + "  mCameraDevice = " + mCameraDevice);
        if (holder.getSurface() == null || mCameraDevice == null) return;
        mSurfaceHolder = holder;
	startVideoRecording();
    }


    // Audio focus is requested only to quiet other apps during the live
    // session; focus-change events themselves are deliberately ignored.
    private OnAudioFocusChangeListener mAudioFocusListener = new OnAudioFocusChangeListener() {
	    public void onAudioFocusChange(int focusChange) {
	    }
	};

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
	// Nothing to do here; recording starts from surfaceChanged().
    }

    /** Drops the surface reference; initializeRecorder() checks it for null. */
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
	if (CameraAppImpl.DEBUG) Log.d(TAG, "--surfaceDestroyed");
        mSurfaceHolder = null;
    }

    // A slide-down gesture ends the live session as a normal stop.
    // NOTE(review): onSlideDown is not part of the stock
    // SimpleOnGestureListener API -- presumably a vendor extension of
    // GestureDetector on this platform; confirm against the platform sources.
    class MySimpleGestureDetectorListener extends GestureDetector.SimpleOnGestureListener{
	@Override
	public boolean onSlideDown(boolean fromPhone){
	    mCurrentErrorState = getString(R.string.live_no_error);
	    finish();
	    return true;
	}
    }

    /** Publishes (or clears, when passed null) the running instance. */
    public synchronized static void setMInstance(CameraLive mInstance) {
	CameraLive.mInstance = mInstance;
    }

    /** @return the running CameraLive instance, or null when none is active. */
    public synchronized static CameraLive getMInstance() {
	return CameraLive.mInstance;
    }
    
    /** Overrides the status string broadcast when the session ends. */
    public void setErrorState(String state){
	mCurrentErrorState = state;
    }

    /** Triggers a still capture; the JPEG is delivered to onPictureTaken(). */
    public void takePicture() {
        if (mCameraDevice != null) {
            mCameraDevice.takePicture(null, null, null, this);
        }
    }
 
    /**
     * Begins saving the live stream to local storage.  No-ops when a save is
     * already in progress; shows a toast when the recorder is not running.
     */
    public void startLiveRecoding() {
	if (!mMediaRecorderRecording) {
	    Toast.makeText(this, "录像失败", Toast.LENGTH_SHORT).show();
	    return;
	}
	if (mIsSavingFile) {
	    return;
	}

	try {
	    mMediaRecorder.saveLiveFile();
	} catch (Exception e) {
	    e.printStackTrace();
	}
	// Any "saving started" UI feedback would go here.
	mIsSavingFile = true;
    }
   
    /** Finishes saving the live stream to local storage. */
    public void stopLiveRecoding() {
	if (!mMediaRecorderRecording) {
	    return;
	}

	try {
	    mMediaRecorder.saveLiveFile();
	} catch (Exception e) {
	    e.printStackTrace();
	}
	mIsSavingFile = false;
    }

    /** PictureCallback: persists the captured JPEG via Util.storeImage(). */
    @Override
	public void onPictureTaken(final byte [] jpegData, final android.hardware.Camera camera) {
	    Util.storeImage(this,jpegData,mParameters.getPictureSize());
    }

    /**
     * Asynchronously generates a JPEG thumbnail for the recorded video and
     * stores it, world-accessible, under Storage.DIRECTORY_VIDEO_THUMBNAILS.
     *
     * @param path absolute path of the recorded video file
     */
    public void creatAndSaveVideoThumbnail(String path) {
	final String videoPath = path;
	new Thread(new Runnable() {
		@Override
		public void run() {
		    Log.i(TAG,"creatAndSaveVideoThumbnail thread id="+Thread.currentThread().getId());
		    try {
			Bitmap bitmap = Thumbnail.createVideoThumbnail(videoPath, 512);
			if (bitmap == null)
			    return;
			// Thumbnail path mirrors the video path, rooted in the
			// thumbnails directory.
			String savePath = videoPath.replace(Storage.DIRECTORY_VIDEO, Storage.DIRECTORY_VIDEO_THUMBNAILS);
			Log.i(TAG,"savePath="+savePath);
			File thumbfile = new File(savePath);
			File thumbDir = thumbfile.getParentFile();
			if (thumbDir != null && !thumbDir.exists()) {
			    thumbDir.mkdirs();
			    // World-accessible directory; the permission setters
			    // replace the old "chmod" shell-out, which broke on
			    // paths containing spaces.
			    thumbDir.setReadable(true, false);
			    thumbDir.setWritable(true, false);
			    thumbDir.setExecutable(true, false);
			}

			// Recreate the file world-readable/writable.
			thumbfile.delete();
			thumbfile.createNewFile();
			thumbfile.setReadable(true, false);
			thumbfile.setWritable(true, false);
			thumbfile.setExecutable(true, false);

			// Close the stream in finally: the old code leaked the
			// FileOutputStream whenever compress()/flush() threw.
			FileOutputStream fos = new FileOutputStream(savePath);
			try {
			    bitmap.compress(CompressFormat.JPEG, 100, fos);
			    fos.flush();
			} finally {
			    fos.close();
			}
		    } catch (Exception e) {
			e.printStackTrace();
		    }
		}
	    }).start();
    }
}