/*
 *  UVCCamera
 *  library and sample to access to UVC web camera on non-rooted Android device
 *
 * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 *
 *  All files in the folder are under this Apache License, Version 2.0.
 *  Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
 *  may have a different license, see the respective files.
 */

package com.serenegiant.usb.common;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.usb.UsbDevice;
import android.media.AudioManager;
import android.media.MediaScannerConnection;
import android.media.SoundPool;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import com.tungkong.libusbcamera.R;
import com.serenegiant.glutils.RenderHolderCallback;
import com.serenegiant.glutils.RendererHolder;
import com.serenegiant.usb.Size;
import com.serenegiant.usb.encoder.MediaAudioEncoder;
import com.serenegiant.usb.encoder.MediaEncoder;
import com.serenegiant.usb.encoder.MediaMuxerWrapper;
import com.serenegiant.usb.encoder.MediaSurfaceEncoder;
import com.serenegiant.usb.encoder.MediaVideoBufferEncoder;
import com.serenegiant.usb.encoder.MediaVideoEncoder;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.USBMonitor;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.notify.OnCameraFrameCallBack;
import com.serenegiant.usb.widget.CameraViewInterface;

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

abstract class AbstractUVCCameraHandler extends Handler {
    // Verbose logging switch for the handler and its camera thread.
    private static final boolean DEBUG = true;	// TODO set false on release
    // Log tag for the handler side (CameraThread logs under TAG_THREAD).
    private static final String TAG = "AbsUVCCameraHandler";

    /**
     * Lifecycle callbacks fired by the camera thread. All methods are invoked
     * from the camera thread's looper, not the UI thread.
     * (Interface members are implicitly public; redundant modifiers removed.)
     */
    public interface CameraCallback {
        /** Camera device opened successfully. */
        void onOpen();
        /** Camera device closed and destroyed. */
        void onClose();
        /** Preview started. */
        void onStartPreview();
        /** Preview stopped. */
        void onStopPreview();
        /** Recording (muxer) started. */
        void onStartRecording();
        /** Recording stopped. */
        void onStopRecording();
        /** An operation failed; {@code e} is the causing exception. */
        void onError(final Exception e);
    }

    // Message "what" codes posted to this handler; each is dispatched to the
    // matching CameraThread.handleXxx() in handleMessage() below.
    private static final int MSG_OPEN = 0;
    private static final int MSG_CLOSE = 1;
    private static final int MSG_PREVIEW_START = 2;
    private static final int MSG_PREVIEW_STOP = 3;
    private static final int MSG_CAPTURE_STILL = 4;
    private static final int MSG_CAPTURE_START = 5;
    private static final int MSG_CAPTURE_STOP = 6;
    private static final int MSG_MEDIA_UPDATE = 7;
    private static final int MSG_RELEASE = 8;
    private static final int MSG_CAMERA_FOUCS = 9;      // note: "FOUCS" typo kept — public API uses it
    private static final int MSG_SET_PREVIEW_SIZE = 10;
    private static final int MSG_CAPTURE_CUT = 11;

    // Weak reference so this handler does not keep the camera thread (and the
    // Activity it references) alive once the thread is gone.
    private final WeakReference<AbstractUVCCameraHandler.CameraThread> mWeakThread;
    // Set by release(); read by isReleased()/checkReleased() from any thread.
    private volatile boolean mReleased;

    /**
     * @param thread the camera thread this handler dispatches work to;
     *               held weakly so it can be collected after release
     */
    protected AbstractUVCCameraHandler(final CameraThread thread) {
        mWeakThread = new WeakReference<>(thread);
    }

    /** @return current preview width, or 0 when the camera thread is gone */
    public int getWidth() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return 0;
        }
        return thread.getWidth();
    }

    /** @return current preview height, or 0 when the camera thread is gone */
    public int getHeight() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return 0;
        }
        return thread.getHeight();
    }

    /**
     * @return the camera's supported preview sizes, or null when the camera
     *         thread has been collected or the camera is not open.
     */
    public List<Size> getSupportedPreviewSizes(){
        // FIX: every other accessor null-checks the weak thread reference; this
        // one dereferenced it unconditionally and could NPE after the thread died.
        final CameraThread thread = mWeakThread.get();
        return thread != null ? thread.getSupportedPreviewList() : null;
    }

    /** @return true when the camera thread is alive and a camera is open */
    public boolean isOpened() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        return thread.isCameraOpened();
    }

    /** @return true when the camera thread is alive and previewing */
    public boolean isPreviewing() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        return thread.isPreviewing();
    }

    /** @return true when the camera thread is alive and recording */
    public boolean isRecording() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        return thread.isRecording();
    }

    /** @return true when the open camera is attached to the given USB device */
    public boolean isEqual(final UsbDevice device) {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        return thread.isEqual(device);
    }

    /** @return true when the caller is executing on the camera thread itself */
    protected boolean isCameraThread() {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        return thread.getId() == Thread.currentThread().getId();
    }

    /** @return true once release() was called or the camera thread was collected */
    protected boolean isReleased() {
        if (mReleased) {
            return true;
        }
        return mWeakThread.get() == null;
    }

    /**
     * Guard used by the posting entry points.
     *
     * @throws IllegalStateException when this handler is already released
     */
    protected void checkReleased() {
        if (!isReleased()) {
            return;
        }
        throw new IllegalStateException("already released");
    }

    /**
     * Post an open request for the given USB control block.
     *
     * @throws IllegalStateException when already released
     */
    public void open(final USBMonitor.UsbControlBlock ctrlBlock) {
        checkReleased();
        final Message msg = obtainMessage(MSG_OPEN, ctrlBlock);
        sendMessage(msg);
    }

    /** Stop the preview (if any) and post a close request; no-op when not open. */
    public void close() {
        if (DEBUG) Log.v(TAG, "close:");
        final boolean opened = isOpened();
        if (opened) {
            stopPreview();
            sendEmptyMessage(MSG_CLOSE);
        }
        if (DEBUG) Log.v(TAG, "close:finished");
    }

    /**
     * Resize is not implemented.
     *
     * @throws IllegalStateException when already released
     * @throws UnsupportedOperationException always, otherwise
     */
    public void resize(final int width, final int height) {
        checkReleased();
        throw new UnsupportedOperationException("does not support now");
    }

    /**
     * Post a preview-start request targeting the given render target.
     *
     * @param surface must be a SurfaceHolder, Surface or SurfaceTexture
     * @throws IllegalStateException when already released
     * @throws IllegalArgumentException for any other target (including null)
     */
    protected void startPreview(final Object surface) {
        checkReleased();
        final boolean supported = (surface instanceof SurfaceHolder)
                || (surface instanceof Surface)
                || (surface instanceof SurfaceTexture);
        if (!supported) {
            throw new IllegalArgumentException("surface should be one of SurfaceHolder, Surface or SurfaceTexture");
        }
        sendMessage(obtainMessage(MSG_PREVIEW_START, surface));
    }

    /**
     * Post a preview-start request with no render target; the camera thread
     * then renders off-screen.
     *
     * @throws IllegalStateException when already released
     */
    protected void startBackgroundPreview() {
        checkReleased();
        final Message msg = obtainMessage(MSG_PREVIEW_START, null);
        sendMessage(msg);
    }

    /**
     * Stop recording and preview. When called from a thread other than the
     * camera thread, blocks until the preview has actually stopped so the
     * Surface/SurfaceTexture is no longer in use when the caller releases it.
     */
    public void stopPreview() {
        if (DEBUG) Log.v(TAG, "stopPreview:");
        removeMessages(MSG_PREVIEW_START);
        stopRecording();
        if (isPreviewing()) {
            final CameraThread thread = mWeakThread.get();
            if (thread == null) return;
            synchronized (thread.mSync) {
                sendEmptyMessage(MSG_PREVIEW_STOP);
                if (!isCameraThread()) {
                    // wait for actually preview stopped to avoid releasing Surface/SurfaceTexture
                    // while preview is still running.
                    // therefore this method will take a time to execute
                    try {
                        thread.mSync.wait();
                    } catch (final InterruptedException e) {
                        // FIX: don't swallow the interrupt — restore the flag so
                        // callers up the stack can observe it.
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
        if (DEBUG) Log.v(TAG, "stopPreview:finished");
    }

    /**
     * Post a request to save the next preview frame as a JPEG at {@code path}.
     *
     * @throws IllegalStateException when already released
     */
    public void captureCut(final String path){
        checkReleased();
        final Message msg = obtainMessage(MSG_CAPTURE_CUT, path);
        sendMessage(msg);
    }

    /**
     * Post a still-capture request using the default output file.
     *
     * @throws IllegalStateException when already released
     */
    protected void captureStill() {
        checkReleased();
        sendMessage(obtainMessage(MSG_CAPTURE_STILL));
    }

    /**
     * Post a still-capture request writing to {@code path}.
     *
     * @throws IllegalStateException when already released
     */
    protected void captureStill(final String path) {
        checkReleased();
        final Message msg = obtainMessage(MSG_CAPTURE_STILL, path);
        sendMessage(msg);
    }

    /**
     * Post a recording-start request.
     *
     * @throws IllegalStateException when already released
     */
    public void startRecording() {
        checkReleased();
        sendMessage(obtainMessage(MSG_CAPTURE_START));
    }

    /**
     * Post a recording-stop request. Performs no released check, so it is safe
     * to call from teardown paths.
     */
    public void stopRecording() {
        sendMessage(obtainMessage(MSG_CAPTURE_STOP));
    }

    /**
     * Mark this handler released, close the camera and post the final release
     * message; after it is handled the camera thread's looper quits.
     */
    public void release() {
        mReleased = true;
        close();
        sendMessage(obtainMessage(MSG_RELEASE));
    }

    /**
     * Post an auto-focus request. (Name typo "Foucs" retained — it is public API.)
     */
    public void startCameraFoucs() {
        sendMessage(obtainMessage(MSG_CAMERA_FOUCS));
    }

    /**
     * Post a preview-resolution change request.
     *
     * @param size two-element array: {width, height}
     * @throws IllegalStateException when already released
     */
    public void startPreviewSize(int[] size) {
        // Consistency fix: every other posting entry point validates the
        // released state first; posting to a dead thread was silently dropped.
        checkReleased();
        sendMessage(obtainMessage(MSG_SET_PREVIEW_SIZE, size));
    }

    /**
     * Register a lifecycle callback.
     *
     * @throws IllegalStateException when already released
     */
    public void addCallback(final CameraCallback callback) {
        checkReleased();
        if (mReleased || callback == null) {
            return;
        }
        final CameraThread thread = mWeakThread.get();
        if (thread != null) {
            thread.mCallbacks.add(callback);
        }
    }

    /** Unregister a lifecycle callback; safe to call after release. */
    public void removeCallback(final CameraCallback callback) {
        if (callback == null) {
            return;
        }
        final CameraThread thread = mWeakThread.get();
        if (thread != null) {
            thread.mCallbacks.remove(callback);
        }
    }

    /**
     * Register a raw-frame callback that receives preview image data.
     *
     * @throws IllegalStateException when already released
     */
    public void addOnCameraFrameCallBack(final OnCameraFrameCallBack callback) {
        checkReleased();
        if (mReleased || callback == null) {
            return;
        }
        final CameraThread thread = mWeakThread.get();
        if (thread != null) {
            thread.mOnCameraFrameCallBacks.add(callback);
        }
    }

    /** Unregister a raw-frame callback; safe to call after release. */
    public void removeOnCameraFrameCallBack(final OnCameraFrameCallBack callback) {
        if (callback == null) {
            return;
        }
        final CameraThread thread = mWeakThread.get();
        if (thread != null) {
            thread.mOnCameraFrameCallBacks.remove(callback);
        }
    }

    /** Post a media-scanner update request for the file at {@code path}. */
    protected void updateMedia(final String path) {
        final Message msg = obtainMessage(MSG_MEDIA_UPDATE, path);
        sendMessage(msg);
    }

    /**
     * @return true when the open camera reports support for {@code flag}
     * @throws IllegalStateException when already released
     */
    public boolean checkSupportFlag(final long flag) {
        checkReleased();
        final CameraThread thread = mWeakThread.get();
        if (thread == null) {
            return false;
        }
        final UVCCamera camera = thread.mUVCCamera;
        return (camera != null) && camera.checkSupportFlag(flag);
    }

    /**
     * Read a camera control value.
     *
     * @param flag UVCCamera.PU_BRIGHTNESS or UVCCamera.PU_CONTRAST
     * @return the current value for the given control
     * @throws IllegalStateException when released, when no camera is open, or
     *         when {@code flag} is not one of the supported controls
     */
    public int getValue(final int flag) {
        checkReleased();
        final CameraThread thread = mWeakThread.get();
        final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
        if (camera != null) {
            if (flag == UVCCamera.PU_BRIGHTNESS) {
                return camera.getBrightness();
            } else if (flag == UVCCamera.PU_CONTRAST) {
                return camera.getContrast();
            }
        }
        // FIX: include context instead of a bare IllegalStateException
        throw new IllegalStateException("camera not opened or unsupported flag: " + flag);
    }

    /**
     * Write a camera control value and read it back.
     *
     * @param flag UVCCamera.PU_BRIGHTNESS or UVCCamera.PU_CONTRAST
     * @param value the value to set
     * @return the value the camera reports after setting
     * @throws IllegalStateException when released, when no camera is open, or
     *         when {@code flag} is not one of the supported controls
     */
    public int setValue(final int flag, final int value) {
        checkReleased();
        final CameraThread thread = mWeakThread.get();
        final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
        if (camera != null) {
            if (flag == UVCCamera.PU_BRIGHTNESS) {
                camera.setBrightness(value);
                return camera.getBrightness();
            } else if (flag == UVCCamera.PU_CONTRAST) {
                camera.setContrast(value);
                return camera.getContrast();
            }
        }
        // FIX: include context instead of a bare IllegalStateException
        throw new IllegalStateException("camera not opened or unsupported flag: " + flag);
    }

    /**
     * Reset a camera control to its default and read the resulting value.
     *
     * @param flag UVCCamera.PU_BRIGHTNESS or UVCCamera.PU_CONTRAST
     * @return the value after the reset
     * @throws IllegalStateException when released, when no camera is open, or
     *         when {@code flag} is not one of the supported controls
     */
    public int resetValue(final int flag) {
        checkReleased();
        final CameraThread thread = mWeakThread.get();
        final UVCCamera camera = thread != null ? thread.mUVCCamera : null;
        if (camera != null) {
            if (flag == UVCCamera.PU_BRIGHTNESS) {
                camera.resetBrightness();
                return camera.getBrightness();
            } else if (flag == UVCCamera.PU_CONTRAST) {
                camera.resetContrast();
                return camera.getContrast();
            }
        }
        // FIX: include context instead of a bare IllegalStateException
        throw new IllegalStateException("camera not opened or unsupported flag: " + flag);
    }

    /**
     * Dispatches posted messages to the camera thread's handleXxx() methods.
     * Runs on the camera thread's looper; a collected thread silently drops
     * the message.
     */
    @Override
    public void handleMessage(final Message msg) {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) return;
        switch (msg.what) {
            case MSG_OPEN:
                thread.handleOpen((USBMonitor.UsbControlBlock)msg.obj);
                break;
            case MSG_CLOSE:
                thread.handleClose();
                break;
            case MSG_PREVIEW_START:
                // msg.obj is a SurfaceHolder/Surface/SurfaceTexture, or null for background preview
                thread.handleStartPreview(msg.obj);
                break;
            case MSG_PREVIEW_STOP:
                thread.handleStopPreview();
                break;
            case MSG_CAPTURE_STILL:
                thread.handleCaptureStill((String)msg.obj);
                break;
            case MSG_CAPTURE_START:
                thread.handleStartRecording();
                break;
            case MSG_CAPTURE_STOP:
                thread.handleStopRecording();
                break;
            case MSG_MEDIA_UPDATE:
                thread.handleUpdateMedia((String)msg.obj);
                break;
            case MSG_RELEASE:
                thread.handleRelease();
                break;
            case MSG_CAMERA_FOUCS:
                thread.handleCameraFoucs();
                break;
            case MSG_SET_PREVIEW_SIZE:
                thread.handlePreviewSize((int[])msg.obj);
                break;
            case MSG_CAPTURE_CUT:
                thread.handlePreviewYUVToJPEG((String)msg.obj);
                break;
            default:
                throw new RuntimeException("unsupported message:what=" + msg.what);
        }
    }

    static final class CameraThread extends Thread {
        private static final String TAG_THREAD = "CameraThread";
        // Guards mUVCCamera/mIsPreviewing/mMuxer/mWidth/mHeight and doubles as the
        // wait/notify monitor for handler-ready (getHandler) and preview-stopped
        // (stopPreview/handleStopPreview) signalling.
        private final Object mSync = new Object();
        private final Class<? extends AbstractUVCCameraHandler> mHandlerClass;
        private final WeakReference<Activity> mWeakParent;
        private final WeakReference<CameraViewInterface> mWeakCameraView;
        private final int mEncoderType;
        private boolean mIsRecordAudio = false;     // whether to record audio; hard-coded off for now
        private final Set<CameraCallback> mCallbacks = new CopyOnWriteArraySet<CameraCallback>();
        private final Set<OnCameraFrameCallBack> mOnCameraFrameCallBacks = new CopyOnWriteArraySet<OnCameraFrameCallBack>();
        private int mWidth, mHeight, mPreviewMode;
        private float mBandwidthFactor;
        private boolean mIsPreviewing;
        private boolean mIsRecording;
        /**
         * shutter sound
         */
        private SoundPool mSoundPool;
        private int mSoundId;
        private AbstractUVCCameraHandler mHandler;
        /**
         * for accessing UVC camera
         */
        private UVCCamera mUVCCamera;
        /**
         * muxer for audio/video recording
         */
        private MediaMuxerWrapper mMuxer;
        private MediaVideoBufferEncoder mVideoEncoder;

        /**
         * cut one camera preview frame to a jpeg file
         */
        private String mPicturePath;
        private boolean mIsCuted = true;

        // Off-screen renderer used when preview starts without a caller surface.
        private RendererHolder mRendererHolder;

        /**
         * @param clazz concrete handler class extending AbstractUVCCameraHandler
         * @param parent parent Activity
         * @param cameraView view used for still capturing
         * @param encoderType 0: MediaSurfaceEncoder, 1: MediaVideoEncoder, 2: MediaVideoBufferEncoder
         * @param width preview width in pixels
         * @param height preview height in pixels
         * @param format either FRAME_FORMAT_YUYV(0) or FRAME_FORMAT_MJPEG(1)
         * @param bandwidthFactor USB bandwidth factor forwarded to setPreviewSize
         */
        CameraThread(final Class<? extends AbstractUVCCameraHandler> clazz,
                     final Activity parent, final CameraViewInterface cameraView,
                     final int encoderType, final int width, final int height, final int format,
                     final float bandwidthFactor) {
            super("CameraThread");
            mHandlerClass = clazz;
            mWeakParent = new WeakReference<>(parent);
            mWeakCameraView = new WeakReference<>(cameraView);
            mEncoderType = encoderType;
            mWidth = width;
            mHeight = height;
            mPreviewMode = format;
            mBandwidthFactor = bandwidthFactor;
            loadShutterSound(parent);
        }

        // Debug trace only; logs when the GC finalizes this thread object.
        @Override
        protected void finalize() throws Throwable {
            Log.i(TAG, "CameraThread#finalize");
            super.finalize();
        }

        /**
         * Block until the camera thread has created its handler, then return it.
         *
         * @return the handler, or null if the wait was interrupted before the
         *         handler was created
         */
        public AbstractUVCCameraHandler getHandler() {
            if (DEBUG) Log.v(TAG_THREAD, "getHandler:");
            synchronized (mSync) {
                // FIX: wait in a loop — a plain "if" returns a still-null handler
                // on a spurious wakeup (Object.wait contract requires a loop).
                while (mHandler == null) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        // restore the interrupt flag and stop waiting
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }
            return mHandler;
        }

        /** @return current preview width (read under mSync) */
        public int getWidth() {
            final int width;
            synchronized (mSync) {
                width = mWidth;
            }
            return width;
        }

        /** @return current preview height (read under mSync) */
        public int getHeight() {
            final int height;
            synchronized (mSync) {
                height = mHeight;
            }
            return height;
        }

        /**
         * @return supported preview sizes of the open camera, or null when no
         *         camera is open.
         */
        public List<Size> getSupportedPreviewList(){
            // FIX: snapshot the camera under the lock — the original checked
            // isCameraOpened() and then re-read mUVCCamera unsynchronized, so a
            // concurrent handleClose() could null it in between (NPE).
            final UVCCamera camera;
            synchronized (mSync) {
                camera = mUVCCamera;
            }
            return camera != null ? camera.getSupportedSizeList() : null;
        }

        /** @return true when a camera is currently open */
        public boolean isCameraOpened() {
            final UVCCamera camera;
            synchronized (mSync) {
                camera = mUVCCamera;
            }
            return camera != null;
        }

        /** @return true when a camera is open and the preview is running */
        public boolean isPreviewing() {
            synchronized (mSync) {
                if (mUVCCamera == null) {
                    return false;
                }
                return mIsPreviewing;
            }
        }

        /** @return true when a camera is open and a muxer is active */
        public boolean isRecording() {
            synchronized (mSync) {
                if (mUVCCamera == null) {
                    return false;
                }
                return mMuxer != null;
            }
        }

        /** @return true when the open camera is backed by the given USB device */
        public boolean isEqual(final UsbDevice device) {
            final UVCCamera camera = mUVCCamera;
            if (camera == null) {
                return false;
            }
            final UsbDevice current = camera.getDevice();
            return (current != null) && current.equals(device);
        }

        /**
         * Open the UVC camera described by {@code ctrlBlock}, closing any
         * previously open camera first. Publishes the camera under mSync and
         * fires onOpen() on success or onError() on failure.
         */
        public void handleOpen(final USBMonitor.UsbControlBlock ctrlBlock) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleOpen");

            if (DEBUG) Log.e(TAG_THREAD,"vid:"+ ctrlBlock.getDevice().getVendorId()
                    + ",pid:" + ctrlBlock.getDevice().getProductId()
                    + ",path:" + ctrlBlock.getDevice().getDeviceName()
                    + ",bus:" + ctrlBlock.getBusNum()
                    + ",dev:" + ctrlBlock.getDevNum());
            // ensure any previous camera is stopped and destroyed before reopening
            handleClose();
            try {
                final UVCCamera camera = new UVCCamera();
                camera.open(ctrlBlock);
                synchronized (mSync) {
                    mUVCCamera = camera;
                }
                callOnOpen();
            } catch (final Exception e) {
                callOnError(e);
            }
            // mUVCCamera stays null here if open failed
            if (DEBUG) Log.i(TAG, "supportedSize:" + (mUVCCamera != null ? mUVCCamera.getSupportedSize() : null));
        }

        /**
         * Stop recording, detach the camera reference under mSync, then stop the
         * preview and destroy the native camera. onClose() fires only when a
         * camera was actually open; calling this with no camera is a no-op.
         */
        public void handleClose() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleClose");
            handleStopRecording();
            final UVCCamera camera;
            synchronized (mSync) {
                // swap out under the lock so other threads never see a dying camera
                camera = mUVCCamera;
                mUVCCamera = null;
            }
            if (camera != null) {
                camera.stopPreview();
                camera.destroy();
                callOnClose();
            }
        }

        /**
         * Configure the preview size, register the NV21 frame callback and start
         * the preview, either on the supplied surface or off-screen when
         * {@code surface} is null. No-op when no camera is open or already
         * previewing.
         */
        public void handleStartPreview(final Object surface) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleStartPreview");
            if ((mUVCCamera == null) || mIsPreviewing) return;
            try {
                mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, mPreviewMode, mBandwidthFactor);
            } catch (final IllegalArgumentException e) {
                try {
                    // fallback to YUV mode
                    mUVCCamera.setPreviewSize(mWidth, mHeight, 1, 31, UVCCamera.DEFAULT_PREVIEW_MODE, mBandwidthFactor);
                } catch (final IllegalArgumentException e1) {
                    callOnError(e1);
                    return;
                }
            }
            // register the per-frame callback; frames are delivered as NV21
            mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
            // Preview target: either a caller-supplied surface, or an off-screen
            // RendererHolder for background (invisible) preview.
            if(surface != null){
                if (surface instanceof SurfaceHolder) {
                    mUVCCamera.setPreviewDisplay((SurfaceHolder)surface);
                } else if (surface instanceof Surface) {
                    // FIX: was a bare "if", so a SurfaceHolder also fell into the
                    // following else branch and was cast to SurfaceTexture (CCE).
                    mUVCCamera.setPreviewDisplay((Surface)surface);
                } else {
                    mUVCCamera.setPreviewTexture((SurfaceTexture)surface);
                }
            } else {
                if (DEBUG) Log.e(TAG_THREAD, "set background preview");
                if(mRendererHolder == null){
                    mRendererHolder  = new RendererHolder(mWidth, mHeight, mRenderHolderCallback);
                }
                Surface surfaceTexture =  mRendererHolder.getSurface();
                if(surfaceTexture != null){
                    if (DEBUG) Log.v(TAG_THREAD, "set surfaceTexture success");
                    mUVCCamera.setPreviewDisplay(surfaceTexture);
                } else {
                    if (DEBUG) Log.v(TAG_THREAD, "surfaceTexture is null");
                }
            }
            mUVCCamera.startPreview();
            mUVCCamera.updateCameraParams();
            synchronized (mSync) {
                mIsPreviewing = true;
            }
            callOnStartPreview();
        }

        /**
         * Stop the preview, clear the frame callback, release the off-screen
         * surface (when one was used) and notify waiters blocked in
         * stopPreview().
         */
        public void handleStopPreview() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleStopPreview");
            if (mIsPreviewing) {
                if (mUVCCamera != null) {
                    mUVCCamera.stopPreview();
                    // detach the per-frame callback
                    mUVCCamera.setFrameCallback(null, 0);
                    // FIX: mRendererHolder only exists for background preview; the
                    // unconditional dereference NPE'd after surface-based preview.
                    if (mRendererHolder != null) {
                        mRendererHolder.getSurface().release();
                    }
                }
                synchronized (mSync) {
                    mIsPreviewing = false;
                    // wake threads blocked in stopPreview() waiting on mSync
                    mSync.notifyAll();
                }
                callOnStopPreview();
            }
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleStopPreview:finished");
        }

        // Lifecycle callbacks of the off-screen renderer used for background
        // preview; currently log-only (no state is updated here).
        private final RenderHolderCallback mRenderHolderCallback = new RenderHolderCallback() {
            @Override
            public void onCreate(Surface surface) {
                if (DEBUG) Log.e(TAG, "CameraThread:RenderHolderCallback:onCreate");
            }

            @Override
            public void onFrameAvailable() {
                // intentionally silent: this fires once per frame
                //if (DEBUG) Log.e(TAG, "CameraThread:RenderHolderCallback:onFrameAvailable");
            }

            @Override
            public void onDestroy() {
                if (DEBUG) Log.e(TAG, "CameraThread:RenderHolderCallback:onDestroy");
            }
        };

        /**
         * Capture the current camera-view frame to a JPEG file, play the shutter
         * sound, and post a media-scanner update for the written file.
         *
         * @param path target file, or null/empty to use a generated DCIM file
         */
        public void handleCaptureStill(final String path) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleCaptureStill");
            final Activity parent = mWeakParent.get();
            if (parent == null) return;
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:parent != null");
            mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f);	// play shutter sound
            try {
                final Bitmap bitmap = mWeakCameraView.get().captureStillImage();
                // The bitmap is compressed as JPEG below, so the generated default
                // file must carry a matching extension.
                // FIX: was ".png", which produced JPEG data in a .png file.
                final File outputFile = TextUtils.isEmpty(path)
                        ? MediaMuxerWrapper.getCaptureFile(Environment.DIRECTORY_DCIM, ".jpg")
                        : new File(path);
                final BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile));
                try {
                    try {
                        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, os);
                        os.flush();
                        mHandler.sendMessage(mHandler.obtainMessage(MSG_MEDIA_UPDATE, outputFile.getPath()));
                    } catch (final IOException e) {
                        e.printStackTrace();
                    }
                } finally {
                    os.close();
                }
            } catch (final Exception e) {
                e.printStackTrace();
                callOnError(e);
            }
        }

        /**
         * Create a MediaMuxerWrapper with the encoder selected by mEncoderType
         * (default/0: MediaSurfaceEncoder, 1: MediaVideoEncoder,
         * 2: MediaVideoBufferEncoder), optionally add an audio encoder, and
         * start recording. Fires onStartRecording() on success or onError() on
         * IOException. No-op when no camera is open or already recording.
         */
        public void handleStartRecording() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleStartRecording");
            try {
                if ((mUVCCamera == null) || (mMuxer != null)) return;
                final MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4");	// if you record audio only, ".m4a" is also OK.
                MediaVideoBufferEncoder videoEncoder = null;
                // NOTE(review): the encoder constructors' return values are unused,
                // so the encoders presumably register themselves with the muxer in
                // their constructors — verify against MediaEncoder.
                switch (mEncoderType) {
                    case 1:	// for video capturing using MediaVideoEncoder
                        new MediaVideoEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
                        break;
                    case 2:	// for video capturing using MediaVideoBufferEncoder
                        videoEncoder = new MediaVideoBufferEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
                        break;
                    // case 0:	// for video capturing using MediaSurfaceEncoder
                    default:
                        new MediaSurfaceEncoder(muxer, getWidth(), getHeight(), mMediaEncoderListener);
                        break;
                }
                if (mIsRecordAudio) {
                    // for audio capturing
                    new MediaAudioEncoder(muxer, mMediaEncoderListener);
                }
                muxer.prepare();
                muxer.startRecording();
                if (videoEncoder != null) {
                    // buffer encoder is fed from the frame callback, so re-register it
                    mUVCCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_NV21);
                }
                synchronized (mSync) {
                    mMuxer = muxer;
                    mVideoEncoder = videoEncoder;
                }
                callOnStartRecording();
            } catch (final IOException e) {
                callOnError(e);
                Log.e(TAG, "CameraThread:startCapture:", e);
            }
        }

        /**
         * Stop the active recording (if any): detach the muxer and video encoder
         * under mSync, stop the camera capture, and fire onStopRecording().
         * Safe to call when not recording.
         */
        public void handleStopRecording() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleStopRecording:mMuxer=" + mMuxer);
            final MediaMuxerWrapper muxer;
            final UVCCamera camera;
            synchronized (mSync) {
                muxer = mMuxer;
                mMuxer = null;
                mVideoEncoder = null;
                // FIX: snapshot the camera under the lock; the original later
                // dereferenced mUVCCamera unguarded (NPE once the camera closed)
                camera = mUVCCamera;
                if (camera != null) {
                    camera.stopCapture();
                }
            }
            try {
                mWeakCameraView.get().setVideoEncoder(null);
            } catch (final Exception e) {
                // ignore: the view may already be gone
            }
            if (muxer != null) {
                muxer.stopRecording();
                if (camera != null) {
                    camera.setFrameCallback(null, 0);
                }
                // you should not wait here
                callOnStopRecording();
            }
        }

        // Per-frame callback: feeds the buffer encoder (when recording with
        // encoder type 2), optionally cuts a still JPEG, and forwards the raw
        // NV21 bytes to registered frame callbacks.
        private final IFrameCallback mIFrameCallback = new IFrameCallback() {
            @Override
            public void onFrame(final ByteBuffer frame) {
                // NOTE(review): frame.array() requires a heap-backed buffer —
                // confirm the native layer delivers array-backed ByteBuffers.
                // Fetch it once instead of three times per frame.
                final byte[] data = frame.array();
                // FIX: the unconditional Log.e on every frame spams logcat and
                // costs a string build per frame; gate it like all other logging.
                if (DEBUG) Log.v(TAG, "CameraThread:IFrameCallback: " + data.length);

                final MediaVideoBufferEncoder videoEncoder;
                synchronized (mSync) {
                    videoEncoder = mVideoEncoder;
                }
                if (videoEncoder != null) {
                    videoEncoder.frameAvailableSoon();
                    videoEncoder.encode(frame);
                }

                // capture one still image when requested
                cutPreviewYUVToJPEG(data);

                // hand the raw NV21 image to registered frame callbacks
                callOnPreviewResult(data, UVCCamera.PIXEL_FORMAT_NV21);
            }
        };

        /**
         * Ask the media scanner to index the file at {@code path} so it shows up
         * in gallery apps. Also releases this thread when the handler was
         * already released or the parent Activity is destroyed/gone.
         */
        public void handleUpdateMedia(final String path) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleUpdateMedia:path=" + path);
            final Activity parent = mWeakParent.get();
            final boolean released = (mHandler == null) || mHandler.mReleased;
            if (parent != null && parent.getApplicationContext() != null) {
                try {
                    if (DEBUG) Log.i(TAG, "MediaScannerConnection#scanFile");
                    MediaScannerConnection.scanFile(parent.getApplicationContext(), new String[]{ path }, null, null);
                } catch (final Exception e) {
                    Log.e(TAG, "CameraThread:handleUpdateMedia:", e);
                }
                if (released || parent.isDestroyed())
                    handleRelease();
            } else {
                Log.w(TAG, "Activity already destroyed");
                // give up to add this movie to MediaStore now.
                // Seeing this movie on Gallery app etc. will take a lot of time.
                handleRelease();
            }
        }

        /**
         * Close the camera, drop all callbacks and — unless a recording is still
         * finishing — mark the handler released and quit this thread's looper,
         * which ends the camera thread.
         */
        public void handleRelease() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleRelease:mIsRecording=" + mIsRecording);
            handleClose();
            mCallbacks.clear();
            if (!mIsRecording) {
                // NOTE(review): assumes mHandler and Looper.myLooper() are non-null
                // here, i.e. this runs on the camera thread after the handler was
                // created — confirm against the (unseen) run() method.
                mHandler.mReleased = true;
                Looper.myLooper().quit();
            }
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleRelease:finished");
        }

        // Encoder lifecycle listener: wires prepared video encoders into the
        // camera view / camera capture, and tears everything down on stop.
        private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
            @Override
            public void onPrepared(final MediaEncoder encoder) {
                if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
                mIsRecording = true;
                if (encoder instanceof MediaVideoEncoder)
                    try {
                        mWeakCameraView.get().setVideoEncoder((MediaVideoEncoder)encoder);
                    } catch (final Exception e) {
                        Log.e(TAG, "onPrepared:", e);
                    }
                if (encoder instanceof MediaSurfaceEncoder)
                    try {
                        mWeakCameraView.get().setVideoEncoder((MediaSurfaceEncoder)encoder);
                        // surface encoder: the camera renders straight into the encoder's input surface
                        mUVCCamera.startCapture(((MediaSurfaceEncoder)encoder).getInputSurface());
                    } catch (final Exception e) {
                        Log.e(TAG, "onPrepared:", e);
                    }
            }

            @Override
            public void onStopped(final MediaEncoder encoder) {
                if (DEBUG) Log.v(TAG_THREAD, "CameraThread:onStopped:encoder=" + encoder);
                if ((encoder instanceof MediaVideoEncoder)
                        || (encoder instanceof MediaSurfaceEncoder))
                    try {
                        mIsRecording = false;
                        final Activity parent = mWeakParent.get();
                        mWeakCameraView.get().setVideoEncoder(null);
                        synchronized (mSync) {
                            if (mUVCCamera != null) {
                                mUVCCamera.stopCapture();
                            }
                        }
                        final String path = encoder.getOutputPath();
                        if (!TextUtils.isEmpty(path)) {
                            // delay the media-scan so the muxer finishes writing the file
                            mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_MEDIA_UPDATE, path), 1000);
                        } else {
                            final boolean released = (mHandler == null) || mHandler.mReleased;
                            if (released || parent == null || parent.isDestroyed()) {
                                handleRelease();
                            }
                        }
                    } catch (final Exception e) {
                        // NOTE(review): message text says "onPrepared:" — looks like a copy/paste slip
                        Log.e(TAG, "onPrepared:", e);
                    }
            }
        };

        /**
         * Prepares the shutter sound played when a still image is captured.
         * Releases any previously loaded pool before creating a new one.
         */
        @SuppressWarnings("deprecation")
        private void loadShutterSound(final Context context) {
            // STREAM_SYSTEM_ENFORCED is a hidden constant on AudioSystem, so it is
            // read reflectively; fall back to STREAM_SYSTEM when unavailable.
            int streamType = AudioManager.STREAM_SYSTEM;	// set appropriate according to your app policy
            try {
                final Field sseField = Class.forName("android.media.AudioSystem")
                        .getDeclaredField("STREAM_SYSTEM_ENFORCED");
                streamType = sseField.getInt(null);
            } catch (final Exception ignored) {
                // keep the STREAM_SYSTEM default
            }
            if (mSoundPool != null) {
                try {
                    mSoundPool.release();
                } catch (final Exception ignored) {
                    // best effort — releasing twice may throw on some devices
                }
                mSoundPool = null;
            }
            // Load the shutter click from the library's raw resources.
            mSoundPool = new SoundPool(2, streamType, 0);
            mSoundId = mSoundPool.load(context, R.raw.camera_click, 1);
        }

        // Enable auto focus (NOTE: "Foucs" typo is part of the public API and kept
        // for caller compatibility). No-op unless the camera is open and previewing.
        public void handleCameraFoucs() {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handleCameraFoucs");
            if ((mUVCCamera != null) && mIsPreviewing) {
                mUVCCamera.setAutoFocus(true);
            }
        }

        /**
         * Changes the preview resolution.
         *
         * @param size two-element array {width, height}; ignored when the camera
         *             is closed, not previewing, or the array is invalid
         */
        public void handlePreviewSize(int[] size) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handlePreviewSize");
            if ((mUVCCamera == null) || !mIsPreviewing)
                return;
            // FIX: guard against a null or too-short array before indexing it
            if ((size == null) || (size.length < 2)) {
                Log.w(TAG, "handlePreviewSize: invalid size argument");
                return;
            }
            mWidth = size[0];
            mHeight = size[1];
            mUVCCamera.setPreviewSize(mWidth, mHeight);
        }

        // Request that the next preview frame be saved as a JPEG at picturePath.
        // (Order matters: re-arm the capture flag before publishing the new path,
        // since the frame callback may read both fields.)
        public void handlePreviewYUVToJPEG(String picturePath) {
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:handlePreviewYUVToJPEG");
            mIsCuted = false; // re-arm: next frame will be captured
            mPicturePath = picturePath;
        }

        /**
         * Saves one NV21 preview frame as a JPEG at {@code mPicturePath}.
         * Does nothing when no capture was requested or one was already taken.
         *
         * @param data raw NV21 frame of mWidth x mHeight pixels
         */
        private void cutPreviewYUVToJPEG(byte[] data) {
            if (TextUtils.isEmpty(mPicturePath))
                return;
            if (mIsCuted)
                return;
            if (DEBUG) Log.v(TAG_THREAD, "CameraThread:cutPreviewYUVToJPEG");
            // FIX: guard against NPE when loadShutterSound() was never called
            if (mSoundPool != null) {
                mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f);	// play shutter sound
            }
            YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
            ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length);
            boolean result = yuvImage.compressToJpeg(new Rect(0, 0, mWidth, mHeight), 100, bos);
            if (result) {
                byte[] buffer = bos.toByteArray();
                File file = new File(mPicturePath);
                FileOutputStream fos = null;
                try {
                    fos = new FileOutputStream(file);
                    // fixing bm is null bug instead of using BitmapFactory.decodeByteArray
                    fos.write(buffer);
                } catch (IOException e) {
                    // FileNotFoundException is an IOException — single catch covers both
                    e.printStackTrace();
                } finally {
                    // FIX: original closed inside try, leaking the stream when write() threw
                    if (fos != null) {
                        try {
                            fos.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
                callOnCaptureResult(mPicturePath);
                mIsCuted = true;
                mPicturePath = null;
            }
            try {
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void run() {
            // Thread entry: attach a Looper so the reflectively-created handler
            // (an AbstractUVCCameraHandler subclass) can process messages on it.
            Looper.prepare();
            AbstractUVCCameraHandler handler = null;
            try {
                // The concrete handler class is chosen by the caller; it must expose
                // a constructor taking this CameraThread.
                final Constructor<? extends AbstractUVCCameraHandler> constructor = mHandlerClass.getDeclaredConstructor(CameraThread.class);
                handler = constructor.newInstance(this);
            } catch (final NoSuchMethodException e) {
                Log.w(TAG, e);
            } catch (final IllegalAccessException e) {
                Log.w(TAG, e);
            } catch (final InstantiationException e) {
                Log.w(TAG, e);
            } catch (final InvocationTargetException e) {
                Log.w(TAG, e);
            }
            if (handler != null) {
                // Publish the handler and wake any thread blocked waiting for it
                // (presumably the factory that started this thread — see mSync use below).
                synchronized (mSync) {
                    mHandler = handler;
                    mSync.notifyAll();
                }
                // Blocks here servicing messages until Looper.quit() is called.
                Looper.loop();
                // Post-loop cleanup: free the shutter sound and mark the handler dead.
                if (mSoundPool != null) {
                    mSoundPool.release();
                    mSoundPool = null;
                }
                if (mHandler != null) {
                    mHandler.mReleased = true;
                }
            }
            mCallbacks.clear();
            // Final handshake: clear the handler reference and wake any waiters so
            // they observe the terminated state instead of blocking forever.
            synchronized (mSync) {
                mHandler = null;
                mSync.notifyAll();
            }
        }

        // Fan out onOpen() to every registered callback; a callback that throws is
        // dropped from the set. NOTE(review): removal mid-iteration assumes mCallbacks
        // is a copy-on-write collection — confirm against its declaration.
        private void callOnOpen() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onOpen();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onClose() to every registered callback; failing callbacks are dropped.
        private void callOnClose() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onClose();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onStartPreview() to every registered callback; failing callbacks are dropped.
        private void callOnStartPreview() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onStartPreview();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onStopPreview() to every registered callback; failing callbacks are dropped.
        private void callOnStopPreview() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onStopPreview();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onStartRecording() to every registered callback; failing callbacks are dropped.
        private void callOnStartRecording() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onStartRecording();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onStopRecording() to every registered callback; failing callbacks are dropped.
        private void callOnStopRecording() {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onStopRecording();
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Fan out onError(e) to every registered callback; failing callbacks are dropped.
        // NOTE(review): on a callback failure the ORIGINAL error `e` is logged, not the
        // callback's own exception — presumably intentional, but worth confirming.
        private void callOnError(final Exception e) {
            for (final CameraCallback cb : mCallbacks) {
                try {
                    cb.onError(e);
                } catch (final Exception ignored) {
                    Log.w(TAG, e);
                    mCallbacks.remove(cb);
                }
            }
        }

        // Deliver one preview frame to every registered frame callback; a callback
        // that throws is dropped from the set.
        private void callOnPreviewResult(byte[] data, int type) {
            for (final OnCameraFrameCallBack cb : mOnCameraFrameCallBacks) {
                try {
                    cb.onPreviewResult(data, type);
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mOnCameraFrameCallBacks.remove(cb);
                }
            }
        }

        // Report a completed still-image capture (its file path) to every registered
        // frame callback; a callback that throws is dropped from the set.
        private void callOnCaptureResult(String picturePath) {
            for (final OnCameraFrameCallBack cb : mOnCameraFrameCallBacks) {
                try {
                    cb.onCaptureResult(picturePath);
                } catch (final Exception ex) {
                    Log.w(TAG, ex);
                    mOnCameraFrameCallBacks.remove(cb);
                }
            }
        }

    }
}