/*
 *  UVCCamera
 *  library and sample to access to UVC web camera on non-rooted Android device
 *
 * Copyright (c) 2014-2017 saki t_saki@serenegiant.com
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 *
 *  All files in the folder are under this Apache License, Version 2.0.
 *  Files in the libjpeg-turbo, libusb, libuvc, rapidjson folder
 *  may have a different license, see the respective files.
 */

package com.youdo.service;

import static com.youdo.uvccamera.CameraConstants.*;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaScannerConnection;
import android.media.SoundPool;
import android.os.BatteryManager;
import android.os.Build;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.RemoteCallbackList;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;

import com.serenegiant.encoder.MediaAudioEncoder;
import com.serenegiant.encoder.MediaEncoder;
import com.serenegiant.encoder.MediaMuxerWrapper;
import com.serenegiant.encoder.MediaVideoEncoder;
import com.serenegiant.opengl.renderer.IRendererCommon;
import com.serenegiant.opengl.renderer.RenderHolderCallback;
import com.serenegiant.opengl.renderer.RendererHolder;
import com.serenegiant.usb.IFrameCallback;
import com.serenegiant.usb.Size;
import com.serenegiant.usb.USBMonitor.UsbControlBlock;
import com.serenegiant.usb.UVCCamera;
import com.serenegiant.usb.UVCParam;
import com.serenegiant.uvccamera.R;
import com.youdo.uvccamera.CameraConstants;

import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * 相机Server
 * <p>
 * 支持分段录屏;支持非充电低电量停止录屏;支持存储空间不足覆盖存储;by ljb 20250303;
 * 增加镜像;by ljj on 2025.11.13
 */
public final class CameraServer extends Handler {
    private static boolean DEBUG = false;
    private static final String TAG = "CameraServer";
    private static final int DEFAULT_WIDTH = CameraConstants.DEFAULT_WIDTH;
    private static final int DEFAULT_HEIGHT = CameraConstants.DEFAULT_HEIGHT;

    private int mFrameWidth = DEFAULT_WIDTH, mFrameHeight = DEFAULT_HEIGHT;
    private String mSaveVideoPath = DEFAULT_SAVE_PATH + "/Video";
    private String mSavePicPath = DEFAULT_SAVE_PATH + "/Picture";
    private int mHorizontal = 0;
    private int mVertical = 0;

    private static class CallbackCookie {
        boolean isConnected;
    }

    private final RemoteCallbackList<IUVCServiceCallback> mCallbacks
            = new RemoteCallbackList<IUVCServiceCallback>();
    private int mRegisteredCallbackCount;

    private RendererHolder mRendererHolder;
    private final WeakReference<CameraThread> mWeakThread;

    public static CameraServer createServer(final Context context, final UsbControlBlock ctrlBlock, final int vid, final int pid) {
        if (DEBUG) Log.d(TAG, "createServer:");
        final CameraThread thread = new CameraThread(context, ctrlBlock);
        thread.start();
        return thread.getHandler();
    }

    private CameraServer(final CameraThread thread) {
        if (DEBUG) Log.d(TAG, "Constructor:");
        mWeakThread = new WeakReference<CameraThread>(thread);
        mRegisteredCallbackCount = 0;
        mRendererHolder = new RendererHolder(mFrameWidth, mFrameHeight, mRenderHolderCallback);
    }

    @Override
    protected void finalize() throws Throwable {
        if (DEBUG) Log.i(TAG, "finalize:");
        release();
        super.finalize();
    }

    public void registerCallback(final IUVCServiceCallback callback) {
        if (DEBUG) Log.d(TAG, "registerCallback:");
        mCallbacks.register(callback, new CallbackCookie());
        mRegisteredCallbackCount++;
    }

    public boolean unregisterCallback(final IUVCServiceCallback callback) {
        if (DEBUG) Log.d(TAG, "unregisterCallback:");
        mCallbacks.unregister(callback);
        mRegisteredCallbackCount--;
        if (mRegisteredCallbackCount < 0) mRegisteredCallbackCount = 0;
        return mRegisteredCallbackCount == 0;
    }

    public void release() {
        if (DEBUG) Log.d(TAG, "release:");
        disconnect();
        mCallbacks.kill();
        if (mRendererHolder != null) {
            mRendererHolder.release();
            mRendererHolder = null;
        }
    }

    //********************************************************************************
//********************************************************************************
    public void resize(final int width, final int height) {
        if (DEBUG) Log.d(TAG, String.format("resize(%d,%d)", width, height));
        if (!isRecording()) {
            mFrameWidth = width;
            mFrameHeight = height;
            if (mRendererHolder != null) {
                mRendererHolder.resize(width, height);
            }
        }
    }

    public void connect() {
        if (DEBUG) Log.d(TAG, "connect:width " + mFrameWidth + ",height:" + mFrameHeight);
        final CameraThread thread = mWeakThread.get();
        if (!thread.isCameraOpened()) {
            sendMessage(obtainMessage(MSG_OPEN));
            sendMessage(obtainMessage(MSG_PREVIEW_START, mFrameWidth, mFrameHeight, mRendererHolder.getSurface()));
        } else {
            if (DEBUG) Log.d(TAG, "already connected, just call callback");
            processOnCameraStart();
        }
    }

    public void connectSlave() {
        if (DEBUG) Log.d(TAG, "connectSlave:");
        final CameraThread thread = mWeakThread.get();
        if (thread.isCameraOpened()) {
            processOnCameraStart();
        }
    }

    public void disconnect() {
        if (DEBUG) Log.d(TAG, "disconnect:");
        stopRecording();
        final CameraThread thread = mWeakThread.get();
        if (thread == null) return;
        synchronized (thread.mSync) {
            sendEmptyMessage(MSG_PREVIEW_STOP);
            sendEmptyMessage(MSG_CLOSE);
            // wait for actually preview stopped to avoid releasing Surface/SurfaceTexture
            // while preview is still running.
            // therefore this method will take a time to execute
            try {
                thread.mSync.wait();
            } catch (final InterruptedException e) {
            }
        }
    }

    public boolean isConnected() {
        final CameraThread thread = mWeakThread.get();
        return (thread != null) && thread.isCameraOpened();
    }

    public boolean isRecording() {
        final CameraThread thread = mWeakThread.get();
        return (thread != null) && thread.isRecording();
    }

    public void addSurface(final int id, final Surface surface, final boolean isRecordable, final IUVCServiceOnFrameAvailable onFrameAvailableListener) {
        if (DEBUG) Log.d(TAG, "addSurface:id=" + id + ",surface=" + surface);
        if (mRendererHolder != null)
            mRendererHolder.addSurface(id, surface, isRecordable);
    }

    public void removeSurface(final int id) {
        if (DEBUG) Log.d(TAG, "removeSurface:id=" + id);
        if (mRendererHolder != null)
            mRendererHolder.removeSurface(id);
    }

    public void startRecording() {
        if (!isRecording())
            sendEmptyMessage(MSG_CAPTURE_START);
    }

    public void stopRecording() {
        if (isRecording())
            sendEmptyMessage(MSG_CAPTURE_STOP);
    }

    public void captureStill(final String path) {
        if (mRendererHolder != null) {
            final File dir = new File(mSavePicPath);
            if (!dir.exists()) {
                dir.mkdirs();
            }
            mRendererHolder.captureStill(mSavePicPath + "/" + path);
            sendMessage(obtainMessage(MSG_CAPTURE_STILL, path));
        }
    }

    public CameraAction getAttribute(int attributeType) {
        if (DEBUG) Log.d(TAG, "getAttribute:" + attributeType);
        switch (attributeType) {
            case ACTION_GET_SAVE_MOVIES_PATH:
                return new CameraAction(attributeType, mSaveVideoPath);
            case ACTION_GET_SAVE_PICTURE_PATH:
                return new CameraAction(attributeType, mSavePicPath);
        }
        return new CameraAction(attributeType, "");
    }

    public void setAction(CameraAction action) {
        sendMessage(obtainMessage(MSG_ACTION_SET, action));// #CameraAction4
        if (action.getType() == ACTION_SET_SAVE_PATH) {
            mSaveVideoPath = action.getString() + "/Video";
            mSavePicPath = action.getString() + "/Picture";
        }
    }

    public void setOrientation(int open, int horizontal) {
        if (horizontal == 1) {
            mVertical = open;
        } else {
            mHorizontal = open;
        }
        if (mHorizontal == 1 && mVertical == 1) {
            mRendererHolder.setMirror(IRendererCommon.MIRROR_BOTH);
        } else if (mHorizontal == 1) {
            mRendererHolder.setMirror(IRendererCommon.MIRROR_HORIZONTAL);
        } else if (mVertical == 1) {
            mRendererHolder.setMirror(IRendererCommon.MIRROR_VERTICAL);
        } else {
            mRendererHolder.setMirror(IRendererCommon.MIRROR_NORMAL);
        }
        mRendererHolder.setOrientation(open, horizontal);
        sendMessage(obtainMessage(MSG_SET_ORIENTATION, open, horizontal));
    }

    //********************************************************************************
    private void processOnCameraStart() {
        if (DEBUG) Log.d(TAG, "processOnCameraStart:");
        try {
            final int n = mCallbacks.beginBroadcast();
            for (int i = 0; i < n; i++) {
                if (!((CallbackCookie) mCallbacks.getBroadcastCookie(i)).isConnected)
                    try {
                        mCallbacks.getBroadcastItem(i).onConnected();
                        ((CallbackCookie) mCallbacks.getBroadcastCookie(i)).isConnected = true;
                    } catch (final Exception e) {
                        Log.e(TAG, "failed to call IOverlayCallback#onFrameAvailable");
                    }
            }
            mCallbacks.finishBroadcast();
        } catch (final Exception e) {
            Log.w(TAG, e);
        }
    }

    private void processOnCameraStop() {
        if (DEBUG) Log.d(TAG, "processOnCameraStop:");
        final int n = mCallbacks.beginBroadcast();
        for (int i = 0; i < n; i++) {
            if (((CallbackCookie) mCallbacks.getBroadcastCookie(i)).isConnected)
                try {
                    mCallbacks.getBroadcastItem(i).onDisConnected();
                    ((CallbackCookie) mCallbacks.getBroadcastCookie(i)).isConnected = false;
                } catch (final Exception e) {
                    Log.e(TAG, "failed to call IOverlayCallback#onDisConnected");
                }
        }
        mCallbacks.finishBroadcast();
    }

    public void processOnCameraChange(int status, String msg) {
        if (DEBUG) Log.d(TAG, "processOnCameraChange: " + status + ",msg: " + msg);
        final int n = mCallbacks.beginBroadcast();
        for (int i = 0; i < n; i++) {
            try {
                mCallbacks.getBroadcastItem(i).onCameraChange(status, msg);
            } catch (final Exception e) {
                Log.e(TAG, "failed to call IOverlayCallback#onCameraChange");
            }
        }
        mCallbacks.finishBroadcast();
    }

    public void processOnFrameReceived(byte[] result, int frameId) {
        int length = result.length;
        if (DEBUG) Log.d(TAG, "processOnFrameReceived: frameId " + frameId + ",length: " + length);

        final int n = mCallbacks.beginBroadcast();
        for (int i = 0; i < n; i++) {
            try {
                mCallbacks.getBroadcastItem(i).onFrameReceived(new BufferWrapper(result, frameId, length));
            } catch (final Exception e) {
                Log.e(TAG, "failed to call IOverlayCallback#processOnFrameReceived");
            }
        }
        mCallbacks.finishBroadcast();
    }
//**********************************************************************

    @Override
    public void handleMessage(final Message msg) {
        final CameraThread thread = mWeakThread.get();
        if (thread == null) return;
        switch (msg.what) {
            case MSG_OPEN:
                thread.handleOpen();
                break;
            case MSG_CLOSE:
                thread.handleClose();
                break;
            case MSG_PREVIEW_START:// 开始渲染
                thread.handleStartPreview(msg.arg1, msg.arg2, (Surface) msg.obj);
                break;
            case MSG_PREVIEW_STOP:
                thread.handleStopPreview();
                break;
            case MSG_CAPTURE_STILL:
                thread.handleCaptureStill((String) msg.obj);
                break;
            case MSG_CAPTURE_START:
                thread.handleStartRecording();
                break;
            case MSG_CAPTURE_STOP:
                thread.handleStopRecording(2, "手动停止录像1");
                break;
            case MSG_MEDIA_UPDATE:
                thread.handleUpdateMedia((String) msg.obj);
                break;
            case MSG_ACTION_SET:
                thread.handleSetAction((CameraAction) msg.obj);// #CameraAction5
                break;
            case MSG_SET_ORIENTATION:
                thread.handleSetOrientation(msg.arg1, msg.arg2);
                break;
            case MSG_RELEASE:
                thread.handleRelease();
                break;
            default:
                throw new RuntimeException("unsupported message:what=" + msg.what);
        }
    }

    private final RenderHolderCallback mRenderHolderCallback
            = new RenderHolderCallback() {
        @Override
        public void onCreate(final Surface surface) {
        }

        @Override
        public void onFrameAvailable() {
            final CameraThread thread = mWeakThread.get();
            if ((thread != null) && (thread.mVideoEncoder != null)) {
                try {
                    thread.mVideoEncoder.frameAvailableSoon();
                } catch (final Exception e) {
                    //
                }
            }
        }

        @Override
        public void onDestroy() {
        }
    };

    private static final class CameraThread extends Thread {
        private static final String TAG_THREAD = "CameraThread";
        private final Object mSync = new Object();
        private boolean mIsRecording;
        private final WeakReference<Context> mWeakContext;
        private int mEncoderSurfaceId;
        private int mFrameWidth, mFrameHeight;

        private int mHorizontal = 0;
        private int mVertical = 0;
        /**
         * 刚开始使用 CameraServer.postDelayed()
         * 定时循环 停止录像再重新启动录像
         * 因为没延迟，会造成偶尔 mMuxer 启动录像失败，直接停止，
         * 出现大部分分段录屏成功,部分分段录屏 失败;
         */
        private int mSegmentPeriod;// 分段时间
        private boolean mEnableFrameCallback = false;//  UVCCamera.PIXEL_FORMAT_YUV
        private boolean mAudioRecord = false;//  UVCCamera.PIXEL_FORMAT_YUV
        private int mRecordMode = 0;//  录像模式,0-空间不够删除,1-极端模式(只能有2个文件)
        private String mSavePath = DEFAULT_SAVE_PATH;//  录像保存路径
        /**
         * shutter sound
         */
        private SoundPool mSoundPool;
        private int mSoundId;
        private CameraServer mHandler;
        private UsbControlBlock mCtrlBlock;
        /**
         * for accessing UVC camera
         */
        private volatile UVCCamera mUVCCamera;
        /**
         * muxer for audio/video recording
         */
        private MediaMuxerWrapper mMuxer;
        private MediaVideoEncoder mVideoEncoder;
        private ScheduledExecutorService executor;
        private int mFrameId = 0;// 帧id
        private final Runnable mSegmentRecordRunnable = new Runnable() {// 分段录制
            @Override
            public synchronized void run() {
                if (isRecording()) {
                    try {
                        if (!isEnableRecord(true)) {
                            return;
                        }
                        mMuxer.stopRecording();
                        mWeakContext.get().sendBroadcast(new Intent("com.youdo.scan.file"));
                        try {
                            Thread.sleep(1000);// 需要延迟下,否则会出现录制失败
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                        if (isRecording()) {
                            mMuxer = new MediaMuxerWrapper(mRecordMode, mSavePath, ".mp4").setDebugMode(DEBUG);
                            if (DEBUG) Log.d(TAG, "准备录制下一段视频:" + mMuxer.getOutputPath());
                            mHandler.processOnCameraChange(3, "准备录制下一段视频:" + mMuxer.getOutputPath());
                            new MediaVideoEncoder(mMuxer, mFrameWidth, mFrameHeight, mHorizontal, mVertical, mMediaEncoderListener);
                            if (mAudioRecord) {
                                new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
                            }
                            mMuxer.prepare();
                            mMuxer.startRecording();
                        }
                    } catch (final IOException e) {
                        Log.e(TAG, "SegmentRecord:", e);
                    }
                }
            }
        };
        private final BroadcastReceiver mBatteryTracker = new BroadcastReceiver() {// 低电量且不充电停止录制
            @Override
            public void onReceive(Context context, Intent intent) {
                if (intent != null) {
                    String action = intent.getAction();
                    //Log.e(TAG, "-------------广播,action " + action);
                    if (Intent.ACTION_BATTERY_CHANGED.equals(action)) { // 电量
                        boolean isCharging = intent.getIntExtra(BatteryManager.EXTRA_PLUGGED, 0) != 0;
                        int scale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
                        if (scale != 0) {// 广播收到电量75%时,直接读取到的电量是70%,广播收到电量5%时,直接读取到的电量是29%,波动性很大
                            int level = (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / scale);
                            if (DEBUG)
                                Log.e(TAG, "onReceive: 当前电量: " + level + "%,是否充电 " + isCharging);
                            if (!isCharging && level < 5) {
                                handleStopRecording(1, "低电量停止录屏1");
                            }
                        }
                    }
                }
            }
        };

        private CameraThread(final Context context, final UsbControlBlock ctrlBlock) {
            super("CameraThread");
            if (DEBUG) Log.d(TAG_THREAD, "Constructor:");
            mWeakContext = new WeakReference<Context>(context);
            mCtrlBlock = ctrlBlock;
            loadShutterSound(context);
        }

        @Override
        protected void finalize() throws Throwable {
            Log.i(TAG_THREAD, "CameraThread#finalize");
            super.finalize();
        }

        public CameraServer getHandler() {
            if (DEBUG) Log.d(TAG_THREAD, "getHandler:");
            synchronized (mSync) {
                if (mHandler == null)
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        e.getStackTrace();
                    }
            }
            return mHandler;
        }

        public boolean isCameraOpened() {
            return mUVCCamera != null;
        }

        public boolean isRecording() {
            return (mUVCCamera != null) && (mMuxer != null);
        }

        public void handleOpen() {
            if (DEBUG) Log.d(TAG_THREAD, "handleOpen:");
            handleClose();
            synchronized (mSync) {
                Size size = new Size(UVCCamera.FRAME_FORMAT_MJPEG, UVCCamera.DEFAULT_PREVIEW_WIDTH, UVCCamera.DEFAULT_PREVIEW_HEIGHT,
                        UVCCamera.DEFAULT_PREVIEW_FPS, new ArrayList<>(UVCCamera.DEFAULT_PREVIEW_FPS));
                mUVCCamera = new UVCCamera(new UVCParam(size, 0));// 不能出图,by ljb 20250217;
                mUVCCamera.open(mCtrlBlock);
                if (DEBUG) Log.i(TAG, "supportedSize:" + mUVCCamera.getSupportedSize());
            }
            mHandler.processOnCameraStart();
        }

        public void handleClose() {
            if (DEBUG) Log.d(TAG_THREAD, "handleClose:");
            handleStopRecording(2, "手动停止录像2");
            boolean closed = false;
            synchronized (mSync) {
                if (mUVCCamera != null) {
                    mUVCCamera.stopPreview();
                    mUVCCamera.destroy();
                    mUVCCamera = null;
                    closed = true;
                }
                mSync.notifyAll();
            }
            if (closed)
                mHandler.processOnCameraStop();
            if (DEBUG) Log.d(TAG_THREAD, "handleClose:finished");
        }

        public void handleStartPreview(final int width, final int height, final Surface surface) {
            if (DEBUG)
                Log.d(TAG_THREAD, "handleStartPreview 2:width " + width + ",height:" + height);
            synchronized (mSync) {
                if (mUVCCamera == null) return;
                try {
                    mUVCCamera.setPreviewSize(width, height, UVCCamera.FRAME_FORMAT_MJPEG);// 2.1
                } catch (final IllegalArgumentException e) {
                    try {
                        // fallback to YUV mode
                        mUVCCamera.setPreviewSize(width, height, UVCCamera.DEFAULT_PREVIEW_FRAME_FORMAT);// 2.2
                    } catch (final IllegalArgumentException e1) {
                        mUVCCamera.destroy();
                        mUVCCamera = null;
                    }
                }
                if (mUVCCamera == null) return;

                mFrameWidth = width;
                mFrameHeight = height;
                mUVCCamera.setPreviewDisplay(surface);
                mUVCCamera.startPreview();
                Log.d(TAG_THREAD, "setFrameCallback : " + mEnableFrameCallback);
                if (mEnableFrameCallback) {
                    //mUVCCamera.setFrameCallback(mIFrameCallback, mPixelFormat);
                    prepareVideoEncoder();
                }
            }
        }

        public void handleStopPreview() {
            if (DEBUG) Log.d(TAG_THREAD, "handleStopPreview:");
            synchronized (mSync) {
                if (mUVCCamera != null) {
                    mUVCCamera.stopPreview();
                }
            }
        }

        private void handleResize(final int width, final int height, final Surface surface) {
            synchronized (mSync) {
                if (mUVCCamera != null) {
                    final Size sz = mUVCCamera.getPreviewSize();
                    if ((sz != null) && ((width != sz.width) || (height != sz.height))) {
                        mUVCCamera.stopPreview();
                        try {
                            mUVCCamera.setPreviewSize(width, height);
                        } catch (final IllegalArgumentException e) {
                            try {
                                mUVCCamera.setPreviewSize(sz.width, sz.height);
                            } catch (final IllegalArgumentException e1) {
                                // unexpectedly #setPreviewSize failed
                                mUVCCamera.destroy();
                                mUVCCamera = null;
                            }
                        }
                        if (mUVCCamera == null) return;
                        mFrameWidth = width;
                        mFrameHeight = height;
                        mUVCCamera.setPreviewDisplay(surface);
                        mUVCCamera.startPreview();
                    }
                }
            }
        }

        public void handleCaptureStill(final String path) {
            if (DEBUG) Log.d(TAG_THREAD, "handleCaptureStill:");

            mSoundPool.play(mSoundId, 0.2f, 0.2f, 0, 0, 1.0f);    // play shutter sound

            mWeakContext.get().sendBroadcast(new Intent("com.youdo.scan.file"));
        }

        public synchronized boolean isEnableRecord(boolean isAutoStop) {
            final Context context = mWeakContext.get();
            BatteryManager batteryManager = (BatteryManager) context.getSystemService(Context.BATTERY_SERVICE);
            int level = batteryManager.getIntProperty(BatteryManager.BATTERY_PROPERTY_CAPACITY);
            boolean isCharging = true;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                isCharging = batteryManager.isCharging();
            }
            if (DEBUG)
                Log.d(TAG_THREAD, "isEnableRecord:电量:" + level + "%,充电 " + isCharging + ",mMuxer =" + mMuxer);
            if (!isCharging && level < 5) {
                if (isAutoStop) {
                    handleStopRecording(1, "低电量停止录像2");
                } else {
                    mHandler.processOnCameraChange(1, "低电量禁止录屏");
                }
                return false;
            }
            // 优化视频的存储路径
            MediaMuxerWrapper.initMoviesDirectory(mRecordMode, mSavePath);// /storage/emulated/0
            return true;
        }

        public synchronized void handleStartRecording() {
            if (!isEnableRecord(false)) {
                return;
            }
            try {
                if ((mUVCCamera == null) || (mMuxer != null)) return;
                setBroadcastReceiver(true);
                mMuxer = new MediaMuxerWrapper(mRecordMode, mSavePath, ".mp4").setDebugMode(DEBUG);    // if you record audio only, ".m4a" is also OK.
//				new MediaSurfaceEncoder(mFrameWidth, mFrameHeight, mMuxer, mMediaEncoderListener);
                new MediaVideoEncoder(mMuxer, mFrameWidth, mFrameHeight, mHorizontal, mVertical, mMediaEncoderListener);
                if (mAudioRecord) {
                    // for audio capturing
                    new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
                }
                mMuxer.prepare();
                mMuxer.startRecording();
                if (mSegmentPeriod > 0) {
                    executor = Executors.newSingleThreadScheduledExecutor();
                    executor.scheduleAtFixedRate(mSegmentRecordRunnable, mSegmentPeriod, mSegmentPeriod, TimeUnit.MINUTES);
                }
                mHandler.processOnCameraChange(0, "手动开始录像");
            } catch (final IOException e) {
                Log.e(TAG, "startCapture:", e);
            }
        }

        public synchronized void handleStopRecording(int status, String msg) {
            boolean isRecording = isRecording();
            if (DEBUG)
                Log.d(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer + ",mUVCCamera=" + mUVCCamera + ",isRecording=" + isRecording);
            if (!isRecording) {
                return;
            }
            mHandler.processOnCameraChange(status, msg);
            if (executor != null) {
                executor.shutdownNow();
                executor = null;
            }
            setBroadcastReceiver(false);
            if (mMuxer != null) {
                synchronized (mSync) {
                    if (mUVCCamera != null) {
                        mUVCCamera.stopCapture();
                    }
                }
                mMuxer.stopRecording();
                mMuxer = null;
                // you should not wait here
                mWeakContext.get().sendBroadcast(new Intent("com.youdo.scan.file"));
            }
        }

        private void setBroadcastReceiver(boolean isRegister) {
            final Context context = mWeakContext.get();
            if (context != null) {
                if (isRegister) {
                    IntentFilter ifilter = new IntentFilter();
                    ifilter.addAction(Intent.ACTION_BATTERY_CHANGED);
                    context.registerReceiver(mBatteryTracker, ifilter);
                } else {
                    try {
                        context.unregisterReceiver(mBatteryTracker);
                    } catch (IllegalArgumentException e) {// Receiver not registered
                        if (DEBUG) Log.e(TAG, "StopRecording:", e);
                    }
                }
            }
        }

        public void handleUpdateMedia(final String path) {
            if (DEBUG) Log.d(TAG_THREAD, "handleUpdateMedia:path=" + path);
            final Context context = mWeakContext.get();
            if (context != null) {
                try {
                    if (DEBUG) Log.i(TAG, "MediaScannerConnection#scanFile");
                    MediaScannerConnection.scanFile(context, new String[]{path}, null, null);
                } catch (final Exception e) {
                    Log.e(TAG, "handleUpdateMedia:", e);
                }
            } else {
                Log.w(TAG, "MainActivity already destroyed");
                // give up to add this movice to MediaStore now.
                // Seeing this movie on Gallery app etc. will take a lot of time.
                handleRelease();
            }
        }

        public void handleSetAction(final CameraAction action) {
            switch (action.getType()) {
                case ACTION_SET_DEBUG_MODE:
                    DEBUG = action.getBoolean();
                    break;
                case ACTION_SET_SEGMENT_PERIOD:
                    mSegmentPeriod = action.getInt();
                    break;
                case ACTION_SET_FRAME_CALLBACK:
                    mEnableFrameCallback = action.getBoolean();
                    break;
                case ACTION_SET_SAVE_MODE:
                    mRecordMode = action.getInt();
                    break;
                case ACTION_SET_SAVE_PATH:// #CameraAction6
                    String path = action.getString() + "/Video";
                    if (!TextUtils.isEmpty(path) && !path.equals(mSavePath)) {// 路径发生改变
                        mSavePath = path;
                        mSegmentRecordRunnable.run();
                    }
                    break;
                case ACTION_SET_AUDIO_RECORD:
                    mAudioRecord = action.getBoolean();
                    break;
            }
            if (DEBUG)
                Log.d(TAG_THREAD, "handleSetAction:" + action.getTypeStr() + ",Period:" + mSegmentPeriod + ",Frame:" + mEnableFrameCallback + ",Mode:" + mRecordMode + ",Path:" + mSavePath);
        }

        public void handleRelease() {
            if (DEBUG) Log.d(TAG_THREAD, "handleRelease:");
            handleClose();
            if (mCtrlBlock != null) {
                mCtrlBlock.close();
                mCtrlBlock = null;
            }
            if (!mIsRecording)
                Looper.myLooper().quit();
        }

        public void handleSetOrientation(int open, int horizontal) {
            if (DEBUG) Log.d(TAG_THREAD, "handleSetOrientation:");
            if (horizontal == 1) {
                mVertical = open;
            } else {
                mHorizontal = open;
            }
        }

        // if you need frame data as ByteBuffer on Java side, you can use this callback method with UVCCamera#setFrameCallback
        private final IFrameCallback mIFrameCallback = new IFrameCallback() {

            @Override
            public void onFrame(final ByteBuffer frame) {
                byte[] result = new byte[frame.remaining()];
                frame.get(result);
                if (mFrameId >= 0x0fffffff) {// Integer.MAX_VALUE
                    mFrameId = 0;
                } else {
                    mFrameId++;
                }
                mHandler.processOnFrameReceived(result, mFrameId);
            }
        };

        private final IUVCServiceOnFrameAvailable mOnFrameAvailable = new IUVCServiceOnFrameAvailable() {
            @Override
            public IBinder asBinder() {
                if (DEBUG) Log.d(TAG_THREAD, "asBinder:");
                return null;
            }

            @Override
            public void onFrameAvailable() throws RemoteException {
//				if (DEBUG) Log.d(TAG_THREAD, "onFrameAvailable:");
                if (mVideoEncoder != null)
                    mVideoEncoder.frameAvailableSoon();
            }
        };

        private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
            @Override
            public void onPrepared(final MediaEncoder encoder) {
                if (DEBUG) Log.d(TAG, "onPrepared:encoder=" + encoder);
                mIsRecording = true;
                if (encoder instanceof MediaVideoEncoder)
                    try {
                        mVideoEncoder = (MediaVideoEncoder) encoder;
                        final Surface encoderSurface = mVideoEncoder.getInputSurface();
                        mEncoderSurfaceId = encoderSurface.hashCode();
                        mHandler.mRendererHolder.addSurface(mEncoderSurfaceId, encoderSurface, true);
                    } catch (final Exception e) {
                        Log.e(TAG, "onPrepared:", e);
                    }
            }

            @Override
            public void onStopped(final MediaEncoder encoder) {
                if (DEBUG) Log.v(TAG_THREAD, "onStopped:encoder=" + encoder);
                if ((encoder instanceof MediaVideoEncoder))
                    try {
                        mIsRecording = false;
                        if (mEncoderSurfaceId > 0) {
                            try {
                                mHandler.mRendererHolder.removeSurface(mEncoderSurfaceId);
                            } catch (final Exception e) {
                                Log.w(TAG, e);
                            }
                        }
                        mEncoderSurfaceId = -1;
                        synchronized (mSync) {
                            if (mUVCCamera != null) {
                                mUVCCamera.stopCapture();
                            }
                        }
                        mVideoEncoder = null;
                        final String path = encoder.getOutputPath();
                        if (!TextUtils.isEmpty(path)) {
                            mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_MEDIA_UPDATE, path), 1000);
                        }
                    } catch (final Exception e) {
                        Log.e(TAG, "onPrepared:", e);
                    }
            }
        };

        /**
         * prepare and load shutter sound for still image capturing
         */
        @SuppressWarnings("deprecation")
        private void loadShutterSound(final Context context) {
            if (DEBUG) Log.d(TAG_THREAD, "loadShutterSound:");
            // get system stream type using refrection
            int streamType;
            try {
                final Class<?> audioSystemClass = Class.forName("android.media.AudioSystem");
                final Field sseField = audioSystemClass.getDeclaredField("STREAM_SYSTEM_ENFORCED");
                streamType = sseField.getInt(null);
            } catch (final Exception e) {
                streamType = AudioManager.STREAM_SYSTEM;    // set appropriate according to your app policy
            }
            if (mSoundPool != null) {
                try {
                    mSoundPool.release();
                } catch (final Exception e) {
                }
                mSoundPool = null;
            }
            // load sutter sound from resource
            mSoundPool = new SoundPool(2, streamType, 0);
            mSoundId = mSoundPool.load(context, R.raw.camera_click, 1);
        }

        @Override
        public void run() {
            if (DEBUG) Log.d(TAG_THREAD, "run:");
            Looper.prepare();
            synchronized (mSync) {
                mHandler = new CameraServer(this);
                mSync.notifyAll();
            }
            Looper.loop();
            synchronized (mSync) {
                mHandler = null;
                mSoundPool.release();
                mSoundPool = null;
                mSync.notifyAll();
            }
            if (DEBUG) Log.d(TAG_THREAD, "run:finished");
        }

        //--------------------------帧数据压缩
        private MediaCodec mediaCodec;

        private void prepareVideoEncoder() {
            try {
                MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, UVCCamera.DEFAULT_PREVIEW_WIDTH, UVCCamera.DEFAULT_PREVIEW_HEIGHT);
                format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
                format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
                format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // 关键帧间隔
                format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);

                mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
                mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                Surface inputSurface = mediaCodec.createInputSurface();
                mediaCodec.start();
                mUVCCamera.startCapture(inputSurface);
                // 启动编码器输出线程
                new Thread(this::encodeAndSendVideo).start();
            } catch (IOException | IllegalStateException e) {
                e.printStackTrace();
            }
        }

        private static final int FRAME_RATE = 10;//15;
        private static final float BPP = 0.50f;

        private int calcBitRate() {
            final int bitrate = (int) (BPP * FRAME_RATE * UVCCamera.DEFAULT_PREVIEW_WIDTH * UVCCamera.DEFAULT_PREVIEW_HEIGHT);
            Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
            return bitrate;
        }

        private void encodeAndSendVideo() {
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            while (true) {
                int outputBufferId = mediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
                if (outputBufferId >= 0) {
                    ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferId);
                    if (outputBuffer != null) {
                        byte[] data = new byte[bufferInfo.size];
                        outputBuffer.get(data);
                        if (mFrameId >= 0x0fffffff) {// Integer.MAX_VALUE
                            mFrameId = 0;
                        } else {
                            mFrameId++;
                        }
                        mHandler.processOnFrameReceived(data, mFrameId);

                    }
                    mediaCodec.releaseOutputBuffer(outputBufferId, false);
                }
            }
        }
    }

}
