package com.camera.yl.views

import android.annotation.SuppressLint
import android.content.BroadcastReceiver
import android.content.ContentValues
import android.content.Context
import android.content.Intent
import android.content.IntentFilter
import android.net.Uri
import android.os.Build
import android.os.CountDownTimer
import android.os.Environment
import android.provider.MediaStore
import android.util.Log
import android.view.Surface
import com.camera.base.BaseApp
import com.camera.base.Log.YLogImpl
import com.camera.base.bean.CameraCfg
import com.camera.base.bean.CameraRecordResult
import com.camera.base.bean.CameraResult
import com.camera.base.bean.MarkWaterType
import com.camera.base.bean.YLLocation
import com.camera.base.inerface.CameraServerFinshListener
import com.camera.base.inerface.FullEventListener
import com.camera.base.inerface.FullEventListener.VideoStroageSupplier
import com.camera.base.inerface.ICPInterface
import com.camera.base.inerface.RecordElapsedTimeListener
import com.camera.base.inerface.WebModelFactory
import com.camera.base.util.AudioPlayer
import com.camera.base.util.DefaultWebModelFactory
import com.camera.base.util.EventEnum
import com.camera.base.util.FileUtil
import com.camera.base.util.ToastUtil
import com.camera.base.util.YLCameraConfig
import com.camera.base.v.models.SystemNavBarViewModel
import com.camera.routemodule.routeinterface.BDLocationListener
import com.camera.routemodule.routeinterface.RouterUtil
import com.camera.routemodule.routeinterface.YLWebSocketInterface
import com.camera.yl.yinterface.CameraWindowInterface
import com.google.gson.Gson
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import java.io.File
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.Date
import java.util.Locale

@SuppressLint("StaticFieldLeak")
object CameraManager {
    val TAG = CameraManager::class.java.simpleName
    var context:Context?= null
    // Floating preview window implementation; created lazily by startCameraPreview().
    var cameraWindowInterface : CameraWindowInterface? = null
//    var mCameraMarkMarksBeans = ArrayList<MarksBean>() // watermark entities; one MarksBean per watermark line
    // Binder interface to the camera service; null until initCameraServer() is called.
    var icpInterface: ICPInterface? = null
    var mWebModelFactory: WebModelFactory = DefaultWebModelFactory()
    // Optional websocket used to broadcast camera events; null when not attached.
    var mYLWebSocketInterface: YLWebSocketInterface? = null
    // Storage supplier handed to the SDK; decides where video/photo files are written.
    var mConcreteSupplier: VideoStroageSupplier = ConcreteStroageSupplier()
    var filaPath:String? = null
    var preFilePath:String? = null
    var capturePath:String? = null
//    private var mWindowManager: WindowManager? = null
    private var recordTimer: CountDownTimer? = null
    private var timerForSecond: CountDownTimer? = null // once-per-second watermark clock
    private var timeElapsedInSeconds: Long = 0
    var setsTimeListeners: MutableSet<RecordElapsedTimeListener> = HashSet()
    var preRecordCount = 1 // rotating pre-record file name (cycles 1..20)
    var mSystemNavBarViewModel: SystemNavBarViewModel? = null
    val mBDLocationListener: MBDLocationListener = MBDLocationListener()
    var mYLLocation = YLLocation(-1.1,-1.1)
    var keyEventBoradCastReceiver: BroadcastReceiver? = null
    var zoomCurrent = 1
    var flashCurrent = 0;

    // Last-handled timestamp per hardware-key event, used for debouncing
    private val lastClickTimeMap = mutableMapOf<String, Long>() // key-press debounce
    private val debounceTime: Long = 1500 // debounce interval in milliseconds
    var fAddSurface = false

    /**
     * Detaches this manager from the camera service binder: clears the storage
     * supplier and unregisters the central event listener.
     * @param mICPInterface the camera service binder to detach from
     */
    fun relesaeCameraServer(mICPInterface: ICPInterface){
        // The parameter is non-null by its type; the original `?.let` wrapper was a redundant safe call.
        mICPInterface.setNecessarySupplier(null)
        mICPInterface.removeEventListener(mCameraBinderEventListener)
    }

    /**
     * Binds this manager to the camera service: hooks the finish callback (which
     * pushes the watermark config), registers the central event listener and the
     * storage supplier, starts pre-recording when enabled, and registers the
     * broadcast receiver for hardware keys mapped to "recordVideo".
     * @param mICPInterface the camera service binder
     */
    fun initCameraServer(mICPInterface: ICPInterface){
        this.icpInterface = mICPInterface

        // The parameter is non-null by its type; the original `?.let` wrapper was redundant.
        mICPInterface.setCameraServerFinshCallBack(object : CameraServerFinshListener {
            override fun done() {
                // Camera service fully up -> push the current watermark config.
                setMarksConfig()
            }
        })

        mICPInterface.addEventListener(mCameraBinderEventListener)
        mICPInterface.setNecessarySupplier(mConcreteSupplier)
        if (mICPInterface.preRecordTimeSwitch) {
            // Pre-record switch is on: open the camera immediately and start pre-recording.
            mICPInterface.startPreRecord()
        }

        // Register a receiver for every configured hardware key whose action is "recordVideo".
        // NOTE(review): registerReceiver without an exported/not-exported flag throws on
        // Android 13+ for apps targeting T — confirm the target SDK before relying on this.
        keyEventBoradCastReceiver = KeyEventBroadCastRevicer()
        val keyIntentFilter = IntentFilter()
        for (keyEvent in BaseApp.ylBean.keyEvents) {
            if (keyEvent.todo == "recordVideo") {
                keyIntentFilter.addAction(keyEvent.action)
            }
        }
        BaseApp.getApplication().registerReceiver(keyEventBoradCastReceiver, keyIntentFilter)
    }
    /** Forwards an additional event listener to the camera service, if bound. */
    fun addEventListener(event:FullEventListener){
        icpInterface?.addEventListener(event)
    }
    /** Removes a previously added event listener from the camera service, if bound. */
    fun removeEventListener(event:FullEventListener){
        icpInterface?.removeEventListener(event)
    }
    /** Injects the websocket used to broadcast camera events to remote clients. */
    fun setWebSocketServer(mYLWebSocketInterface: YLWebSocketInterface){
        this.mYLWebSocketInterface = mYLWebSocketInterface
    }
    /** Opens the camera if the service is bound and the camera is not already open. */
    fun openCamera(){
        val server = icpInterface ?: return
        if (server.isICPCameraOpen) return
        server.openICPCamera()
        // Preview surface must be (re)attached after this fresh open.
        fAddSurface = false
    }
    /**
     * Starts a camera preview that is NOT hosted by an Activity: the frames are drawn
     * directly into a WindowManager overlay. Normally unnecessary — intended for
     * third-party apps that need a positioned picture-in-picture style preview
     * (similar to a WeChat call's floating window).
     * @param context Context used to create the overlay window
     * @param mode Boolean display mode flag
     * @param width Int overlay width
     * @param height Int overlay height
     * @param x Int overlay x position
     * @param y Int overlay y position
     * @param isProportion whether to keep the aspect ratio
     */
    fun startCameraPreview(context:Context, mode:Boolean, width:Int, height:Int, x:Int, y:Int,isProportion: Boolean){
        this.context = context
        val window = cameraWindowInterface
            ?: createCameraWindowInterface().also { cameraWindowInterface = it }
        window.startCameraPreview(context, mode, width, height, x, y, isProportion)
    }

    /** Tears down the floating preview window and drops its reference. */
    fun disCameraPreview(context:Context){
        cameraWindowInterface?.also { it.disCameraPreview(context) }
        cameraWindowInterface = null
    }
    // Reserved selector for alternative window implementations (currently only CameraWindowImp).
    var cameraWindowInterfaceMode = 0

    /** Factory for the floating preview window implementation. */
    fun createCameraWindowInterface(): CameraWindowInterface = CameraWindowImp()
    /** Closes the camera if the service is bound. */
    fun releaseCamera(){
        // Safe call, consistent with every other wrapper in this object:
        // the original `icpInterface!!` crashed when called before initCameraServer().
        icpInterface?.closeICPCamera()
    }




    /**
     * Whether the camera is currently recording.
     * @return false when the service is not bound
     */
    fun isRecord():Boolean{
        return icpInterface?.isRecord ?: false
    }

    /**
     * Starts video recording; the result is logged and forwarded to [callback].
     */
    fun startRecord(callback: (CameraRecordResult) -> Unit){
        icpInterface?.startRecord { result ->
            Log.e(TAG, "startRecord: ${result.success} state:${result.state}  msg:${result.msg}")
            callback(result)
        }
    }
    /**
     * Stops the current recording, if the service is bound.
     */
    fun stopRecord(){
        icpInterface?.let { it.stopRecord() }
    }

    /**
     * Takes a photo. On success plays the shutter sound (launch success also implies
     * the SD-card / loop-storage checks passed); on failure shows the error as a toast.
     */
    fun capture(){
        icpInterface?.capture({ result ->
            if (result.success) {
                AudioPlayer.playAudio(BaseApp.getApplication(), com.camera.device.R.raw.cpature)
            } else {
                GlobalScope.launch(Dispatchers.Main) {
                    ToastUtil.showLong(result.msg)
                }
            }
        }, null)
    }

    /**
     * Lists the available cameras.
     * @return null when the service is not bound
     */
    fun getCameras():List<CameraCfg>? = icpInterface?.cameras

    /**
     * Switches to the next camera.
     */
    fun switchCamera(){
        icpInterface?.let { it.switchCamera() }
    }

    /**
     * Switches to the camera with the given ID.
     * @param id Int camera identifier
     */
    fun switchCamera(id:Int){
        icpInterface?.let { it.switchCamera(id) }
    }

    /** Increases zoom by one tenth of the zoom range, clamped to the maximum. */
    fun zoomUp(){
        // Null-safe: the original dereferenced zoomMax with `!!` and crashed when no service was bound.
        val max = icpInterface?.zoomMax ?: return
        // Step is 1/10 of the range, but at least 1: with the original integer division
        // a zoomMax below 10 yielded step 0 and the zoom buttons did nothing.
        val step = (max / 10).coerceAtLeast(1)
        zoomCurrent = (zoomCurrent + step).coerceAtMost(max)
        icpInterface?.handleZoom(zoomCurrent)
    }
    /** Decreases zoom by one tenth of the zoom range, clamped to the minimum of 1. */
    fun zoomDown(){
        // Null-safe: the original dereferenced zoomMax with `!!` and crashed when no service was bound.
        val max = icpInterface?.zoomMax ?: return
        // Step at least 1, mirroring zoomUp (integer division gave 0 for zoomMax < 10).
        val step = (max / 10).coerceAtLeast(1)
        zoomCurrent = (zoomCurrent - step).coerceAtLeast(1)
        icpInterface?.handleZoom(zoomCurrent)
    }

    /** Toggles the flash between off (0) and torch mode (2). */
    fun flash(){
        flashCurrent = if (flashCurrent == 0) 2 else 0
        icpInterface?.handleFlash(flashCurrent)
    }




    /**
     * 自定义的水印控制接口 实际录制或者推流时  会根据配置 默认900ms 会调用mark方法来更新视频流上的水印
     */
//    class CameraMarkSupplierContorl: CameraMarkSupplier() {
//        override fun mark(position:Int): String? {
//            if (position <= markTxts.size){
//                return markTxts.get(position);
//            }
//            return ""
//        }
//    }

    /**
     * Central sink for camera-service events. Each event is mirrored out over the
     * websocket (when attached) and drives the record timer, watermark clock,
     * location updates and media scanning.
     */
    var mCameraBinderEventListener = object : FullEventListener() {
        override fun onFullEventListener(type: Int, data: Any?) {
            YLogImpl.i(TAG, "onFullEventListener: $type   $data")
            when (type) {
                EventEnum.RESULT_OPEN_CAMERA -> {
                    // Null-safe: the original `mSystemNavBarViewModel!!` crashed whenever the
                    // camera opened before setSystemNavBarViewModel() had been called.
                    mSystemNavBarViewModel?.let { addRecordTimerListener(it.mRecordElapsedTimeListener) }
                    startSecondTimer()
                    startLocation()
                    zoomCurrent = 1
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    webModel.cameraWebModel.cameraResultConfig.isState = data as Boolean
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))

                    if (fAddSurface) {
                        cameraWindowInterface?.addSurfaceView(context)
                    }
                }
                EventEnum.RESULT_CLOSE_CAMERA -> {
                    stopLocation()
                    // Null-safe for the same reason as RESULT_OPEN_CAMERA.
                    mSystemNavBarViewModel?.let { removeRecordTimerListener(it.mRecordElapsedTimeListener) }
                    // Camera is closed; stop the watermark clock.
                    stopSecondTimer()
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    webModel.cameraWebModel.cameraResultConfig.isState = true
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))
                }
                EventEnum.RESULT_RECORD_NEW -> {
                    // A new recording file has started. Single cast instead of the
                    // original's repeated `(data as CameraResult)` casts.
                    val result = data as CameraResult
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    webModel.cameraWebModel.cameraResultConfig.path = result.filePath
                    webModel.cameraWebModel.cameraResultConfig.name = result.fileName
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))
                }
                EventEnum.RESULT_RECORD_DONE -> {
                    // One recording file finished; data is its path. Hand the file to the
                    // system media scanner so it shows up in the gallery.
                    Log.e("CameraManager", "onCameraBinderEventListener: " + type + "-" + (data as String))
                    toScanMedia(data)
                }
                EventEnum.RESULT_STOP_RECORD -> {
                    stopRecordTimer()
                    Log.e("CameraManager", "onCameraBinderEventListener: 停止录制"  )
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    webModel.cameraWebModel.cameraResultConfig.isState = true
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))
                }
                EventEnum.RESULT_GET_NEXT_FILE -> {
                    // Websocket-only feature; a harmless no-op when no websocket is attached.
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))
                }
                EventEnum.RESULT_RECORD_ERROR -> {
                    stopRecordTimer()
                    AudioPlayer.playAudio(BaseApp.getApplication(), com.camera.device.R.raw.stop_record_video)

                    // Websocket-only feature; a harmless no-op when no websocket is attached.
                    val webModel = mWebModelFactory.createResultWebModel()
                    webModel.cameraWebModel.cameraResultConfig.code = type
                    mYLWebSocketInterface?.sendMessage(Gson().toJson(webModel))
                }
                EventEnum.RESULT_CAPTURE_SUCCESS -> {
                    // Capture succeeded; data is the photo's path.
                    Log.e(TAG, "拍照成功  返回路径String")
                    toScanMedia(data as String)
                }
                EventEnum.RESULT_CAPTURE_FAIL -> {
                    // Capture failed; data carries the error message (currently unused).
                }
                EventEnum.RESULT_START_RECORD -> {
                    startRecordTimer()
                }
            }
        }
    }
    /**
     * Concrete storage supplier handed to the camera SDK: decides the on-disk
     * locations of full recordings, pre-record rollover files, pre-record cache
     * files and photos. (A `baseDir()` override could customize the SDK root —
     * it must end with '/' — but the SDK default is used here.)
     */
    class ConcreteStroageSupplier : VideoStroageSupplier() {

        /** Builds the shared media file name "DSJ_<yyyyMMdd_HH-mm-ss>_test<ext>". */
        private fun mediaFileName(date: Date, ext: String): String {
            // Locale.US keeps the timestamp digits ASCII regardless of the device locale
            // (the original locale-default SimpleDateFormat could emit localized digits).
            val stamp = SimpleDateFormat("yyyyMMdd_HH-mm-ss", Locale.US).format(date)
            return "DSJ_${stamp}_test$ext"
        }

        /** Ensures the per-day directory "<base>/<yyyyMMdd>" exists and returns it. */
        private fun dayDir(base: String, date: Date): File {
            val ymd = SimpleDateFormat("yyyyMMdd", Locale.US).format(date)
            val dir = File(base + File.separator + ymd)
            if (!dir.exists()) {
                // mkdirs() (not the original mkdir()) so a missing parent cannot
                // silently leave the directory uncreated.
                dir.mkdirs()
            }
            return dir
        }

        override fun getFullVideoCacheFilePath(basePath: String?): String? {
            // Full recording goes into <video base>/<today>/DSJ_<timestamp>_test.mp4.
            val now = Date()
            val file = File(dayDir(FileUtil.getBaseVideoPath(), now), mediaFileName(now, ".mp4"))
            return if (FileUtil.isFileWritable(file)) {
                file.absolutePath.also { CameraManager.filaPath = it }
            } else {
                null // not writable (e.g. SD card problem) -> let the SDK handle the failure
            }
        }

        override fun getPreVideoCacheFilePath(basePath: String?): String? {
            // Name the pre-record target with the timestamp of when this segment will END
            // (now + configured record duration), so it lines up with segment rotation.
            val calendar = Calendar.getInstance()
            calendar.add(Calendar.SECOND, YLCameraConfig.getRecordTime() / 1000)
            val endTime = calendar.time
            val file = File(dayDir(FileUtil.getBaseVideoPath(), endTime), mediaFileName(endTime, ".mp4"))
            Log.e("TAG", "getPreVideoCacheFilePath: " + file.absolutePath)
            return file.absolutePath.also { CameraManager.preFilePath = it }
        }

        /** Pre-record rollover cache path. */
        override fun getPreRecordCacheFilePath(basePath: String?): String? {
            val dir = File(FileUtil.getBasePreVideoPath())
            if (!dir.exists()) {
                dir.mkdirs()
            }
            // Cycle through 1.mp4 .. 20.mp4. Keep the window large: if it is too small and
            // pre-record processing is slow, a new segment can arrive before the previous
            // one finished, causing corruption.
            val file = File(dir, "$preRecordCount.mp4")
            preRecordCount = if (preRecordCount == 20) 1 else preRecordCount + 1
            Log.e("TAG", "getPreRecordCacheFilePath: " + file.absolutePath)
            return file.absolutePath
        }

        override fun getCaptureFilePath(basePath: String?): String? {
            // Photos go into <capture base>/<today>/DSJ_<timestamp>_test.jpg.
            val now = Date()
            val file = File(dayDir(FileUtil.getBaseCapturePath(), now), mediaFileName(now, ".jpg"))
            return file.absolutePath.also { CameraManager.capturePath = it }
        }
    }



    /**
     * Attaches a preview [surface] to the camera so it carries the preview image.
     * @param surface Surface to render into
     * @param width Int surface width
     * @param height Int surface height
     */
    fun add2CameraSurface(surface: Surface,width: Int,height: Int){
        icpInterface?.let { it.addOtherSurface(surface, width, height) }
    }

    /**
     * Detaches a previously attached preview surface.
     * @param surface Surface to remove
     */
    fun removeWCameraSurface(surface: Surface){
        icpInterface?.let { it.removeOtherSurface(surface) }
    }
    /**
     * Reloads the full watermark configuration from the config file.
     */
    fun configMarksAll(){
        icpInterface?.let { it.configMarksBeans() }
    }
    /**
     * Updates a single watermark entry: the GB device name (empty string when unavailable).
     */
    fun setMarksConfig(){
        icpInterface?.markWaterTypeHashMap?.put(
            MarkWaterType.GB_NAME,
            RouterUtil.getGBManagerImpl()?.getGBName() ?: "",
        )
    }

    /**
     *
     * @param cameraMarkTxtConfig List<CameraMarkTxtConfig>
     */
//    fun setMarksConfigTxt(cameraMarkTxtConfig:List<CameraMarkTxtConfig>){
//        for (cameraMarkTxt in cameraMarkTxtConfig) {
//            icpInterface?.marksTxts?.let {
//                for (s in it) {
//                    //可以类似这样修改水印文字
//                    it.set(cameraMarkTxt.pos,cameraMarkTxt.txt)
//                }
//            }
//        }
//    }

    /** Registers a listener for recording elapsed-time ticks. */
    fun addRecordTimerListener(m:RecordElapsedTimeListener){
        setsTimeListeners += m
    }
    /** Unregisters a recording elapsed-time listener. */
    fun removeRecordTimerListener(m:RecordElapsedTimeListener){
        // BUG FIX: this previously called add(m), so listeners were never removed
        // and accumulated (leaking and receiving callbacks forever).
        setsTimeListeners.remove(m)
    }
    /**
     * Starts the recording elapsed-time counter on the main thread and notifies
     * all listeners that recording started.
     */
    private fun startRecordTimer() {
        GlobalScope.launch(Dispatchers.Main) {
            // Cancel any previous timer first: the original leaked the old CountDownTimer
            // when called twice, so seconds were counted at double speed.
            recordTimer?.cancel()
            recordTimer = object : CountDownTimer(Long.MAX_VALUE, 1000) {
                override fun onTick(millisUntilFinished: Long) {
                    timeElapsedInSeconds++
                    updateTimerText()
                }

                override fun onFinish() {
                    // Never reached (Long.MAX_VALUE horizon).
                }
            }.start()
            setsTimeListeners.forEach { listener ->
                listener.onStart()
            }
        }
    }
    /**
     * Stops the recording elapsed-time counter on the main thread, resets the
     * counter and notifies all listeners that recording stopped.
     */
    fun stopRecordTimer() {
        GlobalScope.launch(Dispatchers.Main) {
            timeElapsedInSeconds = 0
            for (listener in setsTimeListeners) {
                listener.onStop()
            }
            recordTimer?.cancel()
        }
    }

    /** Formats the elapsed time as HH:MM:SS and pushes it to every listener. */
    private fun updateTimerText() {
        val total = timeElapsedInSeconds
        val formatted = String.format(
            Locale.getDefault(),
            "%02d:%02d:%02d",
            total / 3600,
            (total / 60) % 60,
            total % 60,
        )
        for (listener in setsTimeListeners) {
            listener.onTime(formatted)
        }
    }
    /**
     * Starts a once-per-second clock that refreshes the time watermark on the video stream.
     */
    private fun startSecondTimer() {
        // Cancel any previous clock first: the original leaked the old timer on
        // repeated camera opens, leaving multiple clocks ticking.
        timerForSecond?.cancel()
        timerForSecond = object : CountDownTimer(Long.MAX_VALUE, 1000) {
            override fun onTick(millisUntilFinished: Long) {
                val time = SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(Date())
                icpInterface?.markWaterTypeHashMap?.set(MarkWaterType.TIME, time)
            }

            override fun onFinish() {
                // Never reached (Long.MAX_VALUE horizon).
            }
        }.start()
    }
    /** Stops the once-per-second watermark clock, if running. */
    private fun stopSecondTimer() {
        timerForSecond?.run { cancel() }
    }

    /** Injects the nav-bar view model whose record-time listener is (de)registered on camera open/close. */
    fun setSystemNavBarViewModel(systemNavBarViewModel: SystemNavBarViewModel){
        this.mSystemNavBarViewModel = systemNavBarViewModel
    }
    /** Starts location updates and subscribes our listener for watermark updates. */
    private fun startLocation() {
        val locator = RouterUtil.getDBLocationImpl() ?: return
        locator.startLocation()
        locator.addLocationListener(mBDLocationListener)
    }
    /** Unsubscribes our listener and stops location updates. */
    private fun stopLocation() {
        val locator = RouterUtil.getDBLocationImpl() ?: return
        locator.removeLocationListener(mBDLocationListener)
        locator.stopLocation()
    }

    /** Receives location fixes and pushes them into the GPS watermark. */
    class MBDLocationListener : BDLocationListener {
        override fun onLocationChange(location: YLLocation) {
            // Store the new fix BEFORE logging/watermarking: the original logged
            // mYLLocation prior to the assignment, so every log line showed the
            // PREVIOUS fix instead of the one just received.
            mYLLocation = location
            Log.e(TAG, "onLocationChange: Lat:${mYLLocation.latitude}  Lon:${mYLLocation.longitude}")
            icpInterface?.markWaterTypeHashMap?.set(MarkWaterType.LOCATION, "Lat:${mYLLocation.latitude}  Lon:${mYLLocation.longitude}")
        }
    }
    /**
     * Receives configured hardware-key broadcasts, debounces them, and maps the
     * matching key-event entry to its action ("recordVideo" toggles recording,
     * "capture" takes a photo).
     */
    class KeyEventBroadCastRevicer : BroadcastReceiver() {
        override fun onReceive(context: Context, intent: Intent) {
            val keycode = intent.getIntExtra("keycode", 0)
            val type = intent.getIntExtra("type", 0)
            val action = intent.action ?: return
            Log.e(TAG, "onReceive:  action:$action  type:$type  keycode:$keycode" )

            // Debounce: ignore a repeat of the same (action, keycode, type) within the window.
            val now = System.currentTimeMillis()
            val debounceKey = action + keycode + type
            val lastHandled = lastClickTimeMap[debounceKey] ?: 0
            if (now - lastHandled < debounceTime) return
            lastClickTimeMap[debounceKey] = now

            for (keyEvent in BaseApp.ylBean.keyEvents) {
                if (action != keyEvent.action || keyEvent.keyCode != keycode || keyEvent.keyEventAction != type) {
                    continue
                }
                when (keyEvent.todo) {
                    "recordVideo" -> if (isRecord()) stopRecord() else startRecord {}
                    "capture" -> capture()
                }
            }
        }
    }

    /**
     * Makes a freshly written media file (.mp4/.jpg/.jpeg/.png) visible to the
     * system gallery. On Android 10+ the file is inserted through MediaStore; on
     * Android 9 and below a media-scanner broadcast is sent (the file must live
     * in a public directory for that to work).
     * @param path absolute path of the file; ignored when null, missing, or of an unsupported type
     */
    fun toScanMedia(path:String?){
        if (path == null) return
        val file = File(path)
        if (!file.exists()) return

        // `lowercase(...)` replaces the deprecated `toLowerCase(...)` (Kotlin 1.5+); identical result.
        val mimeType = when (file.extension.lowercase(Locale.ROOT)) {
            "mp4" -> "video/mp4"
            "jpg", "jpeg" -> "image/jpeg"
            "png" -> "image/png"
            else -> return // unsupported file type
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            // Android 10+: insert via MediaStore with IS_PENDING=1 so the entry only becomes
            // visible to other apps once the content has been fully written.
            // NOTE(review): this COPIES the file into the public collection instead of scanning
            // it in place, leaving the original as a duplicate — confirm that is intended.
            BaseApp.getApplication()?.let { ctx ->
                val values = ContentValues().apply {
                    put(MediaStore.MediaColumns.DISPLAY_NAME, file.name)
                    put(MediaStore.MediaColumns.MIME_TYPE, mimeType)
                    if (mimeType.startsWith("video/")) {
                        put(MediaStore.MediaColumns.RELATIVE_PATH, Environment.DIRECTORY_MOVIES)
                    } else if (mimeType.startsWith("image/")) {
                        put(MediaStore.MediaColumns.RELATIVE_PATH, Environment.DIRECTORY_PICTURES)
                    }
                    put(MediaStore.MediaColumns.IS_PENDING, 1)
                }

                val resolver = ctx.contentResolver
                val collection = if (mimeType.startsWith("video/")) {
                    MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY)
                } else {
                    MediaStore.Images.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY)
                }
                val uri = resolver.insert(collection, values)

                uri?.let {
                    resolver.openOutputStream(it)?.use { os ->
                        file.inputStream().use { input ->
                            input.copyTo(os)
                        }
                    }

                    // Clear the pending flag so the row becomes visible.
                    values.clear()
                    values.put(MediaStore.MediaColumns.IS_PENDING, 0)
                    resolver.update(it, values, null, null)
                }
            }
        } else {
            // Android 9 and below: broadcast a scan request for the file's Uri.
            val intent = Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE)
            intent.data = Uri.fromFile(file)
            BaseApp.getApplication()?.sendBroadcast(intent)
        }
    }
}