package com.yunquan.ohana.ui.editPhoto.video

import android.annotation.SuppressLint
import android.graphics.Bitmap
import android.media.MediaMetadataRetriever
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import android.os.Message
import android.text.Editable
import android.text.TextWatcher
import android.view.View
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import com.shuyu.gsyvideoplayer.GSYVideoManager
import com.shuyu.gsyvideoplayer.builder.GSYVideoOptionBuilder
import com.yunquan.ohana.BR
import com.yunquan.ohana.R
import com.yunquan.ohana.base.bus.RxBus
import com.yunquan.ohana.base.ui.BaseActivity
import com.yunquan.ohana.databinding.ActivityCropVideoBinding
import com.yunquan.ohana.entity.EventEntity
import com.yunquan.ohana.entity.EventType
import com.yunquan.ohana.entity.FramesEntity
import com.yunquan.ohana.entity.PhotoResultEntity
import com.yunquan.ohana.entity.UploadRequestEntity
import com.yunquan.ohana.ui.photos.PhotosActivity
import com.yunquan.ohana.utils.DateUtils
import com.yunquan.ohana.utils.FileUtils
import com.yunquan.ohana.utils.L
import com.yunquan.ohana.utils.NetworkUtils
import com.yunquan.ohana.utils.ToastUtils
import com.yunquan.ohana.utils.Utils
import io.microshow.rxffmpeg.RxFFmpegCommandList
import io.microshow.rxffmpeg.RxFFmpegInvoke
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import java.io.File
import java.util.concurrent.TimeUnit

/**
 * 视频剪辑 — Video trimming screen.
 *
 * Receives a video ([PhotoResultEntity]) and target frames ([FramesEntity]) as JSON in the
 * launch Bundle, previews the video with GSYVideoPlayer, lets the user pick a crop range on a
 * thumbnail timeline plus an optional caption (max 100 chars), then trims and compresses the
 * clip with RxFFmpeg and broadcasts the result on [RxBus] before finishing.
 */
class CropVideoActivity : BaseActivity<ActivityCropVideoBinding, CropVideoViewModel>() {

    companion object {
        // Number of preview thumbnails rendered on the crop timeline.
        private const val FRAME_COUNT = 10

        // Maximum clip length, in milliseconds.
        private const val MAX_TIME = 60 * 1000L

        // Handler message: trimming finished — deliver result and close.
        private const val MSG_CLIP_DONE = 0
    }

    // Timeline thumbnails extracted from the source video.
    private var mBitmaps = mutableListOf<Bitmap>()

    // The video being edited; null until the launch Bundle is parsed (and after onDestroy).
    private var mData: PhotoResultEntity? = null
    private val mFrames = mutableListOf<FramesEntity>()

    // Selected crop range in milliseconds; kept in sync by updateTime().
    private var mStartTime: Long = 0
    private var mEndTime: Long = MAX_TIME

    // NOTE(review): anonymous Handler holds an implicit Activity reference; messages are only
    // posted with a 1 s delay, so the leak window is short, but a static Handler + WeakReference
    // would be cleaner — confirm against project conventions.
    private val mHandler = object : Handler(Looper.getMainLooper()) {
        override fun handleMessage(msg: Message) {
            super.handleMessage(msg)
            when (msg.what) {
                MSG_CLIP_DONE -> {
                    dismissLoading()
                    setResultAndFinish()
                }
            }
        }
    }

    override fun initContentView(savedInstanceState: Bundle?): Int {
        return R.layout.activity_crop_video
    }

    override fun initVariableId(): Int {
        return BR.cropVideoViewModel
    }

    override fun initParam() {
        super.initParam()
        // White status bar with dark icons for this screen.
        window.statusBarColor = resources.getColor(R.color.white)
        window.decorView.systemUiVisibility = View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR
    }

    override fun onResume() {
        super.onResume()
        GSYVideoManager.onResume(false)
    }

    override fun onPause() {
        super.onPause()
        GSYVideoManager.onPause()
    }

    override fun onDestroy() {
        // Drop pending handler messages so a delayed MSG_CLIP_DONE cannot fire after teardown.
        mHandler.removeCallbacksAndMessages(null)
        mData = null
        mBitmaps.clear()
        GSYVideoManager.releaseAllVideos()
        super.onDestroy()
    }

    @SuppressLint("SetTextI18n")
    override fun initData(savedInstanceState: Bundle?) {
        super.initData(savedInstanceState)
        initVideoView()
        val bundle = intent.extras
        bundle?.let { b ->
            val photosJson = b.getString("photos")
            val framesJson = b.getString("frames")

            val photosType = object : TypeToken<List<PhotoResultEntity>>() {}.type
            val photos: List<PhotoResultEntity> = Gson().fromJson(photosJson, photosType)
            mData = photos.first()

            val framesType = object : TypeToken<List<FramesEntity>>() {}.type
            val frames: List<FramesEntity> = Gson().fromJson(framesJson, framesType)
            mFrames.addAll(frames)
        }
        if (mData != null) {
            updateTime(0f, 1f)
            // Thumb margin: 3 s worth of timeline width.
            // Fix: Long division made (duration / 1000) == 0 for sub-second videos,
            // yielding an Infinity margin; clamp the divisor to at least 1 second.
            val durationSec = ((mData!!.duration ?: 0L) / 1000).coerceAtLeast(1)
            val m = 3f / durationSec * Utils.instance.getScreenWidth()
            binding!!.viewOverlay.setThumbMargin(m)
            showLoading()
            extractFrames()
        }

        binding!!.tvCount.text = "0/100"
    }

    override fun initViewObservable() {
        super.initViewObservable()
        // Live character counter for the caption field.
        binding!!.etMessage.addTextChangedListener(object : TextWatcher {
            override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {}

            @SuppressLint("SetTextI18n")
            override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {
                binding!!.tvCount.text = "${s?.length ?: 0}/100"
            }

            override fun afterTextChanged(s: Editable?) {}
        })
        // start/end arrive as fractions (0..1) of the full duration.
        binding!!.viewOverlay.setOnCropRangeChangedListener { start, end ->
            updateTime(start, end)
        }
        binding!!.btnSend.setOnClickListener {
            if (!NetworkUtils.isNetworkAvailable()) {
                ToastUtils.showShort(getString(R.string.not_network_available))
                return@setOnClickListener
            }

            if (mData == null) {
                ToastUtils.showShort(getString(R.string.no_content_sent))
                return@setOnClickListener
            }
            // Release the player before FFmpeg touches the file.
            binding!!.videoContent.release()
            showLoading()
            clipVideo()
        }
    }

    /** Hides the default player chrome (title, back, fullscreen). */
    private fun initVideoView() {
        binding!!.videoContent.titleTextView.visibility = View.GONE
        binding!!.videoContent.backButton.visibility = View.GONE
        binding!!.videoContent.fullscreenButton.visibility = View.INVISIBLE
    }

    /** Configures the player for the source video and starts looped preview of the crop range. */
    private fun initVideoListener() {
        val builder = GSYVideoOptionBuilder()
        builder.setUrl(mData!!.path)
            .setCacheWithPlay(false)
            .setLooping(false)
            .setAutoFullWithSize(false)
            .setShowFullAnimation(true)
            .setReleaseWhenLossAudio(false)
            .setThumbPlay(true)
            .setNeedLockFull(true)
            .setIsTouchWigetFull(true)
            .setIsTouchWiget(true)
            .setHideKey(false)
            .build(binding!!.videoContent)
        binding!!.videoContent.setGSYVideoProgressListener { progress, secProgress, currentPosition, duration ->
            // Loop playback within the selected crop window.
            if (mEndTime != -1L && currentPosition >= mEndTime) {
                try {
                    GSYVideoManager.instance().seekTo(mStartTime)
                } catch (e: Exception) {
                    e.printStackTrace()
                }
            }
        }
        binding!!.videoContent.startPlayLogic()
    }

    /**
     * Converts the fractional crop range ([start], [end] in 0..1) to millisecond bounds,
     * updates the time labels (mm:ss) and seeks the preview to the new start.
     */
    @SuppressLint("DefaultLocale", "SetTextI18n")
    private fun updateTime(start: Float, end: Float) {
        val data = mData ?: return
        if ((data.duration ?: 0L) <= 0) {
            // Backfill a missing/invalid duration from the file itself.
            data.duration = getVideoDurationUsingFFmpeg(data.path!!)
        }
        val duration = data.duration ?: 0L
        mStartTime = (duration * start).toLong()
        mEndTime = (duration * end).toLong().coerceAtMost(MAX_TIME)
        val startMinutes = TimeUnit.MILLISECONDS.toMinutes(mStartTime) % 60
        val startSeconds = TimeUnit.MILLISECONDS.toSeconds(mStartTime) % 60
        val endMinutes = TimeUnit.MILLISECONDS.toMinutes(mEndTime) % 60
        val endSeconds = TimeUnit.MILLISECONDS.toSeconds(mEndTime) % 60
        binding!!.tvTimeStart.text = String.format("%02d:%02d", startMinutes, startSeconds)
        binding!!.tvTimeEnd.text = String.format("%02d:%02d", endMinutes, endSeconds)
        binding!!.videoContent.seekTo(mStartTime)
    }

    /**
     * 获取视频帧列表 — extracts [FRAME_COUNT] evenly-spaced thumbnails for the crop timeline.
     *
     * Fix: MediaMetadataRetriever is NOT thread-safe, so frames are pulled sequentially on the
     * IO dispatcher instead of from concurrent `async` jobs (concurrent getFrameAtTime calls on
     * one retriever can crash natively or return corrupt frames).
     */
    private fun extractFrames() {
        val data = mData
        if (data == null) {
            dismissLoading()
            return
        }
        mBitmaps.clear()

        val retriever = MediaMetadataRetriever()
        retriever.setDataSource(data.path)

        val duration = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLongOrNull() ?: 0L
        val interval = duration / FRAME_COUNT

        // NOTE(review): GlobalScope outlives the Activity — a lifecycle-aware scope would be
        // preferable, but that needs an androidx import; the isFinishing/isDestroyed guard
        // below at least prevents touching a dead UI.
        GlobalScope.launch(Dispatchers.IO) {
            try {
                for (i in 0 until FRAME_COUNT) {
                    val frameTimeUs = i * interval * 1000 // ms -> µs for getFrameAtTime
                    retriever.getFrameAtTime(frameTimeUs, MediaMetadataRetriever.OPTION_CLOSEST)
                        ?.let { mBitmaps.add(it) }
                }
            } catch (e: Exception) {
                e.printStackTrace()
            } finally {
                try {
                    retriever.release()
                } catch (e: Exception) {
                    // release() itself can throw on some API levels; never mask the real error.
                    e.printStackTrace()
                }
                withContext(Dispatchers.Main) {
                    if (isFinishing || isDestroyed) return@withContext
                    dismissLoading()
                    if (mData != null) {
                        binding!!.viewOverlay.setVideoFrames(mBitmaps)
                        initVideoListener()
                    }
                }
            }
        }
    }

    /** Packs the trimmed clip + caption into an upload request, broadcasts it, and finishes. */
    private fun setResultAndFinish() {
        // Guard: the 1 s handler delay in clipVideo() can outlive the Activity's data.
        val data = mData ?: return
        data.content = binding!!.etMessage.text.toString()
        val bean = UploadRequestEntity(
            type = 2,
            photos = listOf(data),
            frames = mFrames,
        )
        RxBus.getDefault().post(EventEntity(EventType.SEND_TO_FRAMES, bean))
        setResult(PhotosActivity.EDIT_RESULT_CODE)
        finish()
    }

    /**
     * 裁剪视频 — trims the selected range into a freshly-named compressed MP4, then signals
     * completion through [mHandler] (delayed 1 s so FFmpeg fully flushes the output file).
     */
    private fun clipVideo() {
        if (mData == null) return
        val outputDirectory = FileUtils.getFilePath()
        val outputCompressed = File(outputDirectory, "COMPRESSED_${System.currentTimeMillis()}.mp4")
        val compressedPath = outputCompressed.absolutePath
        val start = DateUtils.toTime(mStartTime, "HH:mm:ss")
        val end = DateUtils.toTime(mEndTime, "HH:mm:ss")
        L.i("裁剪：$start - $end 的视频")
        trimVideo(mData!!.path!!, compressedPath, start, end,
            onSuccess = {
                mHandler.postDelayed({
                    mData!!.cropPath = compressedPath
                    mHandler.sendEmptyMessage(MSG_CLIP_DONE)
                    L.i("FFmpeg 压缩完成")
                }, 1000)
            },
            onFailure = { message ->
                // Fix: this was a silent no-op, leaving the loading dialog up forever
                // whenever FFmpeg failed. Dismiss and tell the user on the main thread.
                mHandler.post {
                    dismissLoading()
                    ToastUtils.showShort(message)
                }
            })
    }

    /**
     * Runs an async FFmpeg trim+transcode of [inputPath] between [startTime] and [endTime]
     * (HH:mm:ss) into [outputPath]: H.264 @1000k CRF 28 "fast", AAC @128k.
     *
     * @param onSuccess invoked on FFmpeg completion.
     * @param onFailure invoked with a human-readable message on FFmpeg error.
     */
    private fun trimVideo(
        inputPath: String,
        outputPath: String,
        startTime: String,
        endTime: String,
        onSuccess: () -> Unit,
        onFailure: (String) -> Unit
    ) {
        val cmdList = RxFFmpegCommandList()
        cmdList.append("-y")
        // -ss/-to before -i: fast input-side seeking.
        cmdList.append("-ss").append(startTime)
        cmdList.append("-to").append(endTime)
        cmdList.append("-i").append(inputPath)
//        cmdList.append("-vf")
//        if (mData!!.width!! > mData!!.height!!) {
//            cmdList.append("scale=1280:800")
//        } else {
//            cmdList.append("scale=800:1280")
//        }
        cmdList.append("-c:v").append("libx264")
        cmdList.append("-b:v").append("1000k")
        cmdList.append("-preset").append("fast")
        cmdList.append("-crf").append("28")
        cmdList.append("-c:a").append("aac")
        cmdList.append("-b:a").append("128k")
        cmdList.append(outputPath)
        val cmd = cmdList.build()
        RxFFmpegInvoke.getInstance().runCommandAsync(cmd, object : RxFFmpegInvoke.IFFmpegListener {
            override fun onFinish() {
                L.d("FFmpeg 裁剪完成")
                onSuccess()
            }

            override fun onProgress(progress: Int, progressTime: Long) {
                L.d("FFmpeg 裁剪进度: $progress%")
            }

            override fun onCancel() {
                L.d("FFmpeg 取消裁剪")
            }

            override fun onError(message: String) {
                L.d("FFmpeg 裁剪失败：$message")
                onFailure("裁剪失败: $message")
            }
        })
    }

    /**
     * Returns the duration of [videoPath] in milliseconds, or 0 if it cannot be read.
     *
     * Fix: the previous implementation exec'd an `ffprobe` binary, which does not exist on
     * stock Android devices (RxFFmpeg bundles a JNI library, not a CLI tool), so the call
     * always failed — and it also leaked the Process. MediaMetadataRetriever reads the same
     * metadata natively. Name kept for call-site compatibility.
     */
    private fun getVideoDurationUsingFFmpeg(videoPath: String): Long {
        val retriever = MediaMetadataRetriever()
        return try {
            retriever.setDataSource(videoPath)
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLongOrNull() ?: 0L
        } catch (e: Exception) {
            e.printStackTrace()
            0L
        } finally {
            try {
                retriever.release()
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }
}