package com.shenhangye.pushlive.render

import android.content.Context
import android.graphics.SurfaceTexture
import android.opengl.EGL14
import android.opengl.GLSurfaceView
import android.os.Environment
import android.util.Log
import android.view.ViewGroup
import com.shenhangye.pushlive.camera.Camera2FrameCallback
import com.shenhangye.pushlive.camera.Camera2Wrapper
import com.shenhangye.pushlive.camera.CameraUtil.getFitInScreenSize
import com.shenhangye.pushlive.codec.MediaRecorder
import com.shenhangye.pushlive.filter.CameraFboFilter
import com.shenhangye.pushlive.filter.ScreenFilter
import com.shenhangye.pushlive.opengl.openglutil
import com.shenhangye.pushlive.opengl.openglutil.checkGlError
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import java.io.File
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10

/**
 * GLSurfaceView renderer that previews the Camera2 feed and optionally records it.
 *
 * Pipeline per frame: camera -> OES texture ([mSurfaceTexture]) -> [CameraFboFilter]
 * (renders into an FBO-backed 2D texture) -> [ScreenFilter] (to screen) and, when
 * recording, [MediaRecorder.encodeFrame] with the frame's timestamp.
 *
 * All GL work runs on the GLSurfaceView's render thread; the view is expected to be
 * in RENDERMODE_WHEN_DIRTY since we call [GLSurfaceView.requestRender] per camera frame.
 */
class CameraRender1(var context: Context, var mGLSurfaceView: GLSurfaceView) : GLSurfaceView.Renderer {

    private val TAG = "CameraRender"

    var mCamera2Wrapper: Camera2Wrapper? = null
    var mCameraFilter: CameraFboFilter? = null
    var mScreenFilter: ScreenFilter? = null

    // Holds the single OES texture id the camera streams into.
    var oes = IntArray(1)

    var mMediaRecorder: MediaRecorder? = null
    var mSurfaceTexture: SurfaceTexture? = null

    // Scratch buffer for the SurfaceTexture's 4x4 transform matrix, reused every frame.
    var mtx = FloatArray(16)

    /** Stops the active recording session, if one was started. */
    fun onstop() {
        mMediaRecorder?.stop()
    }

    /** Starts encoding frames into the recorder created in [onSurfaceCreated]. */
    fun startRecord() {
        mMediaRecorder?.start()
    }

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        mCamera2Wrapper = Camera2Wrapper(context, object : Camera2FrameCallback {
            override fun onPreviewFrame(data: ByteArray?, width: Int, height: Int) {
                // Raw preview buffers are unused; rendering goes through the OES texture.
            }

            override fun onCaptureFrame(data: ByteArray?, width: Int, height: Int) {
                // Still-capture buffers are unused here.
            }
        })

        // Resize the GLSurfaceView to fit the screen while keeping the camera's
        // aspect ratio. View.post runs this on the UI thread (layout params must
        // be touched there) and replaces the earlier GlobalScope.launch(Main),
        // which tied the work to no lifecycle and could outlive the view.
        mGLSurfaceView.post {
            val metrics = context.resources.displayMetrics
            mCamera2Wrapper?.previewSize?.let { size ->
                Log.e(TAG, "previewSize" + size.width + "---" + size.height)
                getFitInScreenSize(size.width, size.height, metrics.widthPixels, metrics.heightPixels).apply {
                    Log.e(TAG, "$width--surfaceCreated-$height")
                    val lp: ViewGroup.LayoutParams = mGLSurfaceView.layoutParams
                    lp.width = width
                    lp.height = height
                    mGLSurfaceView.layoutParams = lp
                }
            }
        }

        mCameraFilter = CameraFboFilter(context)
        mScreenFilter = ScreenFilter(context)

        // Create the OES texture and the SurfaceTexture the camera renders into.
        openglutil.createOes(oes)
        mSurfaceTexture = SurfaceTexture(oes[0])
        Log.e(TAG, oes[0].toString() + "oes")
        mSurfaceTexture?.setOnFrameAvailableListener {
            // Render on demand: one draw pass per delivered camera frame.
            mGLSurfaceView.requestRender()
        }

        mCamera2Wrapper?.startCamera(mSurfaceTexture)

        // Record into the app-specific external files dir (no storage permission
        // required). The previous Environment.getExternalStorageDirectory() is
        // deprecated and not writable under scoped storage on API 29+; the code
        // already fetched getExternalFilesDir(null) and discarded the result.
        val outputDir = context.getExternalFilesDir(null) ?: context.filesDir
        val path = File(outputDir, "inp222ut.mp4").absolutePath

        // Share the current EGL context so the encoder thread can sample the
        // filter's output texture.
        val eglContext = EGL14.eglGetCurrentContext()
        mMediaRecorder = MediaRecorder(context, path, 480, 640, eglContext)
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        mCameraFilter?.setSize(width, height)
        mScreenFilter?.setSize(width, height)
    }

    override fun onDrawFrame(gl: GL10?) {
        // Surface any GL error left over from previous work before this frame.
        checkGlError("onDrawFrame:start")

        // Latch the newest camera frame into the OES texture and read its
        // texture-coordinate transform for the filter.
        mSurfaceTexture?.updateTexImage()
        mSurfaceTexture?.getTransformMatrix(mtx)
        mCameraFilter?.setMatrix(mtx)

        // OES camera texture -> FBO (plain 2D texture id) -> screen.
        val fboTextureId: Int? = mCameraFilter?.onDraw(oes[0])
        fboTextureId?.let { mScreenFilter?.onDraw(it) }

        // Hand the same 2D texture to the encoder, stamped with the frame time.
        fboTextureId?.let { tex ->
            mSurfaceTexture?.timestamp?.let { ts -> mMediaRecorder?.encodeFrame(tex, ts) }
        }
    }
}