package com.jacky.demo.func;

import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;

import com.jacky.demo.BaseActivity;
import com.jacky.demo.R;
import com.jacky.log.Logger;
import com.tencent.liteav.basic.log.TXCLog;
import com.tencent.rtmp.ITXLivePushListener;
import com.tencent.rtmp.TXLiveConstants;
import com.tencent.rtmp.TXLivePushConfig;
import com.tencent.rtmp.TXLivePusher;
import com.tencent.rtmp.ui.TXCloudVideoView;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

/**
 * Created by lixinquan on 2019/8/1.
 */
public class TxWaterMaskActivity extends BaseActivity implements ITXLivePushListener, View.OnClickListener {

    // Push configuration (hardware encode, camera facing, etc.) applied to the pusher.
    private TXLivePushConfig mLivePushConfig;
    // Tencent live pusher; created in initLivePusher(), released in onDestroy().
    private TXLivePusher mLivePusher;

    ImageView mImageView;          // logo view (currently unused; see commented-out code in initLivePusher)
    TXCloudVideoView mVideoView;   // camera preview surface
    String playerUrl;              // RTMP push url fetched from the test server; null until fetched
    VideoProcessListener mListener;// per-frame watermark renderer installed on the pusher
    private boolean isCameraFont = false; // true when the front camera is active

    /** Supplies the layout resource that {@link BaseActivity} inflates as the content view. */
    @Override
    protected int getContentView() {
        final int layoutRes = R.layout.tx_watermask_activity;
        return layoutRes;
    }

    /**
     * One-time UI wiring: toolbar title, view lookups, click listeners, and the
     * live pusher itself. Invoked by {@link BaseActivity} after the content view is set.
     */
    @Override
    protected void initCreate() {
        // getSupportActionBar() can return null if the theme provides no action bar;
        // the original code would NPE here in that case.
        if (getSupportActionBar() != null) {
            getSupportActionBar().setTitle("直播推流");
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        }

        mImageView = findViewById(R.id.logo);
        mVideoView = findViewById(R.id.video_view);
        findViewById(R.id.btn_start).setOnClickListener(this);
        findViewById(R.id.btn_new).setOnClickListener(this);
        findViewById(R.id.btn_switch).setOnClickListener(this);
        initLivePusher();
    }

    /**
     * Push-event callback from the SDK; logs the event code and its description.
     *
     * @param i      event code (TXLiveConstants.PUSH_EVT_* / PUSH_ERR_*)
     * @param bundle event payload; EVT_DESCRIPTION carries a human-readable message
     */
    @Override
    public void onPushEvent(int i, Bundle bundle) {
        String msg = bundle.getString(TXLiveConstants.EVT_DESCRIPTION);
        // The original call was Log.d("receive event: ", + i + ", " + msg) — the
        // event code was mangled by a stray unary plus and the tag was the message
        // prefix. Use the file's standard "demo" tag and one formatted message.
        Log.d("demo", "receive event: " + i + ", " + msg);
    }

    /** Periodic network-status callback; feeds a formatted summary to the SDK's log view. */
    @Override
    public void onNetStatus(Bundle bundle) {
        mLivePusher.onLogRecord("[net state]:\n" + getNetStatusString(bundle) + "\n");
    }
    /**
     * Shared pretty-printer for the SDK's net-status bundle; produces the
     * three-row diagnostic string shown in the on-screen log.
     */
    protected String getNetStatusString(Bundle status) {
        final String resolution = status.getInt(TXLiveConstants.NET_STATUS_VIDEO_WIDTH)
                + "*" + status.getInt(TXLiveConstants.NET_STATUS_VIDEO_HEIGHT);
        final String queue = status.getInt(TXLiveConstants.NET_STATUS_CODEC_CACHE)
                + "|" + status.getInt(TXLiveConstants.NET_STATUS_CACHE_SIZE);
        final String dropped = status.getInt(TXLiveConstants.NET_STATUS_CODEC_DROP_CNT)
                + "|" + status.getInt(TXLiveConstants.NET_STATUS_DROP_SIZE);
        return String.format("%-14s %-14s %-12s\n%-8s %-8s %-8s %-8s\n%-14s %-14s %-12s\n%-14s %-14s",
                "CPU:" + status.getString(TXLiveConstants.NET_STATUS_CPU_USAGE),
                "RES:" + resolution,
                "SPD:" + status.getInt(TXLiveConstants.NET_STATUS_NET_SPEED) + "Kbps",
                "JIT:" + status.getInt(TXLiveConstants.NET_STATUS_NET_JITTER),
                "FPS:" + status.getInt(TXLiveConstants.NET_STATUS_VIDEO_FPS),
                "GOP:" + status.getInt(TXLiveConstants.NET_STATUS_VIDEO_GOP) + "s",
                "ARA:" + status.getInt(TXLiveConstants.NET_STATUS_AUDIO_BITRATE) + "Kbps",
                "QUE:" + queue,
                "DRP:" + dropped,
                "VRA:" + status.getInt(TXLiveConstants.NET_STATUS_VIDEO_BITRATE) + "Kbps",
                "SVR:" + status.getString(TXLiveConstants.NET_STATUS_SERVER_IP),
                "AUDIO:" + status.getString(TXLiveConstants.NET_STATUS_AUDIO_INFO));
    }


    /**
     * Creates the pusher, enables hardware encoding, starts the camera preview,
     * and installs the per-frame watermark processor.
     */
    private void initLivePusher() {
        TXCLog.setLevel(TXCLog.LOG_ERROR);

        mLivePusher = new TXLivePusher(this);
        mLivePusher.setPushListener(this);

        mLivePushConfig = new TXLivePushConfig();
        mLivePushConfig.setHardwareAcceleration(TXLiveConstants.ENCODE_VIDEO_HARDWARE);
        // The SDK's static watermark is intentionally disabled here — the timestamp
        // watermark is drawn per frame by VideoProcessListener instead. The original
        // code still decoded R.mipmap.ic_launcher into an unused local; that dead
        // allocation has been removed.
//        Bitmap bm = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher);
//        mLivePushConfig.setWatermark(bm , 10, 10);
//        mImageView.setImageBitmap(bm);

        mLivePushConfig.setFrontCamera(isCameraFont);
        mLivePusher.setConfig(mLivePushConfig);
        mLivePusher.startCameraPreview(mVideoView);
        mLivePusher.setVideoProcessListener(mListener = new VideoProcessListener());
    }

    /** Routes button clicks: fetch a test push url, start pushing, or flip the camera. */
    @Override
    public void onClick(View view) {
        final int id = view.getId();
        if (id == R.id.btn_new) {
            getRTMPPusherFromServer();
        } else if (id == R.id.btn_start) {
            startLivePush("", 0);
        } else if (id == R.id.btn_switch) {
            mLivePusher.switchCamera();
            isCameraFont = !isCameraFont;
            // Mirroring must track the camera facing, otherwise the pushed stream is flipped.
            mLivePusher.setMirror(isCameraFont);
        }
    }

    /**
     * Starts pushing to the url previously stored in {@link #playerUrl}.
     *
     * <p>NOTE(review): the parameters were never used by the original code;
     * they are kept unchanged for call-site compatibility.
     *
     * @param url     ignored (legacy parameter)
     * @param maxTime ignored (legacy parameter)
     */
    private void startLivePush(String url, int maxTime) {
        String rtmpUrl = playerUrl;
        // The original NPE'd on rtmpUrl.trim() when "开始推流" was tapped before
        // a push url had been fetched.
        if (rtmpUrl == null || rtmpUrl.trim().isEmpty()) {
            Toast.makeText(this, "推流地址为空，请先获取推流地址", Toast.LENGTH_SHORT).show();
            return;
        }
        mLivePusher.startPusher(rtmpUrl.trim());
    }

    /** Suspends preview rendering and the push session while backgrounded. */
    @Override
    public void onPause() {
        super.onPause();
        final TXCloudVideoView preview = mVideoView;
        if (preview != null) {
            preview.onPause();
        }
        final TXLivePusher pusher = mLivePusher;
        if (pusher != null) {
            pusher.pausePusher();
        }
    }

    /** Resumes preview rendering and the push session when foregrounded again. */
    @Override
    public void onResume() {
        super.onResume();
        final TXCloudVideoView preview = mVideoView;
        if (preview != null) {
            preview.onResume();
        }
        final TXLivePusher pusher = mLivePusher;
        if (pusher != null) {
            pusher.resumePusher();
        }
    }

    /** Releases the pusher and camera preview; also dismisses any pending dialog. */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Dismiss the fetch spinner so it cannot leak the destroyed window
        // (the original left it showing if the activity died mid-request).
        if (mFetchProgressDialog != null && mFetchProgressDialog.isShowing()) {
            mFetchProgressDialog.dismiss();
        }
        if (mLivePusher != null) {
            mLivePusher.setPushListener(null);
            mLivePusher.stopPusher();
            mLivePusher.stopCameraPreview(true);
        }
    }

    ProgressDialog mFetchProgressDialog; // spinner shown while fetching the test push url
    OkHttpClient mOkHttpClient;          // lazily created, then reused for every fetch

    /**
     * Fetches a temporary RTMP push url from Tencent's test endpoint and stores
     * it in {@link #playerUrl}. Shows a modal spinner while the request runs.
     *
     * <p>OkHttp invokes {@code Callback} methods on a background thread, so all
     * UI work (dialog dismiss, toasts) and the {@code playerUrl} write are posted
     * back via {@code runOnUiThread} — the original code dismissed the dialog and
     * wrote the field off the main thread, and gave no feedback on a non-2xx reply.
     */
    public void getRTMPPusherFromServer() {
        if (mFetchProgressDialog == null) {
            mFetchProgressDialog = new ProgressDialog(this);
            mFetchProgressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER); // indeterminate spinner
            mFetchProgressDialog.setCancelable(false);             // Back key must not cancel
            mFetchProgressDialog.setCanceledOnTouchOutside(false); // outside touches must not cancel
        }
        mFetchProgressDialog.show();

        if (mOkHttpClient == null) {
            mOkHttpClient = new OkHttpClient().newBuilder()
                    .connectTimeout(10, TimeUnit.SECONDS)
                    .readTimeout(10, TimeUnit.SECONDS)
                    .writeTimeout(10, TimeUnit.SECONDS)
                    .build();
        }
        String reqUrl = "https://lvb.qcloud.com/weapp/utils/get_test_pushurl";
        Request request = new Request.Builder()
                .url(reqUrl)
                .addHeader("Content-Type", "application/json; charset=utf-8")
                .build();
        Log.d("demo", "start fetch push url");
        mOkHttpClient.newCall(request).enqueue(new okhttp3.Callback() {
            @Override
            public void onFailure(okhttp3.Call call, IOException e) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mFetchProgressDialog.dismiss();
                        Toast.makeText(TxWaterMaskActivity.this, "获取推流地址失败", Toast.LENGTH_SHORT).show();
                    }
                });
            }

            @Override
            public void onResponse(okhttp3.Call call, Response response) throws IOException {
                String pushUrl = null;
                if (response.isSuccessful()) {
                    try {
                        JSONObject jsonRsp = new JSONObject(response.body().string());
                        pushUrl = jsonRsp.optString("url_push");                          // RTMP push url
                        final String rtmpPlayUrl = jsonRsp.optString("url_play_rtmp");    // RTMP play url
                        final String flvPlayUrl = jsonRsp.optString("url_play_flv");      // FLV play url
                        final String hlsPlayUrl = jsonRsp.optString("url_play_hls");      // HLS play url
                        final String realtimePlayUrl = jsonRsp.optString("url_play_acc"); // accelerated play url
                        Log.e("demo url", rtmpPlayUrl);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
                final String fetched = pushUrl;
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mFetchProgressDialog.dismiss();
                        if (fetched != null && !fetched.isEmpty()) {
                            playerUrl = fetched; // published on the main thread
                        } else {
                            Toast.makeText(TxWaterMaskActivity.this, "获取推流地址失败", Toast.LENGTH_SHORT).show();
                        }
                    }
                });
            }
        });
    }

    public static class VideoProcessListener implements TXLivePusher.VideoCustomProcessListener {

        // Watermark bitmap dimensions, in pixels.
        private static final int BITMAP_WIDTH = 416;
        private static final int BITMAP_HEIGHT = 64;

        // serverTime + (now - mobileTime) lets buildWatermask() extrapolate server wall time.
        private long serverTime, mobileTime;
        private int mProgramWaterMask;   // GL program handle for the watermark shaders
        int[] waterTexture = new int[1]; // texture the timestamp bitmap is uploaded into
        private int[] fFrame;            // framebuffer object; null until first frame / after destroy

        Paint paint = new Paint();       // paint used to draw the timestamp text
        Bitmap bm;                       // reusable ARGB bitmap the timestamp is drawn into

        private FloatBuffer waterVertexBuffer, waterTextureBuffer; // quad positions / texcoords

        public VideoProcessListener() {
            paint.setColor(0xffffffff);
            paint.setTextSize(30);
            serverTime = mobileTime = System.currentTimeMillis();
        }

        /**
         * Records a server timestamp together with the local time it arrived,
         * so subsequent frames can extrapolate the server clock.
         *
         * @param serverTime server wall-clock time, ms since the epoch
         */
        public void syncTime(long serverTime) {
            this.mobileTime = System.currentTimeMillis();
            this.serverTime = serverTime;
        }

        /**
         * Per-frame hook from the SDK: lazily creates GL resources, redraws the
         * timestamp bitmap, and blends it onto the frame's texture in place.
         *
         * @return the (modified in place) input texture id
         */
        @Override
        public int onTextureCustomProcess(int textureId, int width, int height) {
            bindFrameTexture(width, height);
            // The original declared a local "Bitmap bm" shadowing the field of the
            // same name; buildWatermask() returns that same field, so name the
            // local distinctly to avoid the confusion.
            Bitmap mask = buildWatermask();
            // serverTime can only be 0 if syncTime(0) was called; skip drawing then.
            if (serverTime > 0) {
                storeImage(mask, textureId, width, height);
            }
            return textureId;
        }

        /** Face-detection callback from the SDK; intentionally unused here. */
        @Override
        public void onDetectFacePoints(float[] floats) {}

        /**
         * GL-context teardown callback: releases the texture, FBO, and program,
         * and recycles the watermark bitmap. fFrame is nulled so that
         * bindFrameTexture() rebuilds everything if a new context arrives.
         */
        @Override
        public void onTextureDestoryed() {
            Logger.i("texture destory...");
            GLES20.glDeleteTextures(1, waterTexture, 0);
            if(fFrame != null) GLES20.glDeleteFramebuffers(1, fFrame, 0);
            GLES20.glDeleteProgram(mProgramWaterMask);
            fFrame = null;  // re-init flag for bindFrameTexture()
            if(bm != null) bm.recycle();
        }

        /**
         * One-time (per GL context) setup: allocates the watermark bitmap, the
         * quad geometry buffers, the shader program, a texture, and a framebuffer
         * object. {@code fFrame != null} doubles as the "already initialized" flag.
         *
         * @param width  frame width (unused; kept for call-site symmetry)
         * @param height frame height (unused)
         */
        private void bindFrameTexture(int width, int height) {
            if (fFrame != null) return; // already initialized for this context
            Log.d("demo", "init texture");

            bm = Bitmap.createBitmap(BITMAP_WIDTH, BITMAP_HEIGHT, Bitmap.Config.ARGB_8888);
            fFrame = new int[1];
            waterTextureBuffer = allocateFloatBuffer(waterMaskTextureData);
            waterVertexBuffer = allocateFloatBuffer(waterMaskVertexData2);
            mProgramWaterMask = createProgram(waterVertexShader, waterFragmentShader);

            // The original called glDeleteFramebuffers/glDeleteTextures here first.
            // On the first run both arrays held 0 (ignored by glDelete*), and after
            // a context loss they held stale names from the OLD context — deleting
            // those in a new context could destroy an unrelated object. Dropped.

            GLES20.glGenTextures(1, waterTexture, 0);
            // Framebuffer object used to render the watermark onto the frame texture.
            GLES20.glGenFramebuffers(1, fFrame, 0);
        }

        /**
         * Renders {@code bitmap} onto {@code textureId} through the FBO: attaches
         * the frame texture as the color target, then alpha-blends the watermark
         * quad into a BITMAP_WIDTH x BITMAP_HEIGHT viewport near the frame bottom.
         *
         * @param bitmap       freshly drawn watermark (BITMAP_WIDTH x BITMAP_HEIGHT)
         * @param textureId    the SDK frame texture to draw onto
         * @param screenWidth  frame width in pixels
         * @param screenHeight frame height in pixels
         */
        private void storeImage(Bitmap bitmap,int textureId, int screenWidth, int screenHeight) {
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fFrame[0]);
            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);

            // Center horizontally; bottom edge at 10% of the frame height.
            int left = (screenWidth - BITMAP_WIDTH) / 2;
            // Alpha-blend the watermark over the existing frame contents.
            GLES20.glEnable(GLES20.GL_BLEND);
            GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
            GLES20.glViewport(left, (int) (screenHeight * 0.1f), BITMAP_WIDTH, BITMAP_HEIGHT);

            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, waterTexture[0]);
            glTexParameter(false);
            // Upload this frame's timestamp bitmap into the watermark texture.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

            drawGLTexture(mProgramWaterMask, waterTexture[0], waterTextureBuffer, waterVertexBuffer);
            GLES20.glDisable(GLES20.GL_BLEND);

            // NOTE(review): completeness is checked only after drawing; moving the
            // check right after glFramebufferTexture2D would catch problems earlier.
            if(GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                Logger.w("ERROR::FRAMEBUFFER:: Framebuffer is not complete!");
            }
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0);
        }

        /**
         * Draws a 4-vertex textured triangle strip with program {@code p}.
         * In waterVertexShader, "af_Position" is the texture-coordinate attribute
         * and "av_Position" is the clip-space position — the original local names
         * had them swapped, which this rewrite corrects (names only; the data
         * wiring is unchanged).
         */
        private void drawGLTexture(int p, int texture, Buffer texureBuffer, Buffer vertextBuffer) {
            GLES20.glUseProgram(p);

            // Texture-coordinate attribute (shader name "af_Position").
            int texCoordHandle = GLES20.glGetAttribLocation(p, "af_Position");
            GLES20.glEnableVertexAttribArray(texCoordHandle);
            GLES20.glVertexAttribPointer(texCoordHandle, 2, GLES20.GL_FLOAT, false, 8, texureBuffer);
            // Clip-space position attribute (shader name "av_Position").
            int positionHandle = GLES20.glGetAttribLocation(p, "av_Position");
            GLES20.glEnableVertexAttribArray(positionHandle);
            GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 8, vertextBuffer);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            GLES20.glDisableVertexAttribArray(texCoordHandle);
            GLES20.glDisableVertexAttribArray(positionHandle);
        }

        /**
         * Redraws "北京时间 yyyy-MM-dd HH:mm:ss" into the shared bitmap, using the
         * server-synced clock pair to estimate the current server time.
         *
         * @return the shared watermark bitmap (field {@code bm}), freshly drawn
         */
        private Bitmap buildWatermask() {
            bm.eraseColor(0x00000000); // fully transparent background
            Canvas canvas = new Canvas(bm);

            long estimatedServerNow = serverTime + (System.currentTimeMillis() - mobileTime);
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.CHINA);
            String stamped = fmt.format(new Date(estimatedServerNow));
            String ts = String.format("北京时间 %s", stamped);
            canvas.drawText(ts, 0, 40, paint);
            return bm;
        }


        // Vertex shader: passes clip-space positions (av_Position) through and
        // forwards texture coordinates (af_Position) to the fragment stage.
        static final String waterVertexShader =
                "attribute vec4 av_Position;" +
                        "attribute vec2 af_Position;" +
                        "varying vec2 v_texPo;" +
                        "void main() {" +
                        "    v_texPo = af_Position;" +
                        "    gl_Position = av_Position;" +
                        "}";

        // Fragment shader: plain texture sampling, no tinting.
        static final String waterFragmentShader =
                "precision mediump float;" +
                        "varying vec2 v_texPo;" +
                        "uniform sampler2D sTexture;" +
                        "void main() {" +
                        "    gl_FragColor=texture2D(sTexture, v_texPo);" +
                        "}";

        /**
         * Compiles a shader of the given type.
         *
         * @param type       GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
         * @param shaderCode GLSL source text
         * @return the shader handle; a failed compile is logged but the handle is
         *         still returned, matching the original contract
         */
        public static int loadShader(int type, String shaderCode) {
            int shader = GLES20.glCreateShader(type);
            GLES20.glShaderSource(shader, shaderCode);
            GLES20.glCompileShader(shader);
            // The original never checked GL_COMPILE_STATUS, so a bad shader failed
            // silently at draw time; surface the error here instead.
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e("demo", "shader compile failed: " + GLES20.glGetShaderInfoLog(shader));
            }
            return shader;
        }

        /**
         * Builds and links a GL program from vertex + fragment shader sources.
         *
         * @return the program handle; a failed link is logged but the handle is
         *         still returned, matching the original contract
         */
        public static int createProgram(String verTextShader, String fragmentShader) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, verTextShader);
            int fragShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
            int program = GLES20.glCreateProgram();
            GLES20.glAttachShader(program, vertexShader);
            GLES20.glAttachShader(program, fragShader);
            GLES20.glLinkProgram(program);
            // After linking, the program owns the compiled code; flag the shader
            // objects for deletion so they are freed with the program (the
            // original leaked both shader handles).
            GLES20.glDeleteShader(vertexShader);
            GLES20.glDeleteShader(fragShader);
            // Surface link failures instead of failing silently at draw time.
            int[] linked = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
            if (linked[0] == 0) {
                Log.e("demo", "program link failed: " + GLES20.glGetProgramInfoLog(program));
            }
            return program;
        }

        /** Convenience overload: always allocates a fresh direct FloatBuffer for {@code vertices}. */
        public static FloatBuffer allocateFloatBuffer(float[] vertices) {
            return allocateFloatBuffer(null, vertices);
        }

        /**
         * Copies {@code vertices} into a native-byte-order direct FloatBuffer.
         * Allocates a new buffer when {@code vertexBuffer} is null, otherwise
         * clears and reuses the one supplied. The buffer is rewound to position 0.
         *
         * @param vertexBuffer buffer to reuse, or null to allocate
         * @param vertices     float data to copy in
         * @return the filled buffer, position 0
         */
        public static FloatBuffer allocateFloatBuffer(FloatBuffer vertexBuffer, float[] vertices) {
            FloatBuffer target = vertexBuffer;
            if (target == null) {
                target = ByteBuffer.allocateDirect(vertices.length * 4) // 4 bytes per float
                        .order(ByteOrder.nativeOrder())
                        .asFloatBuffer();
            } else {
                target.clear();
            }
            target.put(vertices);
            target.position(0);
            return target;
        }
        /**
         * Applies the standard sampling/wrap parameters to the currently bound
         * texture, targeting either GL_TEXTURE_2D or the external-OES target.
         */
        static void glTexParameter(boolean isOes) {
            int target = isOes ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D;
            // Minification: nearest-neighbor — pick the single closest texel.
            GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            // Magnification: bilinear — weighted average of neighboring texels.
            GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp S to the edge so sampling never blends with a border.
            GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            // Clamp T to the edge for the same reason.
            GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        }
        // Clip-space quad as a triangle strip. NOTE(review): this first layout is
        // never referenced by the visible code — bindFrameTexture() uploads
        // waterMaskVertexData2 below; kept because the field is package-visible.
        static float waterMaskVertexData[] = {
                1f, -1f, // bottom right
                -1f, -1f, // bottom left
                1f, 1f,  // top right
                -1f, 1f, // top left
        };
        // The quad actually used for the watermark (strip order: BL, BR, TL, TR).
        static float waterMaskVertexData2[] = {
                -1f, -1f, // bottom left
                1f, -1f, // bottom right
                -1f, 1f, // top left
                1f, 1f,  // top right
        };
        // Texture coordinates paired 1:1 with waterMaskVertexData2: (0,0) maps to
        // the bottom-left vertex. Combined with GLUtils.texImage2D uploading the
        // bitmap top-row-first, this presumably yields the intended upright
        // watermark — verify visually if the overlay ever appears flipped.
        static float waterMaskTextureData[] = {
                0f, 0f, // top left
                1f, 0f,  // top right
                0f, 1f, // bottom left
                1f, 1f, // bottom right
        };

    }
}
