package com.paperless.sdk;

import android.media.MediaCodec;
import android.os.Build;

import com.blankj.utilcode.util.LogUtils;
import com.mogujie.tt.protobuf.InterfaceMacro;
import com.paperless.model.CallBusType;
import com.paperless.model.CallValue;
import com.paperless.model.EventMessage;
import com.paperless.model.FrameData;

import org.greenrobot.eventbus.EventBus;

import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * author : Administrator
 * date : 2022/4/25 16:02
 * description : 方法名修改后在对应cpp文件中也要修改 jni_wallet_netinit.cpp
 */
/**
 * JNI bridge to the native conferencing stack (singleton).
 *
 * <p>Loads the FFmpeg/SDL/meeting native libraries on class load and exposes the
 * native entry points plus the callbacks the native layer invokes (named
 * {@code callback*}; their signatures are referenced from C++ — see
 * jni_wallet_netinit.cpp — and must not change).
 */
public class Call {

    static {
        // NOTE(review): Build.CPU_ABI is deprecated since API 21; kept so the log
        // output stays identical. Consider Build.SUPPORTED_ABIS[0] when migrating.
        LogUtils.i("当前是 "+Build.CPU_ABI+" 架构");
        // FFmpeg core libraries — load order matters (dependencies first).
        System.loadLibrary("avcodec-57");
        System.loadLibrary("avdevice-57");
        System.loadLibrary("avfilter-6");
        System.loadLibrary("avformat-57");
        System.loadLibrary("avutil-55");
        System.loadLibrary("postproc-54");
        System.loadLibrary("swresample-2");
        System.loadLibrary("swscale-4");
        // SDL-based playback (loaded unconditionally for every ABI).
        System.loadLibrary("SDL2");
        System.loadLibrary("main");
        // Meeting/network stack libraries.
        System.loadLibrary("NetClient");
        System.loadLibrary("Codec");
        System.loadLibrary("ExecProc");
        System.loadLibrary("Device-OpenSles");
        System.loadLibrary("meetcoreAnd");
        System.loadLibrary("PBmeetcoreAnd");
        System.loadLibrary("meetAnd");
        System.loadLibrary("native-lib");
        System.loadLibrary("z");
    }

    /**
     * Pre-populates one bounded decode queue per known playback resource id so
     * consumers never observe a missing queue for the standard resources.
     */
    private Call() {
        CallValue.decodeMap.put(CallValue.resource_id_0, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_1, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_2, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_3, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_4, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_5, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_6, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_7, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_8, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_9, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_10, new LinkedBlockingQueue<>(CallValue.CAPACITY));
        CallValue.decodeMap.put(CallValue.resource_id_11, new LinkedBlockingQueue<>(CallValue.CAPACITY));
    }

    /** Initialization-on-demand holder — lazy, thread-safe singleton. */
    private static final class MagHolder {
        static final Call mag = new Call();
    }

    /** @return the process-wide singleton instance */
    public static Call getInstance() {
        return MagHolder.mag;
    }

    /**
     * Must be called before initialization; switches agenda-v3 mode.
     *
     * @param enable =1 forces agenda v3 on
     */
    public native void enable_agendav3(int enable);

    /**
     * Allocates the shared direct buffers and registers them with the native
     * layer. Only active when the new playback path is enabled at build time.
     */
    public void InitSetDirectBuf() {
        if (BuildConfig.isUseNewWayPlay) {
            // 500 KB packet buffer + 600 B codec-config buffer, reused per frame.
            m_dbuf = ByteBuffer.allocateDirect(1024 * 500);
            m_dexbuf = ByteBuffer.allocateDirect(600);
            SetDirectBuf(m_dbuf, m_dexbuf);
        }
    }

    // Direct buffers shared with native code; written by the native decoder and
    // read via callback_directvideodecode. Lazily (re)allocated if still null.
    private ByteBuffer m_dbuf;
    private ByteBuffer m_dexbuf;

    public native void SetDirectBuf(java.nio.ByteBuffer dbuf, java.nio.ByteBuffer dexbuf);

    /**
     * Initializes the paperless system.
     *
     * @param data see the paperless interface reference table
     * @return 0 on success, -1 on failure
     */
    public native int Init_walletSys(byte... data);

    public native int call_checkcache(int type, int id, int cacheflag);

    /**
     * Invokes a paperless feature method.
     *
     * @param type   feature type
     * @param method method within that feature type
     * @param data   method arguments; see the paperless interface reference table
     * @return the result bytes on success; a null array on failure
     */
    public native byte[] call_method(int type, int method, byte[] data);

    public native void enablebackgroud(int type);

    /**
     * Initializes desktop/camera capture.
     *
     * @param type         stream type
     * @param channelindex stream channel index
     * @return 0 on success, -1 on failure
     */
    public native int InitAndCapture(int type, int channelindex);

    /**
     * Pushes one captured frame to the native layer.
     *
     * @param type       stream type
     * @param iskeyframe non-zero for a key frame
     * @param pts        presentation timestamp
     * @param data       encoded frame bytes
     * @return 0 on success, -1 on failure
     */
    public native int call(int type, int iskeyframe, long pts, byte[] data);

    /**
     * Crash-reporting bookkeeping hook.
     *
     * @param mode =0 record a method, =1 register a thread, =2 reset
     * @param name method/thread name, at most 128 characters
     */
    public native void crashinit(int mode, String name);

    public native int bytebuffercall(int type, int iskeyframe, long pts, ByteBuffer buffer, int length);

    public native int NV21ToI420(ByteBuffer src, ByteBuffer dst, int w, int h);

    public native int NV21ToNV12(ByteBuffer src, ByteBuffer dst, int w, int h);

    public native int I420ToNV12(ByteBuffer src, ByteBuffer dst, int w, int h);

    /**
     * RGB-to-I420 conversion.
     *
     * <p>rgbmode (in-memory byte order): 0=abgr, 1=bgra, 2=argb, 3=rgba,
     * 4=bgr, 5=rgb.
     */
    public native int RGBToI420(int mode, ByteBuffer src, ByteBuffer dst, int w, int h);

    public native byte[] RGBToNV12BA(int mode, byte[] src, int srcW, int srcH, int dstW, int dstH);

    public native int RGBToNV12(int mode, ByteBuffer src, ByteBuffer dst, int srcW, int srcH, int dstW, int dstH, int rowStride);

    public native int RGBToNV12EX(ByteBuffer src, ByteBuffer dst, int srcW, int srcH, int dstW, int dstH);

    public native int I420Scale(ByteBuffer src, ByteBuffer dst, int srcW, int srcH, int dstW, int dstH);

    public native int ARGBScale(ByteBuffer src, ByteBuffer dst, int srcW, int srcH, int dstW, int dstH);

    public native int ARGBToXRGB(ByteBuffer src, ByteBuffer dst, int mode, int srcW, int srcH);

    public native int ARGBToNV21(ByteBuffer src, ByteBuffer dst, int srcW, int srcH);

    /** Native callback: display started for resource {@code res}. No-op here. */
    public int callback_startdisplay(int res) {
        return 0;
    }

    /** Native callback: display stopped for resource {@code res}. No-op here. */
    public int callback_stopdisplay(int res) {
        return 0;
    }

    /**
     * Opens a download session.
     *
     * @param mediaId media id of the file
     * @return a session index on success, -1 on failure
     */
    public native int user_downloadinit(long mediaId);

    /**
     * Reads downloaded file data.
     *
     * @param opindex  session index
     * @param dbuf     ByteBuffer that receives the data
     * @param readsize number of bytes requested
     * @return the number of bytes actually read
     */
    public native int user_downloadread(int opindex, java.nio.ByteBuffer dbuf, int readsize);

    /**
     * Seeks within a download session.
     *
     * @param opindex session index
     * @param offset  byte offset
     * @return 0 on success, -1 on failure
     */
    public native int user_downloadseek(int opindex, long offset);

    /**
     * Closes a download session.
     *
     * @param opindex session index
     */
    public native void user_downloadclose(int opindex);

    /**
     * Native callback reporting the result of a type/method invocation.
     * Logged unconditionally (originally meant to log only failures, ret &lt;= 0).
     */
    public void error_ret(int type, int method, int ret) {
        LogUtils.e("error_ret \ntype:" + type + "  " + ErrorRet.getTypeMessage(type)
                + "\nmethod:" + method + "  " + ErrorRet.getMethodMessage(method)
                + "\nret:" + ret + " " + ErrorRet.getRetMessage(ret));
    }

    // Pixel format reported to native capture; set externally by the capture code.
    public static int COLOR_FORMAT = 0;

    /**
     * Native callback querying desktop/camera capture parameters.
     *
     * @param type stream type (2 = desktop recording, 3 = camera)
     * @param oper parameter id: 1=pixel format, 2=width, 3=height,
     *             4=start capture, 5=stop capture
     * @return the requested value, -1 when capture may not start, 0 otherwise
     */
    public int callback(int type, int oper) {
        switch (oper) {
            case 1: // pixel format
            {
                if (type == 2) return 1;
                return COLOR_FORMAT;
            }
            case 2: // width
            {
                switch (type) {
                    case 2:
                        return CallValue.record_width;
                    case 3:
                        return CallValue.camera_width;
                }
                // BUGFIX: unknown type previously fell through into the height
                // case and ultimately into "start capture", posting an event.
                return 0;
            }
            case 3: // height
            {
                switch (type) {
                    case 2:
                        return CallValue.record_height;
                    case 3:
                        return CallValue.camera_height;
                }
                // BUGFIX: same fall-through as above.
                return 0;
            }
            case 4: // start capture
            {
                LogUtils.i("通知采集流 type= " + type + ",CallValue.currentFaceStatus=" + CallValue.currentFaceStatus);
                // Capture is only allowed while in the meeting UI.
                if (CallValue.currentFaceStatus != CallValue.FACE_MEET) {
                    return -1;
                }
                EventBus.getDefault().post(new EventMessage.Builder().type(CallBusType.TYPE_START_CAPTURE_INFORM).objects(type).build());
                return 0;
            }
            case 5: // stop capture (releaseMediaCodec)
            {
                LogUtils.i("通知停止采集流 type= " + type);
                EventBus.getDefault().post(new EventMessage.Builder().type(CallBusType.TYPE_STOP_CAPTURE_INFORM).objects(type).build());
                return 0;
            }
            default:
                return 0;
        }
    }

    /**
     * Native callback delivering decoded video as YUV420 planes,
     * e.g. plane sizes {1920, 960, 960}.
     *
     * <p>Delivered to the registered {@link YuvDataCallback} if any, otherwise
     * broadcast on the EventBus.
     *
     * @return always 0
     */
    public int callback_yuvdisplay(int res, int w, int h, byte[] y, byte[] u, byte[] v) {
        if (mYuvDataCallback != null) {
            mYuvDataCallback.onYuvData(res, w, h, y, u, v);
        } else {
            EventBus.getDefault().post(new EventMessage.Builder().type(CallBusType.TYPE_YUV_DISPLAY).objects(res, w, h, y, u, v).build());
        }
        return 0;
    }

    private DecodeDataCallback mDecodeDataCallback;
    private YuvDataCallback mYuvDataCallback;

    public void setYuvDataCallback(YuvDataCallback callback) {
        mYuvDataCallback = callback;
    }

    public void setDecodeDataCallback(DecodeDataCallback callback) {
        mDecodeDataCallback = callback;
    }

    public interface DecodeDataCallback {
        /**
         * Decoded data returned by the native layer.
         *
         * @param isKeyframe whether this is a key frame
         * @param res        playback resource id {@link  CallValue#resource_id_0}
         * @param codecid    decoder type (h264=27, h265=173, mpeg4=12, vp8=139, vp9=167)
         * @param w          video width
         * @param h          video height
         * @param packet     decoded packet data
         * @param pts        decode timestamp {@link MediaCodec.BufferInfo#presentationTimeUs}
         * @param codecdata  decoder configuration data
         */
        void onDecodeData(int isKeyframe, int res, int codecid, int w, int h, byte[] packet, long pts, byte[] codecdata);
    }

    public interface YuvDataCallback {
        /**
         * YUV playback data returned by the native layer.
         *
         * @param res playback resource id {@link  CallValue#resource_id_0}
         * @param w   video width
         * @param h   video height
         * @param y   Y plane
         * @param u   U plane
         * @param v   V plane
         */
        void onYuvData(int res, int w, int h, byte[] y, byte[] u, byte[] v);
    }

    /**
     * Native callback delivering one encoded video packet.
     *
     * <p>Forwarded to the registered {@link DecodeDataCallback} if any; otherwise
     * wrapped in a pooled {@link FrameData} and queued on the per-resource decode
     * queue, dropping the oldest frame when the queue is full.
     *
     * @param res     playback resource id
     * @param codecid decoder type (h264=27, h265=173, mpeg4=12, vp8=139, vp9=167)
     * @param w       video width
     * @param h       video height
     * @param packet  packet data
     * @return always 0
     */
    public int callback_videodecode(int isKeyframe, int res, int codecid, int w, int h, byte[] packet, long pts, byte[] codecdata) {
        if (mDecodeDataCallback != null) {
            mDecodeDataCallback.onDecodeData(isKeyframe, res, codecid, w, h, packet, pts, codecdata);
        } else {
            if (packet != null) {
                if (!CallValue.decodeMap.containsKey(res)) {
                    // Bounded, consistent with the queues built in the constructor
                    // (previously unbounded, defeating the drop-oldest logic below).
                    CallValue.decodeMap.put(res, new LinkedBlockingQueue<>(CallValue.CAPACITY));
                }
                if (res == CallValue.resource_id_0) {
                    CallValue.frame_count++;
                }
                FrameData frameData = CallValue.frameDataPool.poll();
                if (frameData == null) {
                    // Pool exhausted — allocate a fresh one.
                    frameData = new FrameData();
                    LogUtils.i("对象池中没有了就新建一个");
                }
                frameData.setIsKeyFrame(isKeyframe);
                frameData.setRes(res);
                frameData.setCodecid(codecid);
                frameData.setW(w);
                frameData.setH(h);
                frameData.setPts(pts);
                frameData.setBytes(packet);
                frameData.setCodecdata(codecdata);
                LinkedBlockingQueue<FrameData> decodeQueue = CallValue.decodeMap.get(res);
                if (decodeQueue != null) {
                    // offer() fails once the capacity threshold is reached.
                    if (!decodeQueue.offer(frameData)) {
                        // Drop the oldest frame, then retry.
                        if (decodeQueue.poll() != null) {
                            boolean offer = decodeQueue.offer(frameData);
                            LogUtils.i("添加失败就把最旧的数据删除后再添加，offer=" + offer);
                        }
                    }
                }
            }
        }
        return 0;
    }

    /**
     * Native callback for the direct-buffer decode path: the packet bytes are
     * already in {@link #m_dbuf}/{@link #m_dexbuf}; only sizes are passed here.
     *
     * @param datalen      valid bytes in the packet buffer
     * @param codecdatalen valid bytes in the codec-config buffer
     * @return always 0
     */
    public int callback_directvideodecode(int isKeyframe, int res, int codecid, int w, int h, int datalen, long pts, int codecdatalen) {
        // BUGFIX: reject oversized frames BEFORE touching buffer limits —
        // ByteBuffer.limit(datalen) throws IllegalArgumentException when datalen
        // exceeds the 500 KB capacity, so the old post-limit guard never ran.
        if (datalen > 1024 * 500) {
            LogUtils.i("收到一个大数据：" + datalen);
            return 0;
        }
        if (m_dbuf == null) {
            m_dbuf = ByteBuffer.allocateDirect(1024 * 500);
        }
        if (m_dexbuf == null) {
            m_dexbuf = ByteBuffer.allocateDirect(600);
        }
        // Expose exactly the valid region of each shared buffer.
        m_dbuf.position(0);
        m_dbuf.limit(datalen);
        m_dexbuf.position(0);
        m_dexbuf.limit(codecdatalen);
        if (res == CallValue.resource_id_0) {
            CallValue.frame_count++;
        }
        LinkedBlockingQueue<FrameData> decodeQueue = CallValue.decodeMap.get(res);
        FrameData frameData = CallValue.frameDataPool.poll();
        if (frameData == null) {
            frameData = new FrameData();
            // BUGFIX: guard the size log — decodeQueue may be null for an
            // unknown res (it is null-checked before use below).
            LogUtils.i("新建对象 size=" + (decodeQueue != null ? decodeQueue.size() : -1));
        }
        frameData.setIsKeyFrame(isKeyframe);
        frameData.setRes(res);
        frameData.setCodecid(codecid);
        frameData.setW(w);
        frameData.setH(h);
        frameData.setPts(pts);
        frameData.setPacketBuffer(m_dbuf);
        frameData.setCodecbf(m_dexbuf);
        if (decodeQueue != null) {
            // offer() fails once the capacity threshold is reached.
            if (!decodeQueue.offer(frameData)) {
                // Drop the oldest frame, then retry.
                if (decodeQueue.poll() != null) {
                    decodeQueue.offer(frameData);
                }
            }
        }
        // TODO 2023/4/21: cannot be removed entirely yet — still consumed by
        // not-yet-adapted call sites (fiontu, boling, projection).
        EventBus.getDefault().post(new EventMessage.Builder().type(CallBusType.TYPE_VIDEO_DECODE).objects(res, codecid, w, h, datalen, pts, codecdatalen).build());
        return 0;
    }


    /**
     * Native callback for paperless feature notifications.
     *
     * @param type    feature type
     * @param method  method within that feature type
     * @param data    payload; see the paperless interface reference table
     * @param datalen payload length when {@code data} is non-empty
     * @return always 0
     */
    public int callback_method(int type, int method, byte[] data, int datalen) {
        if (type != 1) {
            LogUtils.i("callback_method \ntype:" + type + "  " + ErrorRet.getTypeMessage(type)
                    + "\nmethod:" + method + "  " + ErrorRet.getMethodMessage(method));
        }
        if (type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_UPDATE_VALUE) {
            // Upgrade package download finished — sticky so late subscribers see it.
            EventBus.getDefault().postSticky(new EventMessage.Builder().type(type).method(method).objects(data, datalen).build());
        } else {
            if (CallValue.isTableCardEnable) {
                // Table-card application only handles this subset of notifications.
                if (type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_DEVICEVALIDATE_VALUE
                        || type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_READY_VALUE
                        || type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_DOWNLOAD_VALUE
                        || type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_DEVICECONTROL_VALUE
                        || type == InterfaceMacro.Pb_Type.Pb_TYPE_MEET_INTERFACE_DEVICEINFO_VALUE
                ) {
                    EventBus.getDefault().post(new EventMessage.Builder().type(type).method(method).objects(data, datalen).build());
                }
            } else {
                EventBus.getDefault().post(new EventMessage.Builder().type(type).method(method).objects(data, datalen).build());
            }
        }
        return 0;
    }


}
