#include <sys/types.h>
#include <sys/socket.h>
#include <stdio.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <string>
#include <stdlib.h>
#include <fcntl.h>
#include <vector>
#include <android/bitmap.h>
#include "jnimp4v2.h"
#include "mylog.h"
#include "mp4v2/mp4v2.h"
#include "MP4Encoder.h"
#include "ImageProc.h"
#include "convert_jpeg.h"
#include <libyuv.h>

using std::string;
using std::vector;

//MP4TrackId video;
//MP4TrackId audio;
//MP4FileHandle fileHandle;
////unsigned char sps_pps_640[17] = { 0x67, 0x42, 0x40, 0x1F, 0x96, 0x54, 0x05,
////		0x01, 0xED, 0x00, 0xF3, 0x9E, 0xA0, 0x68, 0xCE, 0x38, 0x80 }; //stored sps and pps
//int video_width = 640;
//int video_height = 480;
//uint8_t ubuffer[2];
////unsigned char sps_pps[17];

// Transport used for the platform upload connection.
enum NETTYPE {
    NETTYPE_TCP,
    NETTYPE_UDP
};

// Total camera channels (indices 0..4; 4 is the MIPI camera, see below).
#define CAMERANUM 5
// Channel index of the MIPI camera.
#define MIPI_CAMID    4

// ---- Per-channel MP4 recording state (guarded by rwlock[id]) ----
MP4Encoder mp4Encoders[CAMERANUM];
MP4FileHandle hMp4files[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};
RWLock rwlock[CAMERANUM];

// ---- Per-channel RTP upload state (guarded by rtp_rwlock[id]) ----
string rtp_ip[CAMERANUM];
int rtp_port[CAMERANUM];
int rtp_datatype[CAMERANUM];      // 0/1 carry video, 0/3 carry audio (see rtp_videosend / mp4packAudio)
int rtp_streamtype[CAMERANUM];    // stream (main/sub) this channel uploads; must match frames passed in
int rtp_sendstatus[CAMERANUM] = {0, 0, 0, 0, 0};            // 1 while an upload is active
int rtp_file_sendstatus1078[CAMERANUM] = {0, 0, 0, 0, 0};
RWLock rtp_rwlock[CAMERANUM];
int rtp_sock[CAMERANUM];
struct sockaddr_in servaddr[CAMERANUM];
unsigned char rtp_sendbuf_PTType[CAMERANUM] = {10, 10, 10, 10,
                                               10};                    // target platform: 1 = Shenzhen transport commission, 3 = Foshan 905, 4 = 1078 device (see rtp_videosend)
unsigned char rtp_sendbuf_ISU[CAMERANUM][6];                    // ISU (device) number placed in packet headers
int rtp_sendbuf_TDH[CAMERANUM] = {0, 0, 0, 0, 0};                    // logical channel number sent to the platform
int rtp_sendbuf_SPLX[CAMERANUM] = {0, 0, 0, 0, 0};                    // video type: 0 = live video, 1 = history/playback video
int rtp_sendbuf_max[CAMERANUM] = {1400, 1400, 1400, 1400,
                                  1400};    // max payload bytes per packet, platform/transport dependent (Shenzhen: 1400, Foshan: unlimited); 0 = unlimited
char *rtp_sendbuf[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};
int rtp_sendbuf_len[CAMERANUM] = {0, 0, 0, 0, 0};
NETTYPE rtp_sendbuf_nettype[CAMERANUM] = {NETTYPE_TCP, NETTYPE_TCP, NETTYPE_TCP, NETTYPE_TCP,
                                          NETTYPE_TCP};                // connection transport per channel: NETTYPE_TCP or NETTYPE_UDP
unsigned short rtp_pkuid[CAMERANUM];   // frame id, incremented once per uploaded frame
unsigned char *rtp_Cmd1209[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};
int rtp_Cmd1209Len[CAMERANUM];
char *rtp_tempbuf[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};   // staging buffer for audio upload (ADTS header + AAC payload)
int rtp_tempbuf_len[CAMERANUM] = {0, 0, 0, 0, 0};
string rtp_files[CAMERANUM];
int rtp_fileKJKT[CAMERANUM];
pthread_t rtp_thread_id[CAMERANUM] = {0, 0, 0, 0, 0};

// ---- Per-channel ffmpeg decode state (JPEG snapshot path) ----
AVCodecContext *pCodecCtx[CAMERANUM];
AVCodecParserContext *pCodecParserCtx[CAMERANUM];
AVCodec *pCodec[CAMERANUM];
AVFrame *pFrame[CAMERANUM];
AVPacket packet[CAMERANUM];
int jpeg_status[CAMERANUM] = {0, 0, 0, 0, 0};
RWLock jpeg_rwlock[CAMERANUM];


// Decoder objects for the MIPI camera's H.264 stream (sub-stream transcode path)
AVCodec *pCodec_H264_MIPI = NULL;
AVCodecContext *pCodecCtx_H264_MIPI = NULL;
AVFrame *pFrame_H264_MIPI = NULL;
AVFrame *pFrameRGB_H264_MIPI = NULL;
SwsContext *img_convert_ctx_H264_MIPI = NULL;
uint8_t *out_buffer_H264_MIPI = NULL;
AVPacket packet_H264_MIPI;

// Re-encoder objects for the MIPI sub-stream
AVCodec *pCodec_H264_MIPI_EN = NULL;
AVCodecContext *pCodecCtx_H264_MIPI_EN = NULL;
AVPacket packet_H264_MIPI_EN;
bool H264_MIPI_INIT = false;   // set once the MIPI decoder/encoder pair is initialized

// One queued snapshot job plus the Java callback to notify on completion.
struct jpegwork {
    int workid;          // caller-supplied job id
    jmethodID callback;  // Java method invoked when the snapshot is done
    string filename;     // output image path
    jobject jobj;        // global ref to the Java receiver object
};
vector<jpegwork> jpeg_workid[CAMERANUM];    // pending snapshot jobs per camera

// ---- JT/T 1078 upload state ----
int rtp_sendbuf_max_1078[CAMERANUM] = {950, 950, 950, 950, 950};    // max payload bytes per 1078 packet (0 = unlimited)
char *rtp_sendbuf_1078[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};
char *buffer_1078[CAMERANUM] = {NULL, NULL, NULL, NULL, NULL};      // temp copy used when stripping prepended SPS/PPS
int rtp_sendbuf_len_1078[CAMERANUM] = {0, 0, 0, 0, 0};
int rtp_sign[CAMERANUM];   // fragmentation flag: 0 atomic, 1 first, 2 last, 3 middle (see send1078Data)
int rtp_sim1;   // 1st BCD byte of the SIM number
int rtp_sim2;   // 2nd BCD byte of the SIM number
int rtp_sim3;   // 3rd BCD byte of the SIM number
int rtp_sim4;   // 4th BCD byte of the SIM number
int rtp_sim5;   // 5th BCD byte of the SIM number
int rtp_sim6;   // 6th BCD byte of the SIM number
int rtp_isSending[CAMERANUM];   // 1 while a 1078 stream is actively being sent
long long timestamp_history;     // timestamp of the previous frame
long long timestamp_history_I;   // timestamp of the previous I-frame
unsigned int pkid_1078[CAMERANUM] = {0, 0, 0, 0, 0};   // 1078 packet sequence number
int is1078device = 0;            // non-zero selects the 1078 upload path in rtp_videosend
int mipiDataisUploading = 0;     // whether MIPI data is being uploaded; when 0, mp4packVideo returns early to skip the upload code

void send1078Data(int id,int currentSize,int datalen_1078,unsigned char *sendbuff_1078,int Frametype,unsigned char *buf_1078);
int writeRTPData(const char *path, unsigned char *data, unsigned int len);


// Number of USB cameras and their capture/convert state.
#define CAMERANUM_USB    4
Imageproc image_video[CAMERANUM_USB];
RWLock rwlock_video[CAMERANUM_USB];
// Wall-clock time in milliseconds since the Unix epoch, as a JNI long.
jlong currentTimeMillis() {
    struct timeval tv;
    gettimeofday(&tv, NULL);
    jlong millis = static_cast<jlong>(tv.tv_sec) * 1000LL;
    millis += tv.tv_usec / 1000;
    return millis;
}

// Gate: non-zero when frames should be handed to face recognition.
int faceStatus = 0;

// Optional hook: draw an RGBA frame back into the Java layer.
void (*NativeDrawFrame)(JNIEnv *env, jobject thiz, jlong addrRgba, jint w, jint h) = NULL;

// Optional hook: count faces in a BGR frame.
void
(*nativeGetFaceNumber)(JNIEnv *env, jobject thiz, jlong addrBGR, jint w, jint h, jint len) = NULL;


pthread_t face_threadid;   // face-recognition worker thread

// Arguments handed to the face-recognition thread.
struct Threadpara {
    JNIEnv *env;//arg 1: JNI environment of the spawning thread
    jobject thiz;//arg 2: Java object to call back into
};

// One buffered MJPEG-decoded BGR image frame.
struct BGRBuffer {
    uint8_t *bgrBuffer;    // pixel data pointer (ownership not shown here - TODO confirm)
    size_t bgrBufferSize;    // byte length of bgrBuffer
    size_t width;    // frame width in pixels
    size_t height;    // frame height in pixels
};

// Queue of BGR frames awaiting processing.
std::vector<BGRBuffer> BGRBufferList;

// JNI entry for video recording: create and initialize the MP4 output file
// for camera channel `id`. Returns the MP4FileHandle as a jlong (0 on failure).
JNIEXPORT jlong JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4init(JNIEnv *env,
                                                              jobject thiz, jint id, jstring title,
                                                              jint w, jint h, jint timeScale,
                                                              jint frameRate, jint A_timeScale,
                                                              jint A_sampleDuration, jint debug) {

    const char *local_title = env->GetStringUTFChars(title, NULL);
    if (local_title == NULL) {
        // OOM / pending exception while pinning the Java string; bail out
        // instead of passing NULL into CreateMP4File.
        return 0;
    }

    rwlock[id].write();
    hMp4files[id] = mp4Encoders[id].CreateMP4File(local_title, w, h, timeScale, frameRate,
                                                  A_timeScale, A_sampleDuration);
    mp4Encoders[id].m_debug = debug;
    rwlock[id].unlock();


    env->ReleaseStringUTFChars(title, local_title);
    // Cast directly to jlong (64-bit) rather than through `long`, which is
    // only 32 bits on 32-bit ABIs.
    return (jlong) hMp4files[id];
}


// Uppercase hex digit lookup table shared by the BCD helpers.
const char DIGITS_UPPER[] = {'0', '1', '2', '3', '4', '5',
                             '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};

// Expand packed-BCD bytes into an uppercase hex string.
// dataDest is zero-filled first, then two characters are written per source
// byte until either the source is exhausted or the destination would
// overflow. Returns the number of characters written.
int BCDtoHex(char *dataDest, int destLen, const unsigned char *dataSrc, int SrcLen) {
    memset(dataDest, 0, destLen);
    int written = 0;
    for (int idx = 0; idx < SrcLen; ++idx) {
        if (written >= destLen - 1) {
            break;   // no room for another two-character pair
        }
        const unsigned char b = dataSrc[idx];
        dataDest[written] = DIGITS_UPPER[(b >> 4) & 0xF];
        dataDest[written + 1] = DIGITS_UPPER[b & 0xF];
        written += 2;
    }
    return written;
}

// Fill `outbcd` with the current local time packed as BCD, one byte per
// two-digit field, ordered YY MM DD hh mm ss with the seconds at the end
// of the buffer. Len is the number of output bytes to fill (6 covers all
// fields).
void getTimeBcd6(unsigned char *outbcd, int Len) {

    time_t t = time(NULL);

    struct tm tmnow;

    // Default to all-zero digits so a localtime_r failure still yields valid
    // BCD instead of packing uninitialized stack bytes.
    char timestr[13] = "000000000000";

    if (NULL != localtime_r(&t, &tmnow)) {
        sprintf(timestr, "%02d%02d%02d%02d%02d%02d", tmnow.tm_year + 1900 - 2000, tmnow.tm_mon + 1,
                tmnow.tm_mday, tmnow.tm_hour, tmnow.tm_min, tmnow.tm_sec);
    }

    // Pack ASCII digit pairs into BCD bytes, walking both the digit string
    // and the output buffer from their ends.
    int j = 12 - 1;
    for (int i = Len - 1; i >= 0 && j > 0; i--) {
        outbcd[i] = (((timestr[j - 1] - 0x30) << 4) + ((timestr[j] - 0x30) % 10));
        j -= 2;
    }
}

// Tear down the RTP upload state for channel `id`: close the socket, mark
// the channel idle, and release the shared send buffer.
void closertp(int id) {
    rtp_rwlock[id].write();
    if (rtp_sock[id] != 0){
        close(rtp_sock[id]);
        rtp_sendstatus[id] = 0;
        rtp_sock[id] = 0;
    }
    // Free the send buffer; it was allocated with new char[] in
    // rtp_videosend, so the array delete form is required.
    if (rtp_sendbuf[id] != NULL) {
        LOGE("closertp 清理内存 清理内存 清理内存 清理内存");
        delete[] rtp_sendbuf[id];   // was plain `delete` (UB on new[])
        rtp_sendbuf[id] = NULL;
        rtp_sendbuf_len[id] = 0;
    }
    LOGI("rtp关闭  id:%d", id);
    rtp_rwlock[id].unlock();
}

// Package one H.264 frame (`buf`, `size` bytes) and upload it to the remote
// platform over the channel's socket.
//   Frametype : 0 = key data (SPS/PPS/IDR), 1 = P/B frame, -1 = unknown
//               (classified below from the NALU types in the buffer)
//   streamtype: stream the data belongs to; must equal rtp_streamtype[id]
//               for the 808/905 path to upload it
void rtp_videosend(int id, unsigned char *buf, int size, int Frametype, int streamtype) {

    // Only channels configured for A/V (0) or video (1) upload video.
    if (rtp_datatype[id] == 0 || rtp_datatype[id] == 1) {
        if (Frametype == -1) {
            // Walk the NALUs to classify the frame; the last NALU's type wins.
            MP4ENC_NaluUnit nalu;
            int pos = 0, len = 0;
            while ((len = MP4Encoder::ReadOneNaluFromBuf(buf, size, pos, nalu)) != 0) {
                if (nalu.type == 0x07) // sps
                {
                    Frametype = 0;
                } else if (nalu.type == 0x08) // pps
                {
                    Frametype = 0;
                } else if (nalu.type == 0x05) // IDR (key frame)
                {
                    Frametype = 0;
                } else // P or B
                {
                    Frametype = 1;
                }
                pos += len;
            }
        }

        long long timestamp = currentTimeMillis();
        unsigned char jdsj[6];   // absolute wall-clock time, packed BCD
        getTimeBcd6(jdsj, 6);
        if (rtp_sendstatus[id] == 1 && streamtype == rtp_streamtype[id] && is1078device == 0) {
            // ----------------------- 808 / 905 platforms -----------------------
            LOGE(" 准备上传数据--->808,905，id=%d,size=%d,pttype=%d", id, size, rtp_sendbuf_PTType[id]);
            if (Frametype != -1) {
                rtp_rwlock[id].write();
                // Size the shared send buffer: payload cap (or the whole frame
                // when uncapped) plus the largest header used below (33 bytes).
                int newlen = rtp_sendbuf_max[id] == 0 ? (size + 33) : (rtp_sendbuf_max[id] + 33);
                if (rtp_sendbuf[id] == NULL) {
                    rtp_sendbuf_len[id] = newlen;
                    rtp_sendbuf[id] = new char[rtp_sendbuf_len[id]];
                } else if (rtp_sendbuf_len[id] < newlen) {
                    delete[] rtp_sendbuf[id];   // array delete: buffer comes from new char[]
                    rtp_sendbuf_len[id] = newlen;
                    rtp_sendbuf[id] = new char[rtp_sendbuf_len[id]];
                }

                unsigned char *sendbuff = (unsigned char *) rtp_sendbuf[id];
                int pkid = 0;      // packet index within this frame
                int datalen = 0;   // payload bytes in the current packet
                if (rtp_sendbuf_PTType[id] == 1) {
                    // Shenzhen platform: 26-byte header + payload per packet.
                    for (int i = 0; i < size || size == 0;) {
                        if (rtp_sendbuf_max[id] == 0)
                            datalen = size - i;
                        else
                            datalen = (i + rtp_sendbuf_max[id] > size) ? (size - i)
                                                                       : rtp_sendbuf_max[id];

                        // packet magic "01cd"
                        sendbuff[0] = 0x30;
                        sendbuff[1] = 0x31;
                        sendbuff[2] = 0x63;
                        sendbuff[3] = 0x64;

                        // packet sequence number within the frame
                        sendbuff[4] = ((pkid >> 8) & 0xFF);
                        sendbuff[5] = (pkid & 0xFF);

                        // frame id
                        sendbuff[6] = ((rtp_pkuid[id] >> 8) & 0xFF);
                        sendbuff[7] = (rtp_pkuid[id] & 0xFF);

                        // data type (frame type)
                        sendbuff[8] = Frametype;

                        // payload type: 98 = H.264
                        sendbuff[9] = 0x62;

                        // 64-bit millisecond timestamp, big-endian
                        sendbuff[10] = (timestamp >> 56) & 0xFF;
                        sendbuff[11] = (timestamp >> 48) & 0xFF;
                        sendbuff[12] = (timestamp >> 40) & 0xFF;
                        sendbuff[13] = (timestamp >> 32) & 0xFF;
                        sendbuff[14] = (timestamp >> 24) & 0xFF;
                        sendbuff[15] = (timestamp >> 16) & 0xFF;
                        sendbuff[16] = (timestamp >> 8) & 0xFF;
                        sendbuff[17] = timestamp & 0xFF;

                        // absolute time (packed BCD)
                        memcpy(&sendbuff[18], jdsj, 6);

                        // payload length
                        sendbuff[24] = ((datalen >> 8) & 0xFF);
                        sendbuff[25] = (datalen & 0xFF);

                        if (datalen > 0) {
                            memcpy(sendbuff + 26, buf + i, datalen);

                            pkid++;
                            i += datalen;
                        }

                        // ship the packet
                        int ret = 0;
                        if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
                            if ((ret = send(rtp_sock[id], sendbuff, datalen + 26, 0)) < 0) {
                                LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                                closertp(id);
                                break;
                            }
                        } else {
                            if ((ret = sendto(rtp_sock[id], sendbuff, datalen + 26, 0,
                                              (struct sockaddr *) &servaddr[id],
                                              sizeof(struct sockaddr))) < 0) {
                                LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                                closertp(id);
                                break;
                            }
                        }

                        if (datalen == 0)
                            break;
                    }
                } else if (rtp_sendbuf_PTType[id] == 3) {
                    // Foshan 905 platform: 33-byte header + payload per packet.
                    for (int i = 0; i < size || size == 0;) {
                        if (rtp_sendbuf_max[id] == 0)
                            datalen = size - i;
                        else
                            datalen = (i + rtp_sendbuf_max[id] > size) ? (size - i)
                                                                       : rtp_sendbuf_max[id];

                        // packet magic "01cd"
                        sendbuff[0] = 0x30;
                        sendbuff[1] = 0x31;
                        sendbuff[2] = 0x63;
                        sendbuff[3] = 0x64;

                        // ISU (device) number
                        memcpy(sendbuff + 4, rtp_sendbuf_ISU[id], 6);

                        // logical channel number
                        sendbuff[10] = rtp_sendbuf_TDH[id];

                        // packet sequence number within the frame
                        sendbuff[11] = ((pkid >> 8) & 0xFF);
                        sendbuff[12] = (pkid & 0xFF);

                        // frame id
                        sendbuff[13] = ((rtp_pkuid[id] >> 8) & 0xFF);
                        sendbuff[14] = (rtp_pkuid[id] & 0xFF);

                        // data type (frame type)
                        sendbuff[15] = Frametype;

                        // payload type: 98 = H.264 (switching to MJPEG, 0x66, for
                        // MJPEG sub-streams was planned here but is disabled)
                        int FZLX = 0x62;
                        sendbuff[16] = FZLX;

                        // 64-bit millisecond timestamp, big-endian
                        sendbuff[17] = (timestamp >> 56) & 0xFF;
                        sendbuff[18] = (timestamp >> 48) & 0xFF;
                        sendbuff[19] = (timestamp >> 40) & 0xFF;
                        sendbuff[20] = (timestamp >> 32) & 0xFF;
                        sendbuff[21] = (timestamp >> 24) & 0xFF;
                        sendbuff[22] = (timestamp >> 16) & 0xFF;
                        sendbuff[23] = (timestamp >> 8) & 0xFF;
                        sendbuff[24] = timestamp & 0xFF;

                        // absolute time (packed BCD)
                        memcpy(&sendbuff[25], jdsj, 6);

                        // payload length
                        sendbuff[31] = ((datalen >> 8) & 0xFF);
                        sendbuff[32] = (datalen & 0xFF);

                        if (datalen > 0) {
                            memcpy(sendbuff + 33, buf + i, datalen);

                            pkid++;
                            i += datalen;
                        }

                        // ship the packet
                        int ret = 0;
                        if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
                            if ((ret = send(rtp_sock[id], sendbuff, datalen + 33, 0)) < 0) {
                                LOGI(" rtp连接断开  ret:%d  id:%d", ret, id);
                                closertp(id);
                                break;
                            }
                        } else {
                            if ((ret = sendto(rtp_sock[id], sendbuff, datalen + 33, 0,
                                              (struct sockaddr *) &servaddr[id],
                                              sizeof(struct sockaddr))) < 0) {
                                LOGI(" rtp连接断开  ret:%d  id:%d", ret, id);
                                closertp(id);
                                break;
                            }
                        }

                        if (datalen == 0)
                            break;
                    }
                }
                LOGI(" rtp视频传输 id:%d   size:%d   包:%d   帧ID:%d", id, size, pkid, rtp_pkuid[id]);
                rtp_rwlock[id].unlock();
                rtp_pkuid[id]++;
            } else {
                LOGI(" 未知数据帧");
            }
        } else {
            // ----------------------- JT/T 1078 device -----------------------
            int currentSize = 0;
            unsigned char *buf_1078 = NULL;
            if (Frametype != -1) {
                // Size the 1078 send buffer: payload cap plus 30-byte header.
                int newlen_1078 = rtp_sendbuf_max_1078[id] == 0 ? (size + 30) : (rtp_sendbuf_max_1078[id] + 30);
                if (rtp_sendbuf_1078[id] == NULL) {
                    rtp_sendbuf_len_1078[id] = newlen_1078;
                    rtp_sendbuf_1078[id] = new char[rtp_sendbuf_len_1078[id]];
                } else if (rtp_sendbuf_len_1078[id] < newlen_1078) {
                    delete[] rtp_sendbuf_1078[id];   // array delete: buffer comes from new char[]
                    rtp_sendbuf_len_1078[id] = newlen_1078;
                    rtp_sendbuf_1078[id] = new char[rtp_sendbuf_len_1078[id]];
                }
                unsigned char *sendbuff_1078 = (unsigned char *) rtp_sendbuf_1078[id];
                int datalen_1078 = 0;

                if (rtp_sendbuf_PTType[id] == 4) {

                    // The first uploaded frame must start a key sequence (Annex-B
                    // start code followed by an SPS NALU, 0x67); afterwards keep
                    // sending while the stream is active.
                    if (((buf[0] & 0xFF) == 0x00 && (buf[1] & 0xFF) == 0x00 && (buf[2] & 0xFF) == 0x00 && (buf[3] & 0xFF) == 0x01 &&
                         (buf[4] & 0xFF) == 0x67 && rtp_isSending[id] == 0) || rtp_isSending[id] == 1) {

                        // MIPI and playback sources also prepend a 27-byte SPS/PPS
                        // block before non-IDR slices (0x41); strip it. The probe
                        // reads buf[27..31], so the frame must be at least 32 bytes
                        // (the old `size > 27` guard could read past the buffer).
                        if(size > 31 && (buf[27] & 0xFF)==0x00 && (buf[28] & 0xFF)==0x00 && (buf[29] & 0xFF)==0x00 && (buf[30] & 0xFF)==0x01 && (buf[31] & 0xFF)==0x41){
                            currentSize = size -27;
                            buffer_1078[id] = new char[currentSize];
                            buf_1078 = (unsigned char *) buffer_1078[id];
                            memcpy(buf_1078,buf+27,currentSize);
                            send1078Data(id,currentSize,datalen_1078,sendbuff_1078,Frametype,buf_1078);
                            // Release the stripped copy (it previously leaked on
                            // every such frame).
                            delete[] buffer_1078[id];
                            buffer_1078[id] = NULL;
                        } else{
                            send1078Data(id,size,datalen_1078,sendbuff_1078,Frametype,buf);
                        }
                    }

                }
            }

        }
    }

}

// Send one (possibly fragmented) JT/T 1078 payload over the channel socket.
//   id            : channel index
//   currentSize   : total payload bytes in buf_1078
//   datalen_1078  : scratch per-packet payload length (caller passes 0)
//   sendbuff_1078 : packet assembly buffer (>= max payload + 30 bytes)
//   Frametype     : 0 = I-frame, 1 = P/B frame
//   buf_1078      : payload data
void send1078Data(int id,int currentSize,int datalen_1078,unsigned char *sendbuff_1078,int Frametype,unsigned char *buf_1078){
    for (int i = 0; i < currentSize || currentSize == 0;) {
        // Millisecond timestamp. Must be 64-bit: the header stores 8 bytes
        // and the >>56..>>32 shifts below were undefined behavior on the old
        // `int` variable, producing garbage in header bytes 16-19.
        long long timestamp = currentTimeMillis();
        if (rtp_sendbuf_max_1078[id] == 0) {
            datalen_1078 = currentSize - i;
        } else {
            datalen_1078 = (i + rtp_sendbuf_max_1078[id] > currentSize) ? (currentSize - i) : rtp_sendbuf_max_1078[id];
        }
        // packet magic "01cd"
        sendbuff_1078[0] = 0x30;
        sendbuff_1078[1] = 0x31;
        sendbuff_1078[2] = 0x63;
        sendbuff_1078[3] = 0x64;

        sendbuff_1078[4] = 0x81;   // 10000001: V P X CC

        // Fragmentation flag: 0 atomic, 1 first, 2 last, 3 middle.
        if (currentSize <= rtp_sendbuf_max_1078[id])
            rtp_sign[id] = 0;
        else if (i == 0 && currentSize > rtp_sendbuf_max_1078[id])
            rtp_sign[id] = 1;
        else if (i == currentSize - (currentSize % rtp_sendbuf_max_1078[id] == 0 ? rtp_sendbuf_max_1078[id] : currentSize % rtp_sendbuf_max_1078[id]))
            rtp_sign[id] = 2;
        else rtp_sign[id] = 3;

        if (rtp_sign[id] == 0 || rtp_sign[id] == 2) {
            sendbuff_1078[5] = 0xE2;   // 11100010: M=1 (frame complete) + PT
        } else {
            sendbuff_1078[5] = 0x62;   // 01100010: M=0 (more fragments) + PT
        }

        // packet sequence number
        sendbuff_1078[6] = ((pkid_1078[id] >> 8) & 0xFF);
        sendbuff_1078[7] = (pkid_1078[id] & 0xFF);

        // SIM number, 6 BCD bytes
        sendbuff_1078[8] = rtp_sim1 & 0xFF;
        sendbuff_1078[9] = rtp_sim2 & 0xFF;
        sendbuff_1078[10] = rtp_sim3 & 0xFF;
        sendbuff_1078[11] = rtp_sim4 & 0xFF;
        sendbuff_1078[12] = rtp_sim5 & 0xFF;
        sendbuff_1078[13] = rtp_sim6 & 0xFF;

        // logical channel number
        sendbuff_1078[14] = rtp_sendbuf_TDH[id];

        // data-type high nibble + fragmentation low nibble
        if (Frametype == 0) {   // I-frame
            sendbuff_1078[15] = rtp_sign[id] & 0xFF;
        } else if (Frametype == 1) {
            sendbuff_1078[15] = ((Frametype << 4) + (rtp_sign[id])) & 0xFF;
        }


        // 64-bit millisecond timestamp, big-endian
        sendbuff_1078[16] = (timestamp >> 56) & 0xFF;
        sendbuff_1078[17] = (timestamp >> 48) & 0xFF;
        sendbuff_1078[18] = (timestamp >> 40) & 0xFF;
        sendbuff_1078[19] = (timestamp >> 32) & 0xFF;
        sendbuff_1078[20] = (timestamp >> 24) & 0xFF;
        sendbuff_1078[21] = (timestamp >> 16) & 0xFF;
        sendbuff_1078[22] = (timestamp >> 8) & 0xFF;
        sendbuff_1078[23] = timestamp & 0xFF;

        // Last I Frame Interval (ms since the previous I-frame)
        sendbuff_1078[24] = ((timestamp - timestamp_history_I) >> 8) & 0xFF;
        sendbuff_1078[25] = (timestamp - timestamp_history_I) & 0xFF;

        // Last Frame Interval (ms since the previous frame)
        sendbuff_1078[26] = ((timestamp - timestamp_history) >> 8) & 0xFF;
        sendbuff_1078[27] = (timestamp - timestamp_history) & 0xFF;

        // payload length
        sendbuff_1078[28] = ((datalen_1078 >> 8) & 0xFF);
        sendbuff_1078[29] = (datalen_1078 & 0xFF);

        if (Frametype == 0){
            timestamp_history_I = timestamp;
        } else{
            timestamp_history = timestamp;
        }

        if (datalen_1078 > 0) {
            memcpy(sendbuff_1078 + 30, buf_1078 + i, datalen_1078);
            // unsigned counter wraps naturally; the old `% 4294967295` was a
            // discarded no-op expression.
            pkid_1078[id]++;
            i += datalen_1078;
        }

        // ship the packet
        int ret = 0;
        if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
            rtp_isSending[id] = 1;
            if ((ret = send(rtp_sock[id], sendbuff_1078, datalen_1078 + 30,0)) < 0) {
                closertp(id);
                rtp_isSending[id] = 0;
                break;
            }
        } else {
            if ((ret = sendto(rtp_sock[id], sendbuff_1078, datalen_1078 + 30, 0,
                              (struct sockaddr *) &servaddr[id],
                              sizeof(struct sockaddr))) < 0) {
                LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                closertp(id);
                break;
            }
        }

        if (datalen_1078 == 0)
            break;
    }
}

/**
 * Encode a single AVFrame (YUV420) to a JPEG file via ffmpeg's mjpeg encoder.
 *
 * @param pFrame   decoded YUV420 frame to save
 * @param width    width of the YUV420 frame
 * @param height   height of the YUV420 frame
 * @param out_file destination image path
 * @return 0 on success, -1 on failure; ffmpeg objects are released on every path
 */
int MyWriteJPEG(AVFrame *pFrame, int width, int height, string out_file) {
    // Container context for the single-image "mjpeg" output format.
    AVFormatContext *pFormatCtx = avformat_alloc_context();

    pFormatCtx->oformat = av_guess_format("mjpeg", NULL, NULL);
    // Open the output file through ffmpeg's I/O layer.
    if (avio_open(&pFormatCtx->pb, out_file.c_str(), AVIO_FLAG_READ_WRITE) < 0) {
        LOGE("Couldn't open output  file  %s.", out_file.c_str());
        avformat_free_context(pFormatCtx);   // previously leaked on this path
        return -1;
    }

    // Single video stream that will hold the JPEG image.
    AVStream *pAVStream = avformat_new_stream(pFormatCtx, 0);
    if (pAVStream == NULL) {
        avio_close(pFormatCtx->pb);          // previously leaked on this path
        avformat_free_context(pFormatCtx);
        return -1;
    }

    // Configure the encoder context owned by the stream.
    AVCodecContext *pCodecCtx = pAVStream->codec;

    pCodecCtx->codec_id = pFormatCtx->oformat->video_codec;
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P;   // full-range YUV, as JPEG expects
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 25;

    // Dump muxer/stream info to the log.
    av_dump_format(pFormatCtx, 0, out_file.c_str(), 1);

    // Find and open the mjpeg encoder.
    AVCodec *pCodec = avcodec_find_encoder(pCodecCtx->codec_id);
    if (!pCodec) {
        LOGE("Codec not found.");
        avio_close(pFormatCtx->pb);          // previously leaked on this path
        avformat_free_context(pFormatCtx);
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        avio_close(pFormatCtx->pb);          // previously leaked on this path
        avformat_free_context(pFormatCtx);
        return -1;
    }

    //Write Header
    avformat_write_header(pFormatCtx, NULL);

    int y_size = pCodecCtx->width * pCodecCtx->height;

    // Allocate a packet generously sized for one encoded image.
    AVPacket pkt;
    av_new_packet(&pkt, y_size * 3);

    int got_picture = 0;
    int ret = avcodec_encode_video2(pCodecCtx, &pkt, pFrame, &got_picture);
    if (ret < 0) {
        LOGE("Encode Error.\n");
        av_free_packet(&pkt);                // previously leaked on this path
        avcodec_close(pAVStream->codec);
        avio_close(pFormatCtx->pb);
        avformat_free_context(pFormatCtx);
        return -1;
    }
    if (got_picture == 1) {
        //pkt.stream_index = pAVStream->index;
        ret = av_write_frame(pFormatCtx, &pkt);
    }

    av_free_packet(&pkt);

    //Write Trailer
    av_write_trailer(pFormatCtx);

    LOGI("Encode Successful %s.", out_file.c_str());

    if (pAVStream) {
        avcodec_close(pAVStream->codec);
    }
    avio_close(pFormatCtx->pb);
    avformat_free_context(pFormatCtx);

    return 0;
}

extern uint8_t ubuffer[2];

/**
 * Prepend a 7-byte ADTS header describing one raw AAC packet. Needed
 * because the MediaCodec encoder emits raw AAC frames without framing.
 *
 * Note: packetLen must already include the 7 ADTS header bytes
 * (i.e. raw AAC payload length + 7).
 */
void addADTStoPacket(unsigned char *packet, int packetLen) {
    // Fixed stream parameters used by this product:
    const int profile = 2;  // AAC LC (MediaCodecInfo.CodecProfileLevel.AACObjectLC)
    const int freqIdx = 8;  // index into the MPEG-4 sample-rate table below: 8 -> 16000 Hz
                            // (original comment claimed 32K, which would be index 5 —
                            //  NOTE(review): verify against the encoder's actual rate)
    const int chanCfg = 1;  // channel_configuration 1 = mono, front-center per the
                            // table below (original comment claimed stereo — verify)

    /* MPEG-4 sampling-rate table (avpriv_mpeg4audio_sample_rates, index -> Hz):
       96000, 88200, 64000, 48000, 44100, 32000,
       24000, 22050, 16000, 12000, 11025, 8000, 7350
       channel_configuration values:
       0: defined in AOT Specific Config
       1: 1 channel: front-center
       2: 2 channels: front-left, front-right
       3: 3 channels: front-center, front-left, front-right
       4: 4 channels: front-center, front-left, front-right, back-center
       5: 5 channels: front-center, front-left, front-right, back-left, back-right
       6: 6 channels: front-center, front-left, front-right, back-left, back-right, LFE
       7: 8 channels: front-center, front-left, front-right, side-left, side-right,
          back-left, back-right, LFE
       8-15: reserved
    */

    // Assemble the header fields, then write the seven bytes.
    const int byte2 = ((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2);
    const int byte3 = ((chanCfg & 3) << 6) + (packetLen >> 11);
    const int byte4 = (packetLen & 0x7FF) >> 3;
    const int byte5 = ((packetLen & 7) << 5) + 0x1F;

    packet[0] = (unsigned char) 0xFF;   // syncword, high 8 bits
    packet[1] = (unsigned char) 0xF9;   // syncword low bits, MPEG-2 ID, no CRC
    packet[2] = (unsigned char) byte2;  // profile + sample-rate index + chanCfg high bit
    packet[3] = (unsigned char) byte3;  // chanCfg low bits + frame-length high bits
    packet[4] = (unsigned char) byte4;  // frame-length middle bits
    packet[5] = (unsigned char) byte5;  // frame-length low bits + buffer fullness
    packet[6] = (unsigned char) 0xFC;   // buffer fullness low bits, 1 AAC frame
}

//添加音频帧的方法
int mp4packAudio(int id, jlong pdata, int size) {
    unsigned char *buf = (unsigned char *) pdata;

    int ret = 0;
    //rwlock[id].read();
    rwlock[id].write();
    if (hMp4files[id] != NULL) {
        ret = mp4Encoders[id].WriteAACData(hMp4files[id], buf, size);
    }
    rwlock[id].unlock();

    //上传音频
    if (rtp_sendstatus[id] == 1 && rtp_sendbuf_SPLX[id] == 0 &&
        (rtp_datatype[id] == 0 || rtp_datatype[id] == 3) && size > 0) {

        //size -= 7;
        //buf += 7;
        //if (rtp_tempbuf[id] == NULL)
        //{
        //	rtp_tempbuf_len[id] = (size + 4) * 2;
        //	rtp_tempbuf[id] = new char[rtp_tempbuf_len[id]];
        //} else if (rtp_tempbuf_len[id] < (size + 4))
        //{
        //	delete rtp_tempbuf[id];
        //	rtp_tempbuf_len[id] = (size + 4) * 2;
        //	rtp_tempbuf[id] = new char[rtp_tempbuf_len[id]];
        //}
        //memcpy(rtp_tempbuf[id] + 4, buf, size);

        //rtp_tempbuf[id][0] = 0x00;
        //rtp_tempbuf[id][1] = 0x10;
        //rtp_tempbuf[id][2] = (size & 0x1fe0) >> 5;
        //rtp_tempbuf[id][3] = (size & 0x1f) << 3;

        //size += 4;

        if (rtp_tempbuf[id] == NULL) {
            rtp_tempbuf_len[id] = (size + 7) * 2;
            rtp_tempbuf[id] = new char[rtp_tempbuf_len[id]];
        } else if (rtp_tempbuf_len[id] < (size + 4)) {
            delete rtp_tempbuf[id];
            rtp_tempbuf_len[id] = (size + 7) * 2;
            rtp_tempbuf[id] = new char[rtp_tempbuf_len[id]];
        }
        addADTStoPacket((unsigned char *) rtp_tempbuf[id], size + 7);
        memcpy(rtp_tempbuf[id] + 7, buf, size);
        size += 7;

        int Frametype = 3;
        jlong timestamp = currentTimeMillis();
        unsigned char jdsj[6];
        getTimeBcd6(jdsj, 6);
        unsigned char *sendbuff = (unsigned char *) rtp_sendbuf[id];
        int pkid = 0;
        int datalen = 0;
        rtp_rwlock[id].write();
        if (rtp_sendbuf_PTType[id] == 1) {
            for (int i = 0; i < size;) {
                if (rtp_sendbuf_max[id] == 0)
                    datalen = size - i;
                else
                    datalen = (i + rtp_sendbuf_max[id] > size) ? (size - i) : rtp_sendbuf_max[id];

                //包头
                sendbuff[0] = 0x30;
                sendbuff[1] = 0x31;
                sendbuff[2] = 0x63;
                sendbuff[3] = 0x64;

                //包序号
                sendbuff[4] = ((pkid >> 8) & 0xFF);
                sendbuff[5] = (pkid & 0xFF);

                //帧ID
                sendbuff[6] = ((rtp_pkuid[id] >> 8) & 0xFF);
                sendbuff[7] = (rtp_pkuid[id] & 0xFF);

                //数据类型
                sendbuff[8] = Frametype;             //

                //负载类型
                sendbuff[9] = 24;             //LCACC

                //时间戳
                sendbuff[10] = (timestamp >> 56) & 0xFF;
                sendbuff[11] = (timestamp >> 48) & 0xFF;
                sendbuff[12] = (timestamp >> 40) & 0xFF;
                sendbuff[13] = (timestamp >> 32) & 0xFF;
                sendbuff[14] = (timestamp >> 24) & 0xFF;
                sendbuff[15] = (timestamp >> 16) & 0xFF;
                sendbuff[16] = (timestamp >> 8) & 0xFF;
                sendbuff[17] = timestamp & 0xFF;

                //绝对时间
                memcpy(&sendbuff[18], jdsj, 6);

                //数据体长度
                sendbuff[24] = ((datalen >> 8) & 0xFF);
                sendbuff[25] = (datalen & 0xFF);

                memcpy(sendbuff + 26, rtp_tempbuf[id] + i, datalen);

                pkid++;
                i += datalen;

                //发送数据
                int ret = 0;
                if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
                    if ((ret = send(rtp_sock[id], sendbuff, datalen + 26, 0)) < 0) {
                        LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                        closertp(id);
                        break;
                    }
                } else {
                    if ((ret = sendto(rtp_sock[id], sendbuff, datalen + 26, 0,
                                      (struct sockaddr *) &servaddr[id], sizeof(struct sockaddr))) <
                        0) {
                        LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                        closertp(id);
                        break;
                    }
                }
            }
        } else if (rtp_sendbuf_PTType[id] == 3) {
            for (int i = 0; i < size;) {
                if (rtp_sendbuf_max[id] == 0)
                    datalen = size - i;
                else
                    datalen = (i + rtp_sendbuf_max[id] > size) ? (size - i) : rtp_sendbuf_max[id];

                //包头
                sendbuff[0] = 0x30;
                sendbuff[1] = 0x31;
                sendbuff[2] = 0x63;
                sendbuff[3] = 0x64;

                //ISU编号
                memcpy(sendbuff + 4, rtp_sendbuf_ISU[id], 6);

                //通道号
                sendbuff[10] = rtp_sendbuf_TDH[id];

                //包序号
                sendbuff[11] = ((pkid >> 8) & 0xFF);
                sendbuff[12] = (pkid & 0xFF);

                //帧ID
                sendbuff[13] = ((rtp_pkuid[id] >> 8) & 0xFF);
                sendbuff[14] = (rtp_pkuid[id] & 0xFF);

                //数据类型
                sendbuff[15] = Frametype;             //

                //负载类型
                sendbuff[16] = 24;        //LCACC

                //时间戳
                sendbuff[17] = (timestamp >> 56) & 0xFF;
                sendbuff[18] = (timestamp >> 48) & 0xFF;
                sendbuff[19] = (timestamp >> 40) & 0xFF;
                sendbuff[20] = (timestamp >> 32) & 0xFF;
                sendbuff[21] = (timestamp >> 24) & 0xFF;
                sendbuff[22] = (timestamp >> 16) & 0xFF;
                sendbuff[23] = (timestamp >> 8) & 0xFF;
                sendbuff[24] = timestamp & 0xFF;

                //绝对时间
                memcpy(&sendbuff[25], jdsj, 6);

                //数据体长度
                sendbuff[31] = ((datalen >> 8) & 0xFF);
                sendbuff[32] = (datalen & 0xFF);

                memcpy(sendbuff + 33, rtp_tempbuf[id] + i, datalen);

                pkid++;
                i += datalen;

                //发送数据
                int ret = 0;
                if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
                    if ((ret = send(rtp_sock[id], sendbuff, datalen + 33, 0)) < 0) {
                        LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                        closertp(id);
                        break;
                    }
                } else {
                    if ((ret = sendto(rtp_sock[id], sendbuff, datalen + 33, 0,
                                      (struct sockaddr *) &servaddr[id], sizeof(struct sockaddr))) <
                        0) {
                        LOGI("rtp连接断开  ret:%d  id:%d", ret, id);
                        closertp(id);
                        break;
                    }
                }
            }
        }
        LOGI("rtp音频传输  id:%d   %d", id, size);
        rtp_rwlock[id].unlock();
        rtp_pkuid[id]++;
    }
    return ret;
}

// Process-wide JavaVM cached in JNI_OnLoad; worker threads (e.g. NotifyCallJava)
// use it to attach themselves and obtain a JNIEnv.
static JavaVM *s_jVM = NULL;

/*
* Standard JNI entry point, invoked by the runtime when this library is loaded.
* Caches the JavaVM pointer for later thread attachment.
*
* Returns the JNI version this library targets (JNI_VERSION_1_4).
*/
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    // Remember the VM so native worker threads can attach later.
    s_jVM = vm;
    LOGE("JNI_OnLoad");
    return JNI_VERSION_1_4;
}

/*
 * Delivers the result of a JPEG-snapshot job back to Java by invoking the
 * jpegcallback method captured in the work item.  Runs on a native capture
 * thread, so a JNIEnv has to be obtained via AttachCurrentThread.
 *   it  - work item holding the callback jmethodID, the global object ref
 *         and the target filename
 *   id  - camera index, forwarded to the callback
 *   ret - result code of the JPEG write, forwarded to the callback
 * The thread is deliberately left attached (see the commented-out Detach
 * below, presumably to avoid re-attach cost on every frame); because of
 * that, local references must be released explicitly.
 */
static void NotifyCallJava(vector<jpegwork>::iterator it, int id, int ret) {
    JNIEnv *env;

    // Obtain a JNIEnv for the current native thread.
    if (s_jVM->AttachCurrentThread(&env, NULL) != JNI_OK) {
        LOGE("%s: AttachCurrentThread()  failed", __FUNCTION__);
        return;
    }
    jstring str1 = env->NewStringUTF(it->filename.c_str());
    env->CallVoidMethod(it->jobj, it->callback, str1, id, it->workid, ret); // invoke the Java callback

    // BUGFIX: release the local string ref.  Since this thread stays attached,
    // leaked local refs would accumulate until the local-reference table
    // overflows.  (Also removed the unused "error:" label.)
    env->DeleteLocalRef(str1);
    env->DeleteGlobalRef(it->jobj);
    // Intentionally not detaching the thread:
    /*     if(s_jVM->DetachCurrentThread() != JNI_OK)
    {
    LOGE("%s: DetachCurrentThread() failed", __FUNCTION__);
    }*/
    return;
}

// Packs one encoded frame coming from the capture pipeline.
//   id       - camera/channel index (MIPI_CAMID == 4 is the MIPI camera)
//   pdata    - address of the frame bytes, smuggled through a long long
//   size     - frame length in bytes
//   mIsAudio - true for audio frames (delegated to mp4packAudio)
// The frame is (1) appended to the per-camera MP4 file, (2) forwarded to the
// RTP upload when a live session is active — for the MIPI camera the main
// stream is first decoded and re-encoded to a 640x480 sub stream — and
// (3) fed to the snapshot decoder when a JPEG request is pending.
void mp4packVideo(int id, long long pdata, int size, bool mIsAudio) {
    if (mIsAudio) {
        mp4packAudio(id, pdata, size);
        return;
    }

    unsigned char *buf = (unsigned char *) pdata;

    int ret = 0;
    // Write into the MP4 container under the writer lock so close/open of the
    // file handle cannot race with the write.
    rwlock[id].write();
    //rwlock[id].read();
    if (hMp4files[id] != NULL) {
        ret = mp4Encoders[id].WriteH264Data(hMp4files[id], buf, size);
    }
    rwlock[id].unlock();

    if (mipiDataisUploading == 0 && id == 4){   // MIPI upload is gated here; USB is gated in the Camparameter class. Skips the unnecessary work below when nothing is being uploaded.
        return;
    }

    if (id == MIPI_CAMID && rtp_sendstatus[id] == 1 && rtp_streamtype[id] == 0 && rtp_sendbuf_SPLX[id] == 0) {
        int sub_width = 640, sub_height = 480;
        // Transcode to a sub stream: decode the main stream and re-encode at a
        // lower resolution.  One-shot lazy init ("while" used so "break" can
        // abort on any failure; H264_MIPI_INIT stays false and init is retried
        // on the next frame).
        while (!H264_MIPI_INIT) {
            av_register_all();
            avcodec_register_all();

            // find the H.264 decoder
            pCodec_H264_MIPI = avcodec_find_decoder(AV_CODEC_ID_H264);
            if (pCodec_H264_MIPI == NULL) {
                LOGE("avcode find decoder failed!");
                break;
            }

            pCodecCtx_H264_MIPI = avcodec_alloc_context3(pCodec_H264_MIPI);
            if (pCodecCtx_H264_MIPI == NULL) {
                LOGE("avcodec_alloc_context3 failed!");
                break;
            }

            /* we do not send complete frames */
            if (pCodec_H264_MIPI->capabilities & AV_CODEC_CAP_TRUNCATED)
                pCodecCtx_H264_MIPI->flags |= AV_CODEC_FLAG_TRUNCATED;

            // open the decoder
            if (avcodec_open2(pCodecCtx_H264_MIPI, pCodec_H264_MIPI, NULL) < 0) {
                LOGE("avcode open failed!");
                break;
            }

            // allocate the reusable frames
            pFrame_H264_MIPI = av_frame_alloc();
            pFrameRGB_H264_MIPI = av_frame_alloc();

            av_init_packet(&packet_H264_MIPI);
            packet_H264_MIPI.size = 0;
            packet_H264_MIPI.data = NULL;

            // find the H.264 encoder (requires ffmpeg built with x264)
            pCodec_H264_MIPI_EN = avcodec_find_encoder(AV_CODEC_ID_H264);
            if (pCodec_H264_MIPI_EN == NULL) {
                LOGE("avcode find encoder failed!");
                break;
            }

            pCodecCtx_H264_MIPI_EN = avcodec_alloc_context3(pCodec_H264_MIPI_EN);
            if (pCodecCtx_H264_MIPI_EN == NULL) {
                LOGE("avcodec_alloc_context3 failed!");
                break;
            }

            /* put sample parameters */
            pCodecCtx_H264_MIPI_EN->bit_rate = sub_width * sub_height * 3;
            /* resolution must be a multiple of two */
            pCodecCtx_H264_MIPI_EN->width = sub_width;
            pCodecCtx_H264_MIPI_EN->height = sub_height;
            /* frames per second */
            pCodecCtx_H264_MIPI_EN->time_base.num = 1;
            pCodecCtx_H264_MIPI_EN->time_base.den = 15;
            pCodecCtx_H264_MIPI_EN->gop_size = 10; /* emit one intra frame every ten frames */
            pCodecCtx_H264_MIPI_EN->pix_fmt = AV_PIX_FMT_YUV420P;//AV_PIX_FMT_YUYV422;

            //H264 codec param
            //pCodecCtx->me_range = 16;
            //pCodecCtx->max_qdiff = 4;
            //pCodecCtx->qcompress = 0.6;
            pCodecCtx_H264_MIPI_EN->qmin = 10;
            pCodecCtx_H264_MIPI_EN->qmax = 51;
            //Optional Param
            pCodecCtx_H264_MIPI_EN->max_b_frames = 3;
            // Set H264 preset and tune — critical! Without these the output
            // picture is corrupted.
            AVDictionary *param = 0;
            av_dict_set(&param, "preset", "ultrafast", 0);
            av_dict_set(&param, "tune", "zerolatency", 0);

            // open the encoder
            if (avcodec_open2(pCodecCtx_H264_MIPI_EN, pCodec_H264_MIPI_EN, &param) < 0) {
                LOGE("avcode open failed!");
                break;
            }

            H264_MIPI_INIT = true;
        }

        int got;
        packet_H264_MIPI.data = buf;
        packet_H264_MIPI.size = size;
        // NOTE: this "ret" shadows the outer ret (the MP4 write result).
        int ret = avcodec_decode_video2(pCodecCtx_H264_MIPI, pFrame_H264_MIPI, &got, &packet_H264_MIPI);  //decode to AV_PIX_FMT_YUVJ420P
        if (ret < 0) {
            LOGE("decodec error");
        }

        if (got) {
            // Lazily build the scaler (target is YUVJ420P at 640x480, despite
            // the original "RGB24" comment) and the output buffer.
            if (img_convert_ctx_H264_MIPI == NULL) {
                img_convert_ctx_H264_MIPI = sws_getContext(pCodecCtx_H264_MIPI->width,
                                                           pCodecCtx_H264_MIPI->height,
                                                           pCodecCtx_H264_MIPI->pix_fmt, sub_width,
                                                           sub_height, AV_PIX_FMT_YUVJ420P,
                                                           SWS_BICUBIC, NULL, NULL, NULL);
                out_buffer_H264_MIPI = new uint8_t[avpicture_get_size(AV_PIX_FMT_YUVJ420P,
                                                                      sub_width, sub_height)];
                avpicture_fill((AVPicture *) pFrameRGB_H264_MIPI, out_buffer_H264_MIPI,
                               AV_PIX_FMT_YUVJ420P, sub_width, sub_height);
            }
            pFrameRGB_H264_MIPI->width = sub_width;
            pFrameRGB_H264_MIPI->height = sub_height;
            sws_scale(img_convert_ctx_H264_MIPI, (const uint8_t *const *) pFrame_H264_MIPI->data,
                      pFrame_H264_MIPI->linesize, 0, pCodecCtx_H264_MIPI->height,
                      pFrameRGB_H264_MIPI->data, pFrameRGB_H264_MIPI->linesize);

            packet_H264_MIPI_EN.data = NULL;
            packet_H264_MIPI_EN.size = 0;
            av_init_packet(&packet_H264_MIPI_EN);
            //pFrameRGB_H264_MIPI->pict_type = AV_PICTURE_TYPE_I;
            ret = avcodec_encode_video2(pCodecCtx_H264_MIPI_EN, &packet_H264_MIPI_EN,
                                        pFrameRGB_H264_MIPI, &got);

            if (ret == 0) {
                buf = packet_H264_MIPI_EN.data;
                size = packet_H264_MIPI_EN.size;
                // upload the re-encoded sub-stream frame
                rtp_videosend(id, buf, size, -1, 0);
                av_free_packet(&packet_H264_MIPI_EN);
            }
        }

    } else if (rtp_sendbuf_SPLX[id] == 0){
       // LOGI("reached this branch");
        // Not the MIPI sub-stream case: upload the frame as-is (main stream).
        rtp_videosend(id, buf, size, -1, 1);
        }

    // Snapshot path: decode the next I-frame into a JPEG for each queued job.
    // NOTE: "buf"/"size" may now point at the re-encoded sub-stream packet
    // (freed above) rather than the original frame — TODO confirm intended.
    jpeg_rwlock[id].write();
    if (jpeg_status[id] == 1) {
        uint8_t *cur_ptr = buf;
        int cur_size = 0;

        MP4ENC_NaluUnit nalu;
        int pos = 0, len = 0;
        while (len = MP4Encoder::ReadOneNaluFromBuf(cur_ptr, size, pos, nalu)) {
            if (nalu.type == 0x05) // I
            {
                // Snapshots are taken only from I-frames; other frame types
                // may produce corrupted pictures.
                cur_size = size;
                break;
            }
            pos += len;
        }

        int got;

        //LOGE("id = %d  jpeg_status[id] = %d  cur_size = %d", id, jpeg_status[id], cur_size);
        while (jpeg_status[id] == 1 && jpeg_workid[id].size() > 0 && cur_size > 0) {
            /* returns the number of bytes parsed */
            int len = av_parser_parse2(pCodecParserCtx[id], pCodecCtx[id], &packet[id].data,
                                       &packet[id].size, cur_ptr, cur_size,
                                       AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
            cur_ptr += len;
            cur_size -= len;
            if (packet[id].size == 0)
                continue;

            ret = avcodec_decode_video2(pCodecCtx[id], pFrame[id], &got, &packet[id]);
            if (ret < 0) {
                LOGE("decodec error");
                jpeg_rwlock[id].unlock();
                return;
            }

            if (got) {
                // Serve exactly one queued snapshot job per decoded frame.
                for (vector<jpegwork>::iterator it = jpeg_workid[id].begin();
                     it != jpeg_workid[id].end();) {
                    ret = MyWriteJPEG(pFrame[id], pCodecCtx[id]->width, pCodecCtx[id]->height,
                                      it->filename);
                    // LOGE("id = %d, jpeg_status[id] = %d, workid = %d", id, jpeg_status[id], it->workid);

                    NotifyCallJava(it, id, ret);

                    jpeg_workid[id].erase(it);
                    break;
                }

                // tear down the snapshot decoder once the queue is drained
                if (jpeg_workid[id].size() == 0) {
                    av_free_packet(&packet[id]);
                    av_frame_free(&pFrame[id]);
                    //avpicture_free(&picture[id]);
                    //sws_freeContext(pSwsCtx[id]);
                    avcodec_free_context(&pCodecCtx[id]);
                    av_parser_close(pCodecParserCtx[id]);
                    jpeg_status[id] = 0;
                }
            }
        }
    }
    jpeg_rwlock[id].unlock();
}

// Receives one already-encoded H.264 frame from Java and pushes it straight
// onto the RTP upload path for the given channel.
JNIEXPORT void JNICALL
Java_com_hxrk_jni_Mp4v2Native_mipiSendH264Data(JNIEnv *env, jobject thiz,jint id, jbyteArray data, jint size, jint Frametype, jint streamtype){
    // BUGFIX: the second argument of GetByteArrayElements is a jboolean* out
    // parameter; the old code passed the jboolean constant JNI_FALSE, which
    // only compiled because it expands to 0.  Pass NULL explicitly.
    unsigned char *buf = (unsigned char *) env->GetByteArrayElements(data, NULL);

    rtp_videosend(id, buf, size, Frametype, streamtype);
    //long long pdata = (long long)buf;
   // mp4packVideo( id, pdata,  size, false);
    env->ReleaseByteArrayElements(data, (jbyte *) buf, 0);
}

// Hands the address of the native frame packer back to Java as an opaque
// handle, so another native component can call it directly.
JNIEXPORT jlong JNICALL Java_com_hxrk_jni_Mp4v2Native_getmipiproc(JNIEnv *env, jobject thiz) {
    typedef void (*PackProc)(int id, long long pdata, int len, bool mIsAudio);
    PackProc proc = mp4packVideo;
    return (long long) proc;
}


//添加视频帧的方法
// Appends one video frame (pointed to by pdata) to camera id's recording.
// Thin JNI shim over the shared native packer.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4packVideo(JNIEnv *env,
                                                                  jobject thiz, jint id,
                                                                  jlong pdata, jint size) {
    const bool isAudio = false;
    mp4packVideo(id, pdata, size, isAudio);
    return SUCCESS_LOCAL;
}

// byte[]-based variant: pins the Java array and forwards its address to the
// jlong-based entry point above.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4packVideo2(JNIEnv *env,
                                                                   jobject thiz, jint id,
                                                                   jbyteArray data, jint size) {
    jbyte *pinned = env->GetByteArrayElements(data, JNI_FALSE);
    Java_com_hxrk_jni_Mp4v2Native_mp4packVideo(env, thiz, id, (jlong) pinned, size);
    env->ReleaseByteArrayElements(data, pinned, 0);
    return SUCCESS_LOCAL;
}

//子码流
// Sub-stream frames: uploaded over RTP only, never written into the MP4 file.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4packVideo2sub(JNIEnv *env,
                                                                      jobject thiz, jint id,
                                                                      jbyteArray data, jint size) {
    jbyte *pinned = env->GetByteArrayElements(data, JNI_FALSE);
    // Only forward while live video (SPLX == 0, i.e. not playback) is active.
    if (rtp_sendbuf_SPLX[id] == 0) {
        rtp_videosend(id, (unsigned char *) pinned, size, -1, 0);
    }
    env->ReleaseByteArrayElements(data, pinned, 0);
    return SUCCESS_LOCAL;
}

//添加音频帧的方法
// byte[]-based audio variant: pins the Java array and delegates to the
// jlong-based audio entry point.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4packAudio2(JNIEnv *env,
                                                                   jobject thiz, jint id,
                                                                   jbyteArray data, jint size) {
    jbyte *pinned = env->GetByteArrayElements(data, JNI_FALSE);
    Java_com_hxrk_jni_Mp4v2Native_mp4packAudio(env, thiz, id, (jlong) pinned, size);
    env->ReleaseByteArrayElements(data, pinned, 0);
    return SUCCESS_LOCAL;
}

//添加音频帧的方法
// Appends one audio frame (pointed to by pdata) to camera id's recording.
// Thin JNI shim over the native audio packer.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4packAudio(JNIEnv *env,
                                                                  jobject thiz, jint id,
                                                                  jlong pdata, jint size) {
    const int rc = mp4packAudio(id, pdata, size);
    return rc;
}

//视频录制结束调用
// Called when recording ends: finalizes camera id's MP4 container.  The
// writer lock guarantees no frame is appended to a half-closed file; the
// handle is nulled so mp4packVideo stops writing.
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_mp4close(JNIEnv *env,
                                                              jobject thiz, jint id) {
    rwlock[id].write();
    mp4Encoders[id].CloseMP4File(hMp4files[id]);
    hMp4files[id] = NULL;
    rwlock[id].unlock();
    return 0;
}

// Recomputes whether USB camera `id` should produce the low-resolution sub
// stream: only while a live (SPLX == 0) upload of the sub stream
// (streamtype == 0) is actually sending (sendstatus == 1).
void updateVideosub(int id) {
    if (id >= 0 && id < CAMERANUM_USB) {
        // The condition is already a bool; the old `? true : false` was redundant.
        image_video[id].substream = (rtp_sendstatus[id] == 1 && rtp_streamtype[id] == 0 &&
                                     rtp_sendbuf_SPLX[id] == 0);
    }
}

//开始rtp数据上传
// Starts an RTP upload session for channel `id`: records the session
// parameters, opens a TCP or UDP socket to ip:port (udpport for UDP),
// escapes and sends the 0x1209 media-registration message (unless the
// device speaks JT/T 1078, is1078 == 1, where registration is skipped),
// and marks the channel as sending on success.
// Returns the connect() result (0/-1 for TCP, 0 for UDP).
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_RTPstart(JNIEnv *env, jobject thiz, jint id, jstring ip, jint port,
                                       jint udpport, jint datatype, jint streamtype, jint nettype,
                                       jint PTType, jint maxpk, jbyteArray cmd1209, jint is1078) {
    is1078device = is1078;
    const char *ip1 = env->GetStringUTFChars(ip, NULL);
    rtp_ip[id] = ip1;
    env->ReleaseStringUTFChars(ip, ip1);

    rtp_datatype[id] = datatype;
    rtp_streamtype[id] = streamtype;

    rtp_sendbuf_PTType[id] = PTType;
    // BUGFIX: was `rtp_isSending[id] == 0;` — a no-op comparison instead of
    // the intended reset of the sending flag.
    rtp_isSending[id] = 0;

    if (id == 4) mipiDataisUploading = 1;   // MIPI camera: enable upload gating

    // 0x1209 media-registration message: keep a private copy of the payload.
    unsigned char *buf = (unsigned char *) env->GetByteArrayElements(cmd1209, JNI_FALSE);
    rtp_Cmd1209Len[id] = env->GetArrayLength(cmd1209);
    if (rtp_Cmd1209[id] != NULL) {
        free(rtp_Cmd1209[id]);
        rtp_Cmd1209[id] = NULL;
    }
    rtp_Cmd1209[id] = (unsigned char *) malloc(rtp_Cmd1209Len[id]);
    memcpy(rtp_Cmd1209[id], buf, rtp_Cmd1209Len[id]);
    env->ReleaseByteArrayElements(cmd1209, (jbyte *) buf, 0);

    rtp_rwlock[id].write();
    if (rtp_Cmd1209Len[id] == 41) {
        // Extract the terminal (ISU) id, logical channel number and video
        // type (0 = live, 1 = history) from fixed offsets of the message.
        memcpy(rtp_sendbuf_ISU[id], rtp_Cmd1209[id] + 4, 6);

        rtp_sendbuf_TDH[id] = rtp_Cmd1209[id][36];
        rtp_sendbuf_SPLX[id] = rtp_Cmd1209[id][39];
    }

    rtp_sendbuf_nettype[id] = nettype == 0 ? NETTYPE_TCP : NETTYPE_UDP;
    // open the socket
    if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
        rtp_port[id] = port;
        rtp_sock[id] = socket(AF_INET, SOCK_STREAM, 0);    // TCP
    } else {
        rtp_port[id] = udpport;
        rtp_sock[id] = socket(AF_INET, SOCK_DGRAM, 0);    // UDP
    }

    LOGI("RTP_start,id=%d",id);

    // fill in the server address
    memset(&servaddr[id], 0, sizeof(servaddr[id]));
    servaddr[id].sin_family = AF_INET;
    servaddr[id].sin_port = htons(rtp_port[id]);  // server port
    servaddr[id].sin_addr.s_addr = inet_addr(rtp_ip[id].c_str());  // server ip

    // connect (TCP only; UDP is connectionless): 0 on success, -1 on error
    int ret = 0;
    if (rtp_sendbuf_nettype[id] != NETTYPE_TCP ||
        (ret = connect(rtp_sock[id], (struct sockaddr *) &servaddr[id], sizeof(struct sockaddr))) >=
        0) {
        LOGI("lyj 连接到RTP服务器 id:%d  网络类型:%s  网络数据包大小：%d  连接%s", id,
             rtp_sendbuf_nettype[id] == NETTYPE_TCP ? "TCP" : "UDP", maxpk, ret < 0 ? "失败" : "成功");
        rtp_sendbuf_max_1078[id] = maxpk;

        // (Re)allocate the shared send buffer if absent or too small.
        if (rtp_sendbuf[id] == NULL) {
            rtp_sendbuf_len[id] = 1426;
            rtp_sendbuf[id] = new char[rtp_sendbuf_len[id]];
        } else if (rtp_sendbuf_len[id] < 1426) {
            // BUGFIX: buffer is allocated with new[], so it must be released
            // with delete[] (plain delete is undefined behavior here).
            delete[] rtp_sendbuf[id];
            rtp_sendbuf_len[id] = 1426;
            rtp_sendbuf[id] = new char[rtp_sendbuf_len[id]];
        }

        int k = 0;
        // Frame the message with 0x7E and escape 0x7E/0x7D per the protocol.
        rtp_sendbuf[id][k++] = 0x7E;
        for (int i = 0; i < rtp_Cmd1209Len[id]; i++) {
            if (rtp_Cmd1209[id][i] == 0x7E) {
                rtp_sendbuf[id][k++] = 0x7D;
                rtp_sendbuf[id][k++] = 0x02;
            } else if (rtp_Cmd1209[id][i] == 0x7D) {
                rtp_sendbuf[id][k++] = 0x7D;
                rtp_sendbuf[id][k++] = 0x01;
            } else {
                rtp_sendbuf[id][k++] = rtp_Cmd1209[id][i];
            }
        }
        rtp_sendbuf[id][k++] = 0x7E;

        if (is1078 == 0) {
            // register the media channel and wait for the platform's ACK
            int size = 0;
            if (rtp_sendbuf_nettype[id] == NETTYPE_TCP) {
                if ((size = send(rtp_sock[id], rtp_sendbuf[id], k, 0)) > 0 &&
                    (size = recv(rtp_sock[id], rtp_sendbuf[id], 1426, 0)) > 0) {
                    if (size >= 13 && rtp_sendbuf[id][0] == 0x7E && rtp_sendbuf[id][1] == 0x80 &&
                        rtp_sendbuf[id][2] == 0x01) {
                        rtp_pkuid[id] = 0;
                        rtp_sendstatus[id] = 1;
                        LOGI("lyj TCP实时音视频媒体通道注册成功");
                    } else {
                        closertp(id);
                        LOGI("lyj TCP实时音视频媒体通道注册失败1");
                    }
                } else {
                    closertp(id);
                    LOGI("lyj TCP实时音视频媒体通道注册失败2");
                }
            } else if (rtp_sendbuf_nettype[id] == NETTYPE_UDP) {
                socklen_t len = sizeof(struct sockaddr);
                if ((size = sendto(rtp_sock[id], rtp_sendbuf[id], k, 0,
                                   (struct sockaddr *) &servaddr[id], sizeof(struct sockaddr))) > 0
                    && (size = recvfrom(rtp_sock[id], rtp_sendbuf[id], 1426, 0,
                                        (struct sockaddr *) &servaddr[id], &len)) > 0) {
                    if (size >= 13 && rtp_sendbuf[id][0] == 0x7E && rtp_sendbuf[id][1] == 0x80 &&
                        rtp_sendbuf[id][2] == 0x01) {
                        rtp_pkuid[id] = 0;
                        rtp_sendstatus[id] = 1;
                        LOGI("lyj UDP实时音视频媒体通道注册成功");
                    } else {
                        closertp(id);
                        LOGI("lyj UDP实时音视频媒体通道注册失败1");
                    }
                } else {
                    closertp(id);
                    LOGI("lyj UDP实时音视频媒体通道注册失败2");
                }
            }
        }
    } else {
        LOGI("lyj 连接到RTP服务器 id:%d  网络类型:%s  网络数据包大小：%d  连接%s", id, nettype == 0 ? "TCP" : "UDP", maxpk,
             ret < 0 ? "失败" : "成功");
        rtp_sendstatus[id] = 0;
    }
    rtp_rwlock[id].unlock();

    updateVideosub(id);
    return ret;
}


// Live-stream control command for channel `id`.
//   KZZL: control kind (0 = stop, 1 = switch stream)
//   YSPLX: with KZZL==0, 0 means stop both audio and video
//   MLLX: with KZZL==1, selects the stream (1 -> type 0, else type 1
//         — presumably sub vs main; confirm against the protocol doc)
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_RTPctrl(JNIEnv *env, jobject thiz, jint id, jint KZZL, jint YSPLX,
                                      jint MLLX) {
    // Any control command halts MIPI uploading.
    if (id == 4) {
        mipiDataisUploading = 0;
    }

    if (KZZL == 0 && YSPLX == 0) {
        // Full stop: tear the RTP channel down.
        closertp(id);
    } else if (KZZL == 1) {
        rtp_streamtype[id] = (MLLX == 1) ? 0 : 1;
    }

    updateVideosub(id);
    return 0;
}

// Playback worker thread: streams the recorded MP4 file rtp_files[id] over
// the RTP channel.  `ptr` smuggles the channel id as an integer.  For each
// video sample it rebuilds an Annex-B H.264 frame (start code + SPS + PPS +
// start code + NAL payload) and hands it to rtp_videosend, pacing by the
// track's frame rate.  Honors the play/pause/stop state in rtp_sendstatus /
// rtp_file_sendstatus1078 between samples.
void *rtp_file_thread(void *ptr) {
    int id = (long) ptr;
    MP4FileHandle mp4File = MP4Read(rtp_files[id].c_str());
    if (mp4File == NULL) {
        LOGI("lyj mp4 file read error  %s", rtp_files[id].c_str());
        return 0;
    }

    uint32_t trackId = MP4_INVALID_TRACK_ID;
    uint32_t numOfTracks = MP4GetNumberOfTracks(mp4File);

    // find video track
    for (uint32_t tmpTrackId = 1; tmpTrackId <= numOfTracks; tmpTrackId++) {
        const char *trackType = MP4GetTrackType(mp4File, tmpTrackId);
        if (MP4_IS_VIDEO_TRACK_TYPE(trackType)) {
            trackId = tmpTrackId;
            break;
        }
    }
    if (trackId != MP4_INVALID_TRACK_ID) {
        // Fixed-size assembly buffer — assumes every sample (+ headers) fits
        // in 655350 bytes; TODO confirm no sample can exceed this.
        uint8_t *buf = (uint8_t *) malloc(655350);
        int index = 0;

        uint32_t numSamples = MP4GetTrackNumberOfSamples(mp4File, trackId);
        uint8_t *pSample = NULL;
        uint32_t sampleSize = 0;
        const char nalHeader[] = {0x00, 0x00, 0x00, 0x01};

        // read sps/pps
        uint8_t **seqheader;
        uint8_t **pictheader;
        uint32_t *pictheadersize;
        uint32_t *seqheadersize;
        uint32_t ix;
        MP4GetTrackH264SeqPictHeaders(mp4File, trackId, &seqheader, &seqheadersize, &pictheader,
                                      &pictheadersize);

        // inter-frame delay in ms from the track's frame rate
        // NOTE(review): divides by fFrameRate — division by zero if the track
        // reports a zero rate; confirm MP4GetTrackVideoFrameRate can't return 0.
        double fFrameRate = MP4GetTrackVideoFrameRate(mp4File, trackId);
        int mmJG = 1000 / fFrameRate;

        // read samples
        for (uint32_t sampleId = 1; sampleId <= numSamples; sampleId++) {
            if (rtp_sendbuf_SPLX[id] != 1)
                break;
            if (rtp_sendstatus[id] == 1 || rtp_file_sendstatus1078[id] == 1) {
                // playing: read and send the next sample
                if (!MP4ReadSample(mp4File, trackId, sampleId, &pSample, &sampleSize)) {
                    LOGI("lyj read sampleId %u error\n", sampleId);
                    if (pSample != NULL) {
                        free(pSample);
                        pSample = NULL;
                    }
                    break;
                } else {
                    index = 0;

                    // NAL unit type from the first byte after the 4-byte
                    // length prefix mp4v2 stores in samples
                    int type = pSample[4] & 0x1f;
                    int Frametype = -1;

                    if (type == 0x07) // sps
                    {

                    } else if (type == 0x08) // pps
                    {

                    } else if (type == 0x05) // I
                    {
                        Frametype = 0;
                    } else if (type == 0x01)// P or B
                    {
                        Frametype = 1;
                    }

                    if (Frametype != -1) {
                        //fwrite(nalHeader, 4, 1, h264File);
                        memcpy(buf + index, nalHeader, 4);
                        index += 4;
                        for (ix = 0; seqheadersize[ix] != 0; ix++) {
                            //fwrite(seqheader[ix], seqheadersize[ix], 1, h264File);
                            memcpy(buf + index, seqheader[ix], seqheadersize[ix]);
                            index += seqheadersize[ix];
                        }

                        //fwrite(nalHeader, 4, 1, h264File);
                        memcpy(buf + index, nalHeader, 4);
                        index += 4;
                        for (ix = 0; pictheadersize[ix] != 0; ix++) {
                            //fwrite(pictheader[ix], pictheadersize[ix], 1, h264File);
                            memcpy(buf + index, pictheader[ix], pictheadersize[ix]);
                            index += pictheadersize[ix];
                        }

                        // replace the 4-byte length prefix with a start code
                        memcpy(buf + index, nalHeader, 4);
                        index += 4;
                        memcpy(buf + index, pSample + 4, sampleSize - 4);
                        index += (sampleSize - 4);

                        //fwrite(nalHeader, 4, 1, h264File);
                        //fwrite(pSample + 4, sampleSize - 4, 1, h264File);

                        // upload the assembled frame, then pace playback
                        LOGE("lyj ======rtp_vdieosend004====== Frametype:%d  id：%d  index=%d rtp_streamtype=%d\n",Frametype,id,index,rtp_streamtype[id]);
                        //rtp_videosend(id, buf, index, Frametype, rtp_streamtype[id]);
                        rtp_videosend(id, buf, index, Frametype, rtp_streamtype[id]);
                        usleep(1000 * mmJG);
                    }

                    if (pSample != NULL) {
                        free(pSample);
                        pSample = NULL;
                    }
                }
            } else if (rtp_sendstatus[id] == 2 || rtp_file_sendstatus1078[id] == 2) {
                int tik = 0;
                while (rtp_sendstatus[id] == 2) {
                    // paused: poll once per second
                    tik++;
                    if (tik % 10 == 0) {
                        // send a keep-alive heartbeat every ~10s
                        rtp_videosend(id, NULL, 0, 0, rtp_streamtype[id]);
                    }
                    sleep(1);
                }
            } else {
                // stopped
                break;
            }
        }

        // free sps/pps header memory returned by MP4GetTrackH264SeqPictHeaders
        for (ix = 0; seqheadersize[ix] != 0; ix++) {
            free(seqheader[ix]);
        }
        free(seqheader);
        free(seqheadersize);
        for (ix = 0; pictheadersize[ix] != 0; ix++) {
            free(pictheader[ix]);
        }
        free(pictheader);
        free(pictheadersize);

        free(buf);
    } else {
        LOGI("lyj Can't find video track  %s\n", rtp_files[id].c_str());
    }

    if (mp4File != NULL) {
        MP4Close(mp4File);
        mp4File = NULL;
    }
    return 0;
}


// Starts RTP playback of a recorded file on channel `id`: stops any previous
// playback thread, (re)registers the media channel via RTPstart, then spawns
// rtp_file_thread to stream the file named by `files`.
// Returns RTPstart's connect result.
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_RTPfilestart(JNIEnv *env, jobject thiz, jint id, jstring ip,
                                           jint port, jint udpport, jint datatype, jint KJKT,
                                           jint streamtype, jint nettype, jint PTType, jint maxpk,
                                           jstring files, jbyteArray cmd1209,jint is1078) {
    if (is1078 == 1){
        closertp(id);   // ensure each id keeps only one socket
    }
    is1078device = is1078;
    LOGI("lyj 准备去上传回放数据咯");
    rtp_fileKJKT[id] = KJKT;

    // stop the previous playback thread
    // NOTE(review): rtp_thread_id[id] may never have been created yet —
    // pthread_join on an uninitialized pthread_t is undefined; confirm it is
    // zero-initialized/created before the first call here.
    rtp_sendstatus[id] = 0;
    pthread_join(rtp_thread_id[id], NULL);
    LOGI("lyj 去注册嘛");
    // register the media channel
    int ret = Java_com_hxrk_jni_Mp4v2Native_RTPstart(env, thiz, id, ip, port, udpport, datatype,
                                                     streamtype, nettype, PTType, maxpk, cmd1209,is1078);
    rtp_file_sendstatus1078[id] = 1;
    if (rtp_sendstatus[id] != 0 || is1078 == 1) {
        pthread_join(rtp_thread_id[id], NULL);
        const char *chars = env->GetStringUTFChars(files, NULL);
        rtp_files[id] = chars;
        env->ReleaseStringUTFChars(files, chars);

        LOGI("lyj RTP视频回放");
        // channel id is smuggled through the thread argument pointer
        int rets = pthread_create(&rtp_thread_id[id], NULL, rtp_file_thread, (void *) id);
        if (rets) {
            LOGI("lyj 创建RPT文件回放线程错误");
        } else {
            LOGI("创建RPT文件回放线程成功");
        }
    }
    return ret;
}

// Playback control for channel `id`.  CSKZ: 0 = resume, 1 = pause, 2 = stop.
// The state machine in rtp_file_thread reacts to rtp_sendstatus:
// 1 = playing, 2 = paused, 0 = stopped.
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_RTPfilectrl(JNIEnv *env, jobject thiz, jint id, jint CSKZ,
                                          jint KJKT_BS, jstring TDHFWZ) {
    // Any playback-control command halts MIPI uploading.
    if (id == 4) {
        mipiDataisUploading = 0;
    }
    switch (CSKZ) {
        case 0:
            LOGI("RPT文件回放开始");
            // resume only makes sense from the paused state
            if (rtp_sendstatus[id] == 2) {
                rtp_sendstatus[id] = 1;
            }
            break;
        case 1:
            LOGI("RPT文件回放暂停");
            rtp_sendstatus[id] = 2;
            break;
        case 2:
            LOGI("RPT文件回放停止");
            rtp_sendstatus[id] = 0;
            rtp_file_sendstatus1078[id] = 0;
            break;
        default:
            break;
    }
    return SUCCESS_LOCAL;
}

// Historical entry point for setting up the snapshot pipeline.  The ffmpeg
// decode/convert setup that used to live here (a large commented-out
// reference block in the original) moved to jpegsave, which now lazily
// initializes the codec on the first snapshot request.  Today this function
// only cycles the per-camera JPEG lock and reports success.
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_jpeginit(JNIEnv *env, jobject thiz, jint id, jint w, jint h) {
    jpeg_rwlock[id].write();
    jpeg_rwlock[id].unlock();
    return SUCCESS_LOCAL;
}

// Queues a JPEG-snapshot request for camera `id`: the next I-frame passing
// through mp4packVideo() will be decoded and written to `filename`, after
// which jpegcallback(String,int,int,int) is invoked on `thiz`.  Lazily
// creates the per-camera H.264 decoder on the first request.
// Returns SUCCESS_LOCAL on success, -1 if the decoder cannot be set up.
// NOTE(review): on the -1 paths the just-queued work item and its global ref
// remain in jpeg_workid[id]; they are only released once a later request
// succeeds — confirm that is acceptable.
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_jpegsave(JNIEnv *env, jobject thiz, jint id, jint workid,
                                       jstring filename) {

    jclass cls = env->GetObjectClass(thiz);
    jmethodID callback = env->GetMethodID(cls, "jpegcallback", "(Ljava/lang/String;III)V");

    jpegwork jw;
    jw.callback = callback;
    jw.workid = workid;
    jw.jobj = env->NewGlobalRef(thiz);   // released in NotifyCallJava after the callback
    const char *ch = env->GetStringUTFChars(filename, NULL);
    jw.filename = ch;
    env->ReleaseStringUTFChars(filename, ch);
    jpeg_rwlock[id].write();
    jpeg_workid[id].push_back(jw);

    if (jpeg_status[id] == 0) {
        // register codecs
        av_register_all();

        // find the H.264 decoder
        pCodec[id] = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (pCodec[id] == NULL) {
            LOGE("avcode find decoder failed!\n");
            // BUGFIX: the write lock must be released on every early return,
            // otherwise the next snapshot or mp4packVideo call deadlocks.
            jpeg_rwlock[id].unlock();
            return -1;
        }

        pCodecCtx[id] = avcodec_alloc_context3(pCodec[id]);
        if (pCodecCtx[id] == NULL) {
            // BUGFIX: this allocation was previously unchecked.
            LOGE("avcodec_alloc_context3 failed!\n");
            jpeg_rwlock[id].unlock();
            return -1;
        }

        // init the AVCodecParserContext
        pCodecParserCtx[id] = av_parser_init(AV_CODEC_ID_H264);
        if (!pCodecParserCtx[id]) {
            LOGE("AVCodecParseContext error");
            jpeg_rwlock[id].unlock();
            return -1;
        }

        /* we do not send complete frames */
        if (pCodec[id]->capabilities & AV_CODEC_CAP_TRUNCATED)
            pCodecCtx[id]->flags |= AV_CODEC_FLAG_TRUNCATED;

        // open the decoder
        if (avcodec_open2(pCodecCtx[id], pCodec[id], NULL) < 0) {
            LOGE("avcode open failed!\n");
            jpeg_rwlock[id].unlock();
            return -1;
        }
        jpeg_status[id] = 1;
    }

    // BUGFIX: `filename` is a jstring (a jobject), not a C string — passing it
    // to %s was undefined; log the copied std::string instead.
    LOGE("id = %d, jpeg_status[id] = %d, workid = %d, filename = %s", id, jpeg_status[id], workid,
         jw.filename.c_str());

    // allocate the reusable decode frame/packet for this camera
    pFrame[id] = av_frame_alloc();
    av_init_packet(&packet[id]);
    packet[id].size = 0;
    packet[id].data = NULL;
    jpeg_rwlock[id].unlock();
    return SUCCESS_LOCAL;
}

// Tears down the snapshot decoder for camera `id`, if one is active.
JNIEXPORT void JNICALL Java_com_hxrk_jni_Mp4v2Native_jpegclose(JNIEnv *env, jobject thiz, jint id) {
    jpeg_rwlock[id].write();
    if (jpeg_status[id] == 1) {
        // Release ffmpeg resources in reverse order of their creation
        // in jpegsave.
        av_free_packet(&packet[id]);
        av_frame_free(&pFrame[id]);
        avcodec_free_context(&pCodecCtx[id]);
        av_parser_close(pCodecParserCtx[id]);
        jpeg_status[id] = 0;
    }
    jpeg_rwlock[id].unlock();
}

// Version number of this native library as reported to the Java layer.
int Version = 1;

// Returns the native library's version number (the 'Version' global above).
JNIEXPORT jint JNICALL Java_com_hxrk_jni_Mp4v2Native_GetMp4v2Version(JNIEnv *env, jobject thiz) {
    return Version;
}

/**
 * Opens the first unused /dev/videoN node into the first free USB camera slot
 * and starts capturing at the requested size.
 *
 * On success the negotiated properties are reported to Java through
 * videocallback2(devNode, fourcc, cardName, slot, width, height,
 * fpsNumerator, fpsDenominator, driverVersion). On failure the callback is
 * invoked with NULL strings and zero integers.
 *
 * Fixes vs. the original:
 *  - GetMethodID result is checked (calling through a NULL jmethodID with a
 *    pending exception is undefined behavior).
 *  - The failure callbacks no longer pass NULL through varargs for the six
 *    jint parameters (pointer-sized NULL for an int vararg is UB); they pass
 *    explicit (jstring)NULL and 0 values instead.
 *  - open() result is compared with >= 0: fd 0 is a valid descriptor.
 *  - cap.card is formatted with snprintf into a large-enough buffer
 *    (V4L2 card names can be up to 32 bytes; the old char[20] could overflow).
 *  - Local string refs are deleted after the callback.
 *
 * @return SUCCESS_LOCAL when a device was opened (or all slots are already
 *         busy), ERROR_LOCAL otherwise.
 */
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_videoopen(JNIEnv *env, jobject thiz, jint w, jint h) {
    char filestr[50];
    vector<string> videostrs;
    int fb = 0, i, j, k;
    bool flag = false;
    int ret = ERROR_LOCAL;

    // Resolve the Java-side report callback.
    jclass cls = env->GetObjectClass(thiz);
    jmethodID callback = env->GetMethodID(cls, "videocallback2",
                                          "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;IIIIII)V");
    if (callback == NULL) {
        // Lookup failed; a Java exception is already pending.
        return ERROR_LOCAL;
    }

    // Is there at least one free camera slot?
    for (i = 0; i < CAMERANUM_USB; i++) {
        if (image_video[i].fd == -1) {
            flag = true;
            break;
        }
    }
    if (flag) {
        // Enumerate /dev/video0 .. /dev/video9 and keep the nodes that exist.
        videostrs.clear();
        for (i = 0; i < 10; i++) {
            snprintf(filestr, sizeof(filestr), "/dev/video%d", i);
            if ((fb = open(filestr, O_RDONLY)) >= 0) {  // fd 0 is valid
                videostrs.push_back(filestr);
                close(fb);
            }
        }

        string selvideostr;

        if (videostrs.size() == 0) {
            // No device nodes at all: report failure to Java.
            env->CallVoidMethod(thiz, callback, (jstring) NULL, (jstring) NULL, (jstring) NULL,
                                0, 0, 0, 0, 0, 0);
            return ERROR_LOCAL;
        }

        // Pick a node that is not already owned by an open slot.
        for (k = 0; k < (int) videostrs.size(); k++) {
            flag = false;
            // Is this node already open in some slot?
            for (j = 0; j < CAMERANUM_USB; j++) {
                if (image_video[j].fd != -1 &&
                    !strcmp(videostrs.at(k).c_str(), image_video[j].dev_name.c_str())) {
                    flag = true;
                    break;
                }
            }

            if (!flag) {
                selvideostr = videostrs.at(k);
                // Open it into the first free slot.
                for (i = 0; i < CAMERANUM_USB; i++) {
                    if (image_video[i].fd == -1) {
                        rwlock_video[i].write();

                        image_video[i].width = w;
                        image_video[i].height = h;

                        ret = image_video[i].opendevice(selvideostr);

                        if (ret == SUCCESS_LOCAL) {
                            ret = image_video[i].initdevice();
                        }

                        if (ret == SUCCESS_LOCAL) {
                            ret = image_video[i].startcapturing();
                        }

                        if (ret == SUCCESS_LOCAL) {
                            // RGB scratch buffer sized to the negotiated format
                            // (only allocated once; reused across re-opens).
                            if (image_video[i].rgb == NULL)
                                image_video[i].rgb = (int *) malloc(sizeof(int) *
                                                                    (image_video[i].fmt.fmt.pix.width *
                                                                     image_video[i].fmt.fmt.pix.height));

                            jstring str1 = env->NewStringUTF(selvideostr.c_str());
                            // Decode the V4L2 pixel-format FOURCC into 4 chars.
                            char codes[20];
                            snprintf(codes, sizeof(codes), "%c%c%c%c",
                                     image_video[i].fmt.fmt.pix.pixelformat & 0xFF,
                                     (image_video[i].fmt.fmt.pix.pixelformat >> 8) & 0xFF,
                                     (image_video[i].fmt.fmt.pix.pixelformat >> 16) & 0xFF,
                                     (image_video[i].fmt.fmt.pix.pixelformat >> 24) & 0xFF);
                            jstring str2 = env->NewStringUTF(codes);
                            // V4L2 card names are up to 32 bytes; size the
                            // buffer accordingly (the old char[20] could overflow).
                            char chars[40];
                            snprintf(chars, sizeof(chars), "%s", image_video[i].cap.card);
                            jstring cardName = env->NewStringUTF(chars);

                            LOGE("CECarDvr ============================0000001 init USB==========================");
                            env->CallVoidMethod(thiz, callback, str1, str2, cardName, i,
                                                image_video[i].fmt.fmt.pix.width,
                                                image_video[i].fmt.fmt.pix.height,
                                                image_video[i].setfps.parm.capture.timeperframe.numerator,
                                                image_video[i].setfps.parm.capture.timeperframe.denominator,
                                                image_video[i].cap.version);
                            // Drop local refs; this entry point is called
                            // repeatedly over the process lifetime.
                            env->DeleteLocalRef(str1);
                            env->DeleteLocalRef(str2);
                            env->DeleteLocalRef(cardName);
                        }

                        if (ret != SUCCESS_LOCAL) {
                            LOGE("CECarDvr ============================0000001closeUSB==========================");
                            image_video[i].closedevice();
                            image_video[i].fd = -1;
                            env->CallVoidMethod(thiz, callback, (jstring) NULL, (jstring) NULL,
                                                (jstring) NULL, 0, 0, 0, 0, 0, 0);
                        }
                        rwlock_video[i].unlock();
                        break;
                    }
                }
            }
        }
    } else {
        // Every slot already holds an open device — nothing to do.
        ret = SUCCESS_LOCAL;
    }
    return ret;
}


// Scratch state for converting decoded frames for bitmap output and for the
// face recognizer. NOTE(review): roles below are inferred from the names;
// confirm at the use sites (not visible in this chunk).
SwsContext *img_convert_ctx_BMP;
uint8_t *out_buffer_BMP;          // presumably the RGB destination pixel buffer
int out_buffer_BMPSize;           // presumably sizeof(out_buffer_BMP) in bytes
AVFrame *pFrameRGB_BMP;
AVFrame *pFrame420;

AVFrame *m_pFrameBGR;                // for face recognition
SwsContext *m_ConvertCtxYUV2BGR;     // presumably YUV -> BGR for the recognizer
uint8_t *m_bgrBuffer;
size_t m_bgrBufferSize;

/**
 * Reads one captured frame from USB camera slot 'id' and hands it to Java.
 *
 *  - V4L2_PIX_FMT_H264: passes the raw encoded buffer address and length up
 *    via the Java callback videocallback3(int id, long addr, int len).
 *  - V4L2_PIX_FMT_MJPEG: copies the decoded frame (image_video[id].pFrame)
 *    into 'yuvdata' as planar Y/U/V (converting 4:2:2 to 4:2:0 with libyuv
 *    when needed). If a substream is active and RTP sending is enabled with
 *    datatype 0, also copies image_video[id].pFrameRGB plane-by-plane into
 *    'subyuvdata' (despite the name, it is copied as three YUV-style planes —
 *    confirm its actual pixel format) and reports the substream flag through
 *    'rtpstatus'.
 *
 * On a failed read the device is assumed dead: closeVideoLoop() tears down
 * stalled cameras and ERROR_LOCAL is returned. Otherwise SUCCESS_LOCAL is
 * returned, including when 'id' is out of range or the slot is closed.
 *
 * NOTE(review): the 'bitmap1' parameter is unused in this function.
 */
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_videoread(JNIEnv *env, jobject thiz, jint id, jobject bitmap1,
                                        jbyteArray yuvdata, jbyteArray subyuvdata,
                                        jbooleanArray rtpstatus) {
    if (id >= 0 && id < CAMERANUM_USB && image_video[id].fd != -1) {
        rwlock_video[id].write();
        if (image_video[id].readframeonce() == SUCCESS_LOCAL) {

            if (image_video[id].fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_H264) {
                // Callback: hand the raw H.264 buffer straight to Java.
                jclass cls = env->GetObjectClass(thiz);
                jmethodID callback = env->GetMethodID(cls, "videocallback3", "(IJI)V");
                env->CallVoidMethod(thiz, callback, id,
                                    (long long) image_video[id].readbuffer.start,
                                    image_video[id].readbuffer.length);
            } else if (image_video[id].fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                // NOTE(review): the 2nd argument of GetByteArrayElements is a
                // jboolean* (isCopy out-param); JNI_FALSE happens to act as a
                // null pointer here, but NULL would state the intent.
                unsigned char *buf = (unsigned char *) env->GetByteArrayElements(yuvdata,
                                                                                 JNI_FALSE);
                int i, j, k;

                if (image_video[id].pFrame->format == AV_PIX_FMT_YUVJ422P) {
                    // 422 to 420
                    //double dur;
                    //clock_t start,end;
                    //start = clock();

                    jint in_width = image_video[id].pFrame->width;
                    jint in_height = image_video[id].pFrame->height;

                    // NOTE(review): each source plane pointer is offset by one
                    // full linesize (the first row of the plane is skipped) and
                    // the source stride is assumed to equal the image width,
                    // not linesize[] — confirm this matches the decoder's
                    // actual buffer layout.
                    const uint8_t *src_y =
                            image_video[id].pFrame->data[0] + image_video[id].pFrame->linesize[0];
                    int src_stride_y = in_width;
                    const uint8_t *src_u =
                            image_video[id].pFrame->data[1] + image_video[id].pFrame->linesize[1];
                    int src_stride_u = in_width / 2;
                    const uint8_t *src_v =
                            image_video[id].pFrame->data[2] + image_video[id].pFrame->linesize[2];
                    int src_stride_v = in_width / 2;

                    // Destination is packed I420 inside 'buf':
                    // Y plane, then U at w*h, then V at w*h*5/4.
                    libyuv::ConvertI422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v,
                                              src_stride_v,
                                              (uint8_t *) buf, in_width,
                                              (uint8_t *) buf + (in_width * in_height),
                                              in_width / 2,
                                              (uint8_t *) buf + (in_width * in_height * 5 / 4),
                                              in_width / 2, in_width, in_height);

                    //end = clock();
                    //dur = (double)(end - start);
                    //LOGE("422 to 420 time = %lf", dur);

                } else if (image_video[id].pFrame->format == AV_PIX_FMT_YUVJ420P) {
                    //LOGE("is 420");
                    // Copy the Y plane row by row (linesize may exceed width).
                    for (i = 0; i < image_video[id].pFrame->height; i++) {
                        memcpy(buf + image_video[id].pFrame->width * i,
                               image_video[id].pFrame->data[0] +
                               image_video[id].pFrame->linesize[0] * i,
                               image_video[id].pFrame->width);
                    }
                    // After the loop i == height, so width*i is the end of the
                    // Y plane; U rows are appended there.
                    for (j = 0; j < image_video[id].pFrame->height / 2; j++) {
                        memcpy(buf + image_video[id].pFrame->width * i +
                               image_video[id].pFrame->width / 2 * j,
                               image_video[id].pFrame->data[1] +
                               image_video[id].pFrame->linesize[1] * j,
                               image_video[id].pFrame->width / 2);
                    }
                    // Likewise j == height/2 here; V rows follow the U plane.
                    for (k = 0; k < image_video[id].pFrame->height / 2; k++) {
                        memcpy(buf + image_video[id].pFrame->width * i +
                               image_video[id].pFrame->width / 2 * j +
                               image_video[id].pFrame->width / 2 * k,
                               image_video[id].pFrame->data[2] +
                               image_video[id].pFrame->linesize[2] * k,
                               image_video[id].pFrame->width / 2);
                    }
                }

                env->ReleaseByteArrayElements(yuvdata, (jbyte *) buf, 0);


                // Substream path for RTP: same plane-by-plane copy scheme,
                // but from pFrameRGB into 'subyuvdata'.
                if (image_video[id].substream && rtp_sendstatus[id] == 1 && rtp_datatype[id] == 0) {
                    buf = (unsigned char *) env->GetByteArrayElements(subyuvdata, JNI_FALSE);
                    for (i = 0; i < image_video[id].pFrameRGB->height; i++) {
                        memcpy(buf + image_video[id].pFrameRGB->width * i,
                               image_video[id].pFrameRGB->data[0] +
                               image_video[id].pFrameRGB->linesize[0] * i,
                               image_video[id].pFrameRGB->width);
                    }
                    for (j = 0; j < image_video[id].pFrameRGB->height / 2; j++) {
                        memcpy(buf + image_video[id].pFrameRGB->width * i +
                               image_video[id].pFrameRGB->width / 2 * j,
                               image_video[id].pFrameRGB->data[1] +
                               image_video[id].pFrameRGB->linesize[1] * j,
                               image_video[id].pFrameRGB->width / 2);
                    }
                    for (k = 0; k < image_video[id].pFrameRGB->height / 2; k++) {
                        memcpy(buf + image_video[id].pFrameRGB->width * i +
                               image_video[id].pFrameRGB->width / 2 * j +
                               image_video[id].pFrameRGB->width / 2 * k,
                               image_video[id].pFrameRGB->data[2] +
                               image_video[id].pFrameRGB->linesize[2] * k,
                               image_video[id].pFrameRGB->width / 2);
                    }

                    env->ReleaseByteArrayElements(subyuvdata, (jbyte *) buf, 0);

                    // Report the substream flag back to Java.
                    jboolean *intbuf = (jboolean *) env->GetBooleanArrayElements(rtpstatus,
                                                                                 JNI_FALSE);
                    intbuf[0] = image_video[id].substream;
                    env->ReleaseBooleanArrayElements(rtpstatus, (jboolean *) intbuf, 0);
                }
            }

        } else {
            // Read failed: unlock first, then sweep dead cameras.
            rwlock_video[id].unlock();
            LOGE("=========================0000002closeUSB=========================");
            closeVideoLoop();
            return ERROR_LOCAL;
        }
        rwlock_video[id].unlock();
    }
    return SUCCESS_LOCAL;
}

/**
 * Sweeps all USB camera slots and fully tears down every open device whose
 * capture has stalled (a probing readframeonce() fails — e.g. the camera was
 * unplugged). Frees the per-slot RGB scratch buffer so a later videoopen()
 * reallocates it at the new resolution. Each slot is handled under its own
 * write lock.
 *
 * Fix vs. the original: the log message typo "colse" is corrected.
 */
void closeVideoLoop() {
    for (int i = 0; i < CAMERANUM_USB; i++) {
        rwlock_video[i].write();
        if (image_video[i].fd != -1) {
            // Probe the device once more; a failed read means it is gone.
            if (image_video[i].readframeonce() != SUCCESS_LOCAL) {
                image_video[i].stopcapturing();
                image_video[i].uninitdevice();
                image_video[i].closedevice();
                image_video[i].fd = -1;
                if (image_video[i].rgb != NULL) {
                    free(image_video[i].rgb);
                    image_video[i].rgb = NULL;
                }
                LOGE("close video %d", i);
            }
        }
        rwlock_video[i].unlock();
    }
}

/**
 * Sets the watermark text for watermark group 'group' on USB camera slot 'id'.
 * Silently ignores the request when the slot is out of range or not open.
 *
 * Fixes vs. the original:
 *  - GetStringUTFChars can return NULL (OOM, exception pending); assigning a
 *    NULL char* to std::string is undefined behavior, so it is now checked.
 *  - setWatermark() now runs under rwlock_video[id].write(), consistent with
 *    videoread()/videowatermarkex(), which touch the same image_video state
 *    under that lock.
 *
 * @return SUCCESS_LOCAL normally; ERROR_LOCAL if the string could not be read.
 */
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_videowatermark(JNIEnv *env, jobject thiz, jint id, jint group,
                                             jstring info) {
    if (id >= 0 && id < CAMERANUM_USB && image_video[id].fd != -1) {
        const char *ch = env->GetStringUTFChars(info, NULL);
        if (ch == NULL) {
            return ERROR_LOCAL;   // Java exception pending
        }
        string sinfo = ch;
        env->ReleaseStringUTFChars(info, ch);
        rwlock_video[id].write();
        image_video[id].setWatermark(group, const_cast<char *>(sinfo.c_str()));
        rwlock_video[id].unlock();
    }
    return SUCCESS_LOCAL;
}

/**
 * Closes USB camera slot 'id': stops capture, releases the V4L2 device and
 * marks the slot free, all under the slot's write lock.
 *
 * Fixes vs. the original:
 *  - 'id' is validated before indexing the per-camera arrays (consistent with
 *    videoread()/videowatermark(), which both bounds-check it).
 *  - The "colse" log typo is corrected.
 *
 * NOTE(review): image_video[id].rgb is deliberately NOT freed here so a later
 * videoopen() can reuse it (videoopen only mallocs when rgb == NULL, and
 * closeVideoLoop() is the path that frees it). If the capture resolution can
 * grow between opens, confirm the buffer is re-sized somewhere.
 *
 * @return SUCCESS_LOCAL on success; ERROR_LOCAL for an out-of-range id.
 */
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_videoclose(JNIEnv *env, jobject thiz, jint id) {

    LOGE("===========================000000003closeUSB===========================");
    if (id < 0 || id >= CAMERANUM_USB) {
        return ERROR_LOCAL;
    }
    rwlock_video[id].write();
    if (image_video[id].fd != -1) {
        image_video[id].stopcapturing();
        image_video[id].uninitdevice();
        image_video[id].closedevice();
        image_video[id].fd = -1;
        LOGE("close video %d", id);
    }
    rwlock_video[id].unlock();

    return SUCCESS_LOCAL;
}


/**
 * Extended watermark setter: sets watermark line 'index' with position (x, y)
 * and size/style 's' on USB camera slot 'id', under the slot's write lock.
 * Silently ignores the request when the slot is out of range or not open.
 *
 * Fixes vs. the original:
 *  - 'id' is bounds-checked BEFORE touching rwlock_video[id]; the original
 *    acquired the lock first, indexing out of bounds for an invalid id.
 *  - GetStringUTFChars can return NULL (OOM); assigning a NULL char* to
 *    std::string is undefined behavior, so it is now checked.
 *
 * @return SUCCESS_LOCAL (the original's silent-ignore contract is kept).
 */
JNIEXPORT jint JNICALL
Java_com_hxrk_jni_Mp4v2Native_videowatermarkex(JNIEnv *env, jobject thiz, jint id, jint index,
                                               jstring params, jint x, jint y, jint s) {
    if (id < 0 || id >= CAMERANUM_USB) {
        return SUCCESS_LOCAL;
    }
    rwlock_video[id].write();
    if (image_video[id].fd != -1) {
        const char *ch = env->GetStringUTFChars(params, NULL);
        if (ch != NULL) {
            string sinfo = ch;
            env->ReleaseStringUTFChars(params, ch);
            image_video[id].setWatermark(index, sinfo, x, y, s);
        }
    }
    rwlock_video[id].unlock();
    return SUCCESS_LOCAL;
}


/************************************************************************/
/* Worker thread: feeds queued image frames to the face-recognition lib */
/************************************************************************/
/**
 * Drains BGRBufferList: while a recognizer callback is installed, takes the
 * oldest frame, forwards it to nativeGetFaceNumber() when faceStatus == 1,
 * then frees the frame's pixel buffer.
 *
 * Fixes vs. the original:
 *  - The LOGE format used %d for the buffer pointer (undefined behavior for
 *    printf-family); it now uses %p with an explicit cast, and the size is
 *    cast to int to match %d.
 *  - The dead "img.bgrBuffer = NULL" on a local copy was removed.
 *
 * NOTE(review): pstru->env is the JNIEnv of the thread that called
 * faceStart(); a JNIEnv is only valid on its own thread — confirm the callee
 * attaches this thread / obtains its own env.
 * NOTE(review): BGRBufferList is read and erased here without a lock —
 * confirm the producer side synchronizes access.
 */
void *face_thread(void *arg) {
    Threadpara *pstru = (struct Threadpara *) arg;

    while (nativeGetFaceNumber != NULL) {
        if (BGRBufferList.size() >= 1) {
            BGRBuffer img = BGRBufferList[0];
            if (faceStatus == 1) {

                LOGE("BGRBufferList 取图像 并往识别库发送人脸识别图像帧   img.start = %p  length = %d img.width = %d img.height = %d",
                     (void *) img.bgrBuffer, (int) img.bgrBufferSize, img.width, img.height);
                nativeGetFaceNumber(pstru->env, pstru->thiz, (long long) img.bgrBuffer, img.width,
                                    img.height, img.bgrBufferSize);
            }
            BGRBufferList.erase(BGRBufferList.begin()); // drop the consumed entry
            LOGE("BGRBufferList 释放拷贝帧");
            delete[] img.bgrBuffer;
        }
        usleep(1);
    }
    return 0;
}


/************************************************************************/
/*                                                                  */
/************************************************************************/
/**
 * Installs the externally supplied frame-drawing callback. The Java layer
 * packs the raw native function-pointer value into a jlong.
 */
JNIEXPORT void JNICALL
Java_com_hxrk_jni_Mp4v2Native_setNativeDrawFrame(JNIEnv *env, jobject thiz, jlong pf) {
    typedef void (*DrawFrameFn)(JNIEnv *, jobject, jlong, jint, jint);
    NativeDrawFrame = reinterpret_cast<DrawFrameFn>(pf);
}


/************************************************************************/
/* Registers the face-recognition callback                               */
/************************************************************************/
JNIEXPORT void JNICALL
Java_com_hxrk_jni_Mp4v2Native_setNativeGetFaceNumber(JNIEnv *env, jobject thiz, jlong pf) {
    // The callback pointer arrives from Java packed into a jlong.
    typedef void (*FaceNumberFn)(JNIEnv *, jobject, jlong, jint, jint, jint);
    nativeGetFaceNumber = reinterpret_cast<FaceNumberFn>(pf);
}


/************************************************************************/
/* Starts / stops face recognition                                       */
/************************************************************************/
/**
 * status == 1: (re)starts the face_thread worker; anything else just records
 * the stopped state (the original stop path is commented out and kept so).
 *
 * Fixes vs. the original:
 *  - The Threadpara handed to the worker is heap-allocated; the original
 *    passed the address of a stack local that dangles as soon as this JNI
 *    call returns while the thread still reads it. The block is deliberately
 *    never freed on success (the thread keeps it for its whole lifetime).
 *  - 'thiz' is promoted to a global ref (as jpegopen() does) — a local ref is
 *    invalid once this call returns, let alone on another thread.
 *  - pthread_join is only called after a thread was actually created; the
 *    original joined an uninitialized pthread_t on the first call (UB).
 *
 * NOTE(review): face_thread()'s loop has no stop condition tied to
 * faceStatus, so if the previous worker is still running this join can block
 * indefinitely — confirm the intended shutdown protocol.
 * NOTE(review): storing 'env' for use on another thread is invalid JNI; the
 * worker (or callee) must attach itself via the JavaVM — confirm.
 */
JNIEXPORT void JNICALL
Java_com_hxrk_jni_Mp4v2Native_faceStart(JNIEnv *env, jobject thiz, jint status, jint bFlag) {
    static bool face_thread_created = false;   // guards the first join

    faceStatus = status;

    if (faceStatus == 1) {
        // Wait for a previously started transfer thread, if any.
        if (face_thread_created) {
            pthread_join(face_threadid, NULL);
        }
        Threadpara *pstru = new Threadpara;
        pstru->env = env;
        pstru->thiz = env->NewGlobalRef(thiz);
        int ret = pthread_create(&face_threadid, NULL, face_thread, pstru);
        if (ret) {
            LOGI("创建人脸识别线程错误");
            env->DeleteGlobalRef((jobject) pstru->thiz);
            delete pstru;
        } else {
            LOGI("创建人脸识别线程成功");
            face_thread_created = true;
        }

    } else {
        // Stop the thread
        //pthread_join(face_threadid, NULL);
        LOGI("停止人脸识别线程");
    }
}

/**
 * Caches stream parameters pushed down from Java: the logical channel number
 * for the given camera and the six SIM digit-group globals used elsewhere in
 * this file when framing RTP payloads.
 */
void JNICALL Java_com_hxrk_jni_Mp4v2Native_setParams(JNIEnv *env, jobject thiz, jint cameraid,
                                                     jint channelNumber, jint sim_1,
                                                     jint sim_2, jint sim_3, jint sim_4, jint sim_5,
                                                     jint sim_6) {
    // Per-camera logical channel number.
    rtp_sendbuf_TDH[cameraid] = channelNumber;
    // The six SIM digit groups are independent globals; store each in turn.
    rtp_sim6 = sim_6;
    rtp_sim5 = sim_5;
    rtp_sim4 = sim_4;
    rtp_sim3 = sim_3;
    rtp_sim2 = sim_2;
    rtp_sim1 = sim_1;
}

/**
 * Appends 'len' bytes of raw RTP data to the file at 'path' (mode "a+").
 * The caller's errno is preserved across a completed call (saved on entry,
 * restored on exit), matching the original contract.
 *
 * @return 1  when all bytes were written,
 *         0  when the write fails or len == 0 (fwrite with size 0 returns 0),
 *         -1 when path/data is NULL or the file cannot be opened
 *            (the original returned 0 for an open failure, which was
 *            indistinguishable from a short write).
 */
int writeRTPData(const char *path, unsigned char *data, unsigned int len) {
    int ret = 0;
    int saved_errno = errno;   // preserve the caller's errno
    if (data == NULL || path == NULL) {
        LOGE("Path or data is NULL\n");
        return -1;
    }

    FILE *fp = fopen(path, "a+");
    if (fp == NULL) {
        LOGE("Open file %s fail\n", path);
        return -1;
    }

    errno = 0;
    // fwrite(.., len, 1, ..) returns 1 iff the whole len-byte chunk was written.
    ret = (int) fwrite(data, len, 1, fp);
    if (ret == 1) printf("file %s write OK\n", path);
    else {
        if (errno > 0) perror("write error");
    }

    fclose(fp);
    errno = saved_errno;
    return ret;
}