//
// Created by admin on 2022-06-10.
//
#include <jni.h>
#include "ffmpeg.h"
#include <android/log.h>
#include <pthread.h>
#include <unistd.h>

JavaVM *g_VM;
int mNeedDetach;
// One-time FFmpeg bootstrap: register all demuxers/codecs and bring up the
// network layer (required before any RTSP/RTMP input can be opened).
void ffmpeg::init() {
    av_register_all();
    avformat_network_init();
    LOGI("注册成功");
}


// Destructor intentionally empty: all teardown (FFmpeg contexts, buffers,
// JNI refs) is performed explicitly in release().
ffmpeg::~ffmpeg() {

}

// Constructor intentionally empty: members are initialized per-connection
// in reset() before each streaming attempt.
ffmpeg::ffmpeg() {

}


// Log an informational state transition for this channel/stream URL.
void ffmpeg::logInfo(State state) {
    LOGI("通道%d,流地址%s,状态%d", channel,pUrl,state);
}
// Log an informational state transition with an extra detail message.
void ffmpeg::logInfo(State state, char *msg) {
    LOGI("通道%d,流地址%s,状态%d,信息%s", channel,pUrl,state,msg);
}
// Log a failed state transition for this channel/stream URL.
void ffmpeg::logError(State state) {
    LOGE("通道%d,流地址%s,状态%d", channel,pUrl,state);
}
// Log a failed state transition with an extra detail message.
void ffmpeg::logError(State state, char *msg) {
    LOGE("通道%d,流地址%s,状态%d,信息%s", channel,pUrl,state,msg);
}
// Reset all per-connection members to a clean state and (re)build the
// AVDictionary of demuxer options used by the next avformat_open_input().
void ffmpeg::reset() {
    swsContext = NULL;
    out_buffer = NULL;
    packet = NULL;
    frame = NULL;
    rgb_frame = NULL;
    avCodecContextVideo = NULL;
    avCodecContextAudio = NULL;
    avCodec = NULL;
    avdic = NULL;
    avFormatContext = NULL;

    video_index = -1;
    audio_index = -1;
    input_runner = {0, 0, channel};

    // Transport options: force RTSP over TCP and cap the demuxer delay.
    char transportKey[] = "rtsp_transport";
    char transportValue[] = "tcp";
    av_dict_set(&avdic, transportKey, transportValue, 0);
    char delayKey[] = "max_delay";
    char delayValue[] = "5000";
    av_dict_set(&avdic, delayKey, delayValue, 0);
    // Enlarged read buffer (default is 65536) plus probe limits to keep
    // stream start-up fast.
    av_dict_set(&avdic, "bufsize", "1024000", 0);
    av_dict_set(&avdic, "probesize", "512000", 0);
    av_dict_set(&avdic, "max_analyze_duration", "5000000", 0);
    // Low-latency hints; options not recognized by the demuxer are ignored.
    av_dict_set(&avdic, "profile", "baseline", 0);
    av_dict_set(&avdic, "tune", "zerolatency", 0);
    av_dict_set(&avdic, "preset", "ultrafast", 0);

    // Allocate the demux context and attach the timeout interrupt callback
    // so a hung avformat_open_input()/av_read_frame() can be aborted.
    avFormatContext = avformat_alloc_context();
    logInfo(STATE_ALLOC_CONTEXT);
    avFormatContext->interrupt_callback.callback = interrupt_callback;
    avFormatContext->interrupt_callback.opaque = &input_runner;
}

void ffmpeg::openStream() {
    while (!bClose) {
        // 把接口传进去，或者保存在一个结构体里面的属性， 进行传递也可以
//        pthread_t ntid1 =  pthread_create(&ntid1, NULL, reinterpret_cast<void *(*)(void *)>(download()), NULL);
        reset();
        logInfo(STATE_INIT);
        //连接流
        int ret = -1;
        do {
            ret = connect();
        } while (ret != SUCCESS && !bClose);

        do {
            ret = prepareDecoder();
        } while (ret != SUCCESS && !bClose);

        allocPacket();
        while (!bClose && readFrameError <= 100) {
            decode();
            av_free_packet(packet);
        }
    }
    release();
};

void ffmpeg::openThread() {
    pthread_t ntid;
//    int ret = pthread_create(&ntid, NULL, openStream, this);
//    pthread_create(&ntid, NULL, reinterpret_cast<void *(*)(void *)>(openStream()), NULL);
    LOGI("启动线程%d",ntid);
}
//连接
int ffmpeg::connect() {
    //设置avformat_open_input超时判断
    input_runner.lasttime = time(NULL);
    input_runner.connected = false;

    int error = -1;
    if(!bClose && ((error = avformat_open_input(&avFormatContext, pUrl, NULL, &avdic)) >= 0)) {
        input_runner.connected = true;
        logInfo(STATE_OPEN_INPUT);
    } else {
        if(!bClose) {
            char buf[] = "";
            av_strerror(error, buf, 1024);
            bzero(buffer_log, sizeof(buffer_log)); //buffer置零
            sprintf(buffer_log,"打开流失败,错误码%d,%s",error,buf);
            logError(STATE_OPEN_INPUT,buffer_log);
        }
        return STATE_OPEN_INPUT;
    }

    return SUCCESS;


}
// Read the stream info and open a video decoder.
// Fills video_index/audio_index and the per-stream codec contexts, then finds
// and opens a decoder for the video stream. Returns SUCCESS, or the State
// value of the first step that failed.
int ffmpeg::prepareDecoder() {
    if (bClose || avformat_find_stream_info(avFormatContext, NULL) < 0) {
        logError(STATE_FIND_STREAM_INFO);
        return STATE_FIND_STREAM_INFO;
    } else {
        logInfo(STATE_FIND_STREAM_INFO);
    }
    // Locate the video and audio streams (if several exist, the last wins —
    // preserved from the original behavior).
    // BUGFIX: nb_streams is unsigned; the index now matches, avoiding a
    // signed/unsigned comparison.
    for (unsigned int i = 0; i < avFormatContext->nb_streams; ++i) {
        if (avFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_index = i;
            avCodecContextVideo = avFormatContext->streams[video_index]->codec;
        } else if (avFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            audio_index = i;
            avCodecContextAudio = avFormatContext->streams[audio_index]->codec;
        }
    }

    if (video_index == -1) {
        logError(STATE_FIND_VIDEO_INDEX);
        return STATE_FIND_VIDEO_INDEX;
    } else {
        logInfo(STATE_FIND_VIDEO_INDEX);
    }

    // A stream without a known pixel format cannot be converted/rendered.
    if (avCodecContextVideo->pix_fmt == AV_PIX_FMT_NONE) {
        logError(STATE_FIND_VIDEO_INDEX);
        return STATE_FIND_VIDEO_INDEX;
    }

    // Find a decoder matching the stream's codec id.
    if (!bClose && (avCodec = avcodec_find_decoder(avCodecContextVideo->codec_id))) {
        logInfo(STATE_FIND_DECODER);
    } else {
        logError(STATE_FIND_DECODER);
        return STATE_FIND_DECODER;
    }

    // Open the decoder.
    if (!bClose && avcodec_open2(avCodecContextVideo, avCodec, NULL) == 0) {
        logInfo(STATE_AVCODEC_OPEN2);
    } else {
        logError(STATE_AVCODEC_OPEN2);
        return STATE_AVCODEC_OPEN2;
    }
    return SUCCESS;
}
void ffmpeg::allocPacket() {
    //申请AVPacket
    packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    //申请AVFrame
    frame = av_frame_alloc();//分配一个AVFrame结构体,AVFrame结构体一般用于存储原始数据，指向解码后的原始帧
    rgb_frame = av_frame_alloc();//分配一个AVFrame结构体，指向存放转换成rgb后的帧
    //缓存区
    out_buffer= (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_RGBA,avCodecContextVideo->width,avCodecContextVideo->height));
    //与缓存区相关联，设置rgb_frame缓存区
    avpicture_fill((AVPicture *)rgb_frame,out_buffer,AV_PIX_FMT_RGBA,avCodecContextVideo->width,avCodecContextVideo->height);
    swsContext = sws_getContext(avCodecContextVideo->width,avCodecContextVideo->height,avCodecContextVideo->pix_fmt,
                                avCodecContextVideo->width,avCodecContextVideo->height,AV_PIX_FMT_RGBA,
                                SWS_BICUBIC,NULL,NULL,NULL);
    readFrameError = 0;
}
// Decode one packet from the stream.
// Reads a single AVPacket; on read failure bumps readFrameError (the caller
// gives up and reconnects after 100 consecutive failures) and leaves the
// timeout watchdog armed. Video packets are decoded and, when a surface is
// attached, rendered via drawVideo(). Audio is currently ignored.
int ffmpeg::decode() {

    logInfo(STATE_READ_FRAME);
    // Re-arm the interrupt watchdog so a stalled av_read_frame can be aborted
    // by interrupt_callback() after ~5 seconds.
    input_runner.lasttime = time(NULL);
    input_runner.connected = false;
    int state = -1;
    state = av_read_frame(avFormatContext, packet);
    if(state < 0) {
        readFrameError++;
        input_runner.connected = false;
        return STATE_READ_FRAME;
    }
    readFrameError = 0;
    input_runner.connected = true;

    // Dispatch by stream: decode video; audio/other packets are dropped.
    if(packet->stream_index == video_index) {
        int got_picture_ptr;
        avcodec_decode_video2(avCodecContextVideo, frame, &got_picture_ptr, packet);
        if(!got_picture_ptr) {
            LOGE("avcodec_decode_video2 error")
            logError(STATE_DECODE_VIDEO2);
            // NOTE(review): the caller (openStream) also calls av_free_packet
            // on this packet after decode() returns; the old av_free_packet
            // resets the fields so the double call appears benign, but freeing
            // in a single place would be safer — confirm before refactoring.
            av_free_packet(packet);
            return STATE_DECODE_VIDEO2;
        } else {
            LOGI("avcodec_decode_video2 success")
            // Only draw when a rendering surface has been attached.
            if(nativeWindow) {
                drawVideo();
            }

        }

    } else if(packet -> stream_index == audio_index) {
        // TODO: audio decoding is not implemented.
    } else {

    }
    return SUCCESS;
}
// Render the most recently decoded frame into the ANativeWindow as RGBA.
// Returns true when the frame was posted, false on any geometry/lock failure.
bool ffmpeg::drawVideo() {
    LOGI("drawVideo start")
    if (ANativeWindow_setBuffersGeometry(nativeWindow, avCodecContextVideo->width, avCodecContextVideo->height, WINDOW_FORMAT_RGBA_8888) != 0) {
        logError(STATE_SET_BUFFERS_GEOMETRY);
        return false;
    }
    // Lock the window's back buffer for direct pixel access.
    int lock_ANativeWindow = ANativeWindow_lock(nativeWindow, &native_outBuffer, NULL);
    if (lock_ANativeWindow != 0) {
        logError(STATE_ANATIVE_WINDOW_LOCK);
        return false;
    }

    // Convert the decoded frame into the RGBA staging frame.
    sws_scale(swsContext, (const uint8_t *const *) frame->data, frame->linesize, 0, frame->height, rgb_frame->data, rgb_frame->linesize);
    uint8_t *dst = (uint8_t *) native_outBuffer.bits;
    int destStride = native_outBuffer.stride * 4;  // window row pitch in bytes (RGBA)
    uint8_t *src = rgb_frame->data[0];             // first byte of the RGBA pixels
    int srcStride = rgb_frame->linesize[0];        // staging-frame row pitch in bytes
    // BUGFIX: copy only the bytes both rows can hold. The old code copied
    // srcStride bytes per row; when linesize padding exceeds the window's
    // stride this overwrote the start of the next row (and ran past the
    // buffer entirely on the last row).
    int rowBytes = avCodecContextVideo->width * 4;
    if (rowBytes > srcStride) rowBytes = srcStride;
    if (rowBytes > destStride) rowBytes = destStride;
    for (int i = 0; i < avCodecContextVideo->height; ++i) {
        // Copy one row of pixels from the staging frame into the window.
        memcpy(dst + i * destStride, src + i * srcStride, rowBytes);
    }
    // Unlock and post the buffer to the screen.
    ANativeWindow_unlockAndPost(nativeWindow);
    return true;
}

//avformat_open_input连接超时判断
int ffmpeg::interrupt_callback(void *p) {
    Runner *r = (Runner *)p;
    if(r->lasttime > 0) {
        //超时五秒
        if(time(NULL) - r->lasttime > 5 && !r->connected) {
            LOGE("超时 %d",r->channel);
            return AVERROR_EOF;
        }
    }
    return 0;
}
void ffmpeg::release(){
    if(nativeWindow){
        ANativeWindow_release(nativeWindow);
    }
    if(out_buffer) {
        free(out_buffer);
    }
    if(packet != nullptr) {
        av_packet_unref(packet);
    }
    if(avdic != nullptr) {
        av_dict_free(&avdic);
    }
    if(frame != nullptr) {
        av_frame_free(&frame);
    }
    if(rgb_frame != nullptr) {
        av_frame_free(&rgb_frame);
    }

    if(swsContext) {
        sws_freeContext(swsContext);
    }

    if(avCodecContextVideo != nullptr) {
        avcodec_close(avCodecContextVideo);
    }
    if(avCodecContextAudio != nullptr) {
        avcodec_close(avCodecContextAudio);
    }
    if(avFormatContext != nullptr) {
        avformat_close_input(&avFormatContext);
    }
    env->ReleaseStringUTFChars(url, pUrl);

    env->DeleteGlobalRef(callback);
    callback = NULL;
    LOGI("%d RTSP deconstruction end", channel);
}


//在此处跑在子线程中，并回调到java层
void * ffmpeg::download() {
    if(callback == NULL) return NULL;

//    //获取当前native线程是否有没有被附加到jvm环境中
//    int getEnvStat = g_VM->GetEnv((void **)env,JNI_VERSION_1_6);
//    if (getEnvStat == JNI_EDETACHED) {
//        //如果没有， 主动附加到jvm环境中，获取到env
//        if (g_VM->AttachCurrentThread(&env, NULL) != 0) {
//            return;
//        }
//        mNeedDetach = JNI_TRUE;
//    }
//

    //通过强转后的jcallback 获取到要回调的类
    jclass javaClass = env->GetObjectClass(callback);

    if (javaClass == 0) {
        LOGE("Unable to find class");
//        g_VM->DetachCurrentThread();
        return NULL;
    }

    //获取要回调的方法ID
    jmethodID javaCallbackId = env->GetMethodID(javaClass,"onProgressChange", "(JJ)I");
    if (javaCallbackId == NULL) {
        LOGI("Unable to find method:onProgressCallBack");
        return NULL;
    }
    //执行回调
//    env->CallIntMethod(env, jcallback, javaCallbackId,1,1);
    env->CallIntMethod(callback,javaCallbackId,1,1);
    //释放当前线程
//    if(mNeedDetach) {
//        g_VM->DetachCurrentThread();
//    }
//    jobject g_poll = env->NewGlobalRef(jcallback);
    //释放你的全局引用的接口，生命周期自己把控


}