#include "ffmpeg_cmd.h"

#include <jni.h>
#include <string.h>
#include "ffmpeg_thread.h"
#include "android_log.h"
#include "cmdutils.h"

static JavaVM *jvm = NULL;
// Cached Java VM pointer, captured in exec(); lets native worker threads
// attach themselves to the JVM before calling back into Java.
static jclass m_clazz = NULL;// Global reference to the Java-side FFmpegCmd class (callback target).

/**
 * 回调执行Java方法
 * 参看 Jni反射+Java反射
 */
/**
 * Reflectively invokes the static Java method FFmpegCmd.onExecuted(int).
 *
 * @param env   JNI environment attached to the calling thread.
 * @param clazz global reference to the FFmpegCmd class; ignored if NULL.
 * @param ret   ffmpeg exit status forwarded to Java.
 *
 * Signature "(I)V" was produced with `javap -s -public FFmpegCmd`.
 */
void callJavaMethod(JNIEnv *env, jclass clazz,int ret) {
    jmethodID mid;

    if (clazz == NULL) {
        LOGE("---------------clazz isNULL---------------");
        return;
    }
    mid = (*env)->GetStaticMethodID(env, clazz, "onExecuted", "(I)V");
    if (mid != NULL) {
        (*env)->CallStaticVoidMethod(env, clazz, mid, ret);
    } else {
        LOGE("---------------methodID isNULL---------------");
    }
}
/**
 * Reflectively invokes the static Java method FFmpegCmd.onProgress(float).
 *
 * @param env   JNI environment attached to the calling thread.
 * @param clazz global reference to the FFmpegCmd class; ignored if NULL.
 * @param ret   progress value forwarded to Java.
 *
 * Signature "(F)V" was produced with `javap -s -public FFmpegCmd`.
 */
void callJavaMethodProgress(JNIEnv *env, jclass clazz,float ret) {
    jmethodID mid;

    if (clazz == NULL) {
        LOGE("---------------clazz isNULL---------------");
        return;
    }
    mid = (*env)->GetStaticMethodID(env, clazz, "onProgress", "(F)V");
    if (mid != NULL) {
        (*env)->CallStaticVoidMethod(env, clazz, mid, ret);
    } else {
        LOGE("---------------methodID isNULL---------------");
    }
}

/**
 * c语言-线程回调
 */
/**
 * Completion callback invoked from the ffmpeg worker thread.
 * A native thread must attach to the JVM before it may use JNI,
 * and detach again once it is done.
 */
static void ffmpeg_callback(int ret) {
    JNIEnv *env = NULL;

    (*jvm)->AttachCurrentThread(jvm, (void **) &env, NULL);
    callJavaMethod(env, m_clazz, ret);
    (*jvm)->DetachCurrentThread(jvm);
}

/**
 * Progress callback invoked from the ffmpeg worker thread; attaches the
 * thread to the JVM, forwards the value to Java, then detaches.
 */
void ffmpeg_progress(float progress) {
    JNIEnv *env = NULL;

    (*jvm)->AttachCurrentThread(jvm, (void **) &env, NULL);
    callJavaMethodProgress(env, m_clazz, progress);
    (*jvm)->DetachCurrentThread(jvm);
}

/**
 * JNI entry point: converts the Java String[] command line into a C argv
 * array and runs it on a dedicated ffmpeg thread.
 *
 * @param cmdnum  number of arguments in cmdline.
 * @param cmdline the ffmpeg argument vector (Java String[]).
 * @return 0 on success, -1 on allocation failure.
 *
 * Ownership note: argv and the UTF-8 strings obtained from
 * GetStringUTFChars are handed to the worker thread and must remain valid
 * while the command runs, so they are intentionally NOT released here.
 * NOTE(review): presumably ffmpeg_thread_run_cmd (or its completion path)
 * is responsible for releasing them — confirm in ffmpeg_thread.c.
 */
JNIEXPORT jint JNICALL
Java_com_github_xch168_ffmpeg_1cmd_FFmpegCmd_exec(JNIEnv *env, jclass clazz, jint cmdnum, jobjectArray cmdline)
{
    int i = 0;//C99-style loop index kept at function scope for NDK compatibility
    char **argv = NULL;//argument vector passed to the worker thread
    jstring *strr = NULL;//temporary jstring handles backing argv

    //Cache the VM and a global class ref so worker threads can call back into Java.
    (*env)->GetJavaVM(env, &jvm);
    m_clazz = (*env)->NewGlobalRef(env, clazz);

    if (cmdline != NULL && cmdnum > 0) {
        argv = (char **) malloc(sizeof(char *) * cmdnum);
        strr = (jstring *) malloc(sizeof(jstring) * cmdnum);
        if (argv == NULL || strr == NULL) {
            //Allocation failed: report error instead of dereferencing NULL.
            free(argv);
            free(strr);
            return -1;
        }
        for (i = 0; i < cmdnum; ++i) {
            strr[i] = (jstring) (*env)->GetObjectArrayElement(env, cmdline, i);
            argv[i] = (char *) (*env)->GetStringUTFChars(env, strr[i], 0);
        }
    }

    //Register the completion callback BEFORE launching the worker thread, so
    //a command that finishes very quickly cannot complete while no callback
    //is installed (the original code registered it after starting the thread).
    ffmpeg_thread_callback(ffmpeg_callback);
    ffmpeg_thread_run_cmd(cmdnum, argv);

    //strr only held the jstring handles needed during conversion above.
    free(strr);
    return 0;
}

/**
 * JNI entry point: pulls an RTSP stream (or local file) and remuxes it to
 * RTMP/FLV without re-encoding.
 *
 * @param rtsp input URL (RTSP, or a local file path).
 * @param rtmp output RTMP URL.
 * @return 0 on success, a negative FFmpeg error code (or -1) on failure.
 *
 * Fixes over the original:
 *  - `(res = avformat_alloc_output_context2(...) < 0)` stored the boolean
 *    comparison in res, losing the error code; parentheses corrected.
 *  - every early return leaked ictx/octx/opts and the UTF string buffers;
 *    all exits now go through a single cleanup path.
 *  - the `pkt.size <= 0` continue path leaked the packet.
 *  - avio_open is now checked via its return value, not octx->pb.
 * Deprecated FFmpeg APIs (av_register_all, stream->codec, ...) are kept to
 * match the FFmpeg version this project builds against.
 */
JNIEXPORT jint JNICALL
Java_com_github_xch168_ffmpeg_1cmd_FFmpegCmd_startRtmp(JNIEnv *env, jclass clazz,
                                                       jstring rtsp, jstring rtmp)
{
    jint i = 0;
    int64_t first_pts = 0, first_dts = 0;//64-bit: pkt.pts/dts would truncate in an int
    int res = 0;
    AVFormatContext *ictx = NULL;//input (demux) context
    AVFormatContext *octx = NULL;//output (mux) context
    AVDictionary *opts = NULL;
    AVPacket pkt;

    const char *inUrl = (*env)->GetStringUTFChars(env, rtsp, 0);//may be a local file
    const char *outUrl = (*env)->GetStringUTFChars(env, rtmp, 0);

    //Register all muxers/demuxers and bring up the network stack.
    av_register_all();
    avformat_network_init();

    //Cap the RTSP demuxer's internal delay.
    av_dict_set(&opts, "max_delay", "500", 0);

    //Open the input and read the container header.
    if ((res = avformat_open_input(&ictx, inUrl, NULL, &opts)) != 0)
        goto cleanup;

    //Probe audio/video stream information.
    if ((res = avformat_find_stream_info(ictx, NULL)) < 0)
        goto cleanup;
    av_dump_format(ictx, 0, inUrl, 0);

    //Create the FLV output context. NOTE: parentheses fixed so res receives
    //the actual error code, not the result of the `< 0` comparison.
    if ((res = avformat_alloc_output_context2(&octx, NULL, "flv", outUrl)) < 0)
        goto cleanup;

    //Mirror every input stream on the output and copy codec parameters
    //(remux only — no re-encoding).
    for (i = 0; i < ictx->nb_streams; ++i)
    {
        AVStream *out = avformat_new_stream(octx, ictx->streams[i]->codec->codec);
        if (out == NULL)
        {
            LOGE("new stream error.\n");
            res = -1;
            goto cleanup;
        }
        if ((res = avcodec_copy_context(out->codec, ictx->streams[i]->codec)) != 0)
            goto cleanup;
        //out->codec->codec_tag = 0;//would mark the stream as not re-encoded
    }
    av_dump_format(octx, 0, outUrl, 1);

    //Open the RTMP output IO context (network loss shows up as a timeout here).
    if ((res = avio_open(&octx->pb, outUrl, AVIO_FLAG_WRITE)) < 0)
        goto cleanup;

    //Write the header; this may rescale the output streams' time bases.
    if ((res = avformat_write_header(octx, NULL)) < 0)
        goto cleanup;

    while (1)
    {
        if (av_read_frame(ictx, &pkt) != 0) {
            break;//end of stream or read error: stop pumping packets
        }

        if (pkt.size <= 0) {//RTSP can deliver zero-sized packets
            av_free_packet(&pkt);//don't leak the skipped packet
            continue;
        }

        //Rescale pts/dts/duration from the input to the output time base.
        pkt.pts = pkt.pts * av_q2d(ictx->streams[pkt.stream_index]->time_base) / av_q2d(octx->streams[pkt.stream_index]->time_base);
        pkt.dts = pkt.dts * av_q2d(ictx->streams[pkt.stream_index]->time_base) / av_q2d(octx->streams[pkt.stream_index]->time_base);
        pkt.duration = pkt.duration * av_q2d(ictx->streams[pkt.stream_index]->time_base) / av_q2d(octx->streams[pkt.stream_index]->time_base);
        pkt.pos = -1;//byte position in stream, -1 if unknown

        //Offset subsequent packets by the first packet's timestamps.
        if (first_pts == 0)
        {
            first_pts = pkt.pts;
            first_dts = pkt.dts;
        }
        else
        {
            pkt.pts += first_pts;
            pkt.dts += first_dts;
        }
        //The muxer requires dts <= pts.
        if (pkt.dts < pkt.pts)
        {
            pkt.dts = pkt.pts;
        }

        //Push the packet; the muxer may reject early frames with EINVAL (-22)
        //when dts does not increase — log and keep going.
        if ((res = av_interleaved_write_frame(octx, &pkt)) < 0) {
            LOGE("av_interleaved_write_frame error%d.\n", res);
        }

        av_free_packet(&pkt);//release the packet's internal buffers
    }
    av_write_trailer(octx);//finalize the output file/stream
    res = 0;//end-of-stream is treated as normal completion

cleanup:
    //Single exit path: release everything acquired above (the original
    //leaked all of this on every early return).
    if (octx != NULL) {
        if (octx->pb != NULL)
            avio_close(octx->pb);
        avformat_free_context(octx);
    }
    if (ictx != NULL)
        avformat_close_input(&ictx);
    av_dict_free(&opts);//frees any options not consumed by avformat_open_input
    (*env)->ReleaseStringUTFChars(env, rtsp, inUrl);
    (*env)->ReleaseStringUTFChars(env, rtmp, outUrl);
    return res;
}

JNIEXPORT void JNICALL
Java_com_github_xch168_ffmpeg_1cmd_FFmpegCmd_exit(JNIEnv *env, jclass type) {
    // Intentionally empty stub: calling it has no effect.
    // NOTE(review): presumably meant to cancel/stop a running ffmpeg command
    // (e.g. via a ffmpeg_thread cancel hook) — confirm against ffmpeg_thread.h.
}