//
// Created by hyh on 4/27/20.
//
#include <HyhPlayer.h>

int read_buffer(void *opaque, uint8_t *buf, int buf_size){

    HyhPlayer * hyhPlayer = (HyhPlayer *)opaque;
    MediaData *mediaData = hyhPlayer->getData();
    int len = mediaData->len;
    memcpy(buf,mediaData->data,len);
    free(mediaData);
    return len;
}

/**
 * Blocks until a MediaData chunk is available, then dequeues and returns it.
 * Ownership of the returned pointer transfers to the caller.
 */
MediaData* HyhPlayer::getData() {
    unique_lock<mutex> lock(mediaDataMutex);
    // Re-check the predicate in a loop: condition variables can wake
    // spuriously, and the original single `if` would then call front()/pop()
    // on an empty queue — undefined behavior.
    while (mediaDataQueue.empty()) {
        mediaDataCv.wait(lock);
    }
    MediaData *mediaData = mediaDataQueue.front();
    mediaDataQueue.pop();
    return mediaData;
}

void HyhPlayer::putData(uint8_t *data, int len) {

    MediaData *mediaData= new MediaData();
    mediaData->data = new uint8_t[len];
    memcpy(mediaData->data,data,len);
    mediaData->len = len;
    unique_lock<mutex> lock(mediaDataMutex);
    mediaDataQueue.push(mediaData);
    mediaDataCv.notify_one();
}

/**
 * Configures FFmpeg with a custom AVIO context fed by putData()/read_buffer(),
 * opens the audio/video decoders, and spawns the decode and playback threads.
 *
 * @param javaVM         JVM handle used by worker threads to attach JNIEnv
 * @param instance       Java-side player object (playTrack/createTrack live there)
 * @param filePath       source path, kept for logging only (input comes via AVIO)
 * @param pANativeWindow surface the video frames are rendered onto
 */
void HyhPlayer::open(JavaVM *javaVM, jobject instance, const char *filePath,
                     ANativeWindow *pANativeWindow) {

    this->filePath = filePath;
    this->javaVM = javaVM;
    this->instance = instance;
    this->pANativeWindow = pANativeWindow;
    mediaStatus = start;

    av_register_all();

    unsigned char * inbuffer = (unsigned char *)av_malloc(32768);

    AVIOContext *avio_in = avio_alloc_context(inbuffer,32768,0,this,read_buffer,NULL,NULL);

    if(avio_in==NULL){
        LOGE("Could not alloc avio");
        // On failure the AVIOContext never took ownership of the buffer;
        // the original leaked it here.
        av_free(inbuffer);
        return;
    }

    formatCtx = avformat_alloc_context();
    formatCtx->pb = avio_in;
    formatCtx->flags = AVFMT_FLAG_CUSTOM_IO;

    // exit(1) would kill the whole Android app process over a bad media
    // file; log and return to the caller instead.
    if(avformat_open_input(&formatCtx,NULL,NULL,NULL)<0){
        LOGE("Could not open source file %s\n", filePath);
        return;
    }

    if(avformat_find_stream_info(formatCtx,NULL)<0){
        LOGE("Could not find stream information\n");
        return;
    }

    if(open_codec_context(&videoStreamIndex,&videoCodecCtx,formatCtx,AVMEDIA_TYPE_VIDEO)>=0){

        videoStream = formatCtx->streams[videoStreamIndex];
        width = videoCodecCtx->width;
        height = videoCodecCtx->height;
        pixelFormat = videoCodecCtx->pix_fmt;
        // av_image_alloc returns the buffer size in bytes on success.
        int ret = av_image_alloc(video_dst_data, video_dst_linesize,
                                 width, height, pixelFormat, 1);
        if (ret < 0) {
            LOGE("Could not allocate raw video buffer\n");
            return;
        }
        video_dst_bufsize = ret;
    }

    if(open_codec_context(&audioStreamIndex,&audioCodecCtx,formatCtx,AVMEDIA_TYPE_AUDIO)>=0){

        audioStream = formatCtx->streams[audioStreamIndex];
    }

    if(!audioStream&&!videoStream){
        LOGE("Could not find audio or video stream in the input, aborting\n");
        return;
    }

    av_init_packet(&pkt);

    // Detached worker threads; they terminate when mediaStatus becomes stop.
    // NOTE(review): close() cannot join them — it relies on a sleep; consider
    // keeping the thread handles and joining instead.
    thread decodeThread(&HyhPlayer::decode,this);
    thread playAudioThread(&HyhPlayer::playAudio,this);
    thread playVideoThread(&HyhPlayer::playVideo,this);
    decodeThread.detach();
    playAudioThread.detach();
    playVideoThread.detach();
}





/*void HyhPlayer::open(JavaVM *javaVM, jobject instance, const char *filePath,
                     ANativeWindow *pANativeWindow) {

    this->filePath = filePath;
    this->javaVM = javaVM;
    this->instance = instance;
    this->pANativeWindow = pANativeWindow;
    mediaStatus = start;

    av_register_all();
    if(avformat_open_input(&formatCtx,filePath,NULL,NULL)<0){
        LOGE("Could not open source file %s\n", filePath);
        exit(1);
    }

    if(avformat_find_stream_info(formatCtx,NULL)<0){
        LOGE("Could not find stream information\n");
        exit(1);
    }

    if(open_codec_context(&videoStreamIndex,&videoCodecCtx,formatCtx,AVMEDIA_TYPE_VIDEO)>=0){

        video_dst_file = fopen(video_dst_filename,"wb");
        videoStream = formatCtx->streams[videoStreamIndex];

        width = videoCodecCtx->width;
        height = videoCodecCtx->height;
        pixelFormat = videoCodecCtx->pix_fmt;
        int ret = av_image_alloc(video_dst_data, video_dst_linesize,
                                 width, height, pixelFormat, 1);
        if (ret < 0) {
            LOGE("Could not allocate raw video buffer\n");
            return;
        }
        video_dst_bufsize = ret;
    }

    if(open_codec_context(&audioStreamIndex,&audioCodecCtx,formatCtx,AVMEDIA_TYPE_AUDIO)>=0){

        audio_dst_file = fopen(audio_dst_filename,"wb");
        audioStream = formatCtx->streams[audioStreamIndex];
    }

    av_dump_format(formatCtx, 0, filePath, 0);

    if(!audioStream&&!videoStream){
        LOGE("Could not find audio or video stream in the input, aborting\n");
        return;
    }

    av_init_packet(&pkt);

    thread decodeThread(&HyhPlayer::decode,this);
    thread playAudioThread(&HyhPlayer::playAudio,this);
    thread playVideoThread(&HyhPlayer::playVideo,this);
    decodeThread.detach();
    playAudioThread.detach();
    playVideoThread.detach();
}*/


int HyhPlayer::open_codec_context(int *stream_idx, AVCodecContext **dec_ctx,
                                  AVFormatContext *fmt_ctx, enum AVMediaType type) {
    int stream_index;
    int ret = av_find_best_stream(fmt_ctx,type,-1,-1,NULL,0);
    if(ret<0){
        LOGE("Could not find %s stream in input file '%s'\n",
                av_get_media_type_string(type), filePath);
        return ret;
    }else{
        stream_index = ret;
        LOGE("stream_index=%d",stream_index);
        AVStream *avStream = fmt_ctx->streams[stream_index];

        AVCodec *avCodec = avcodec_find_decoder(avStream->codecpar->codec_id);
        if(!avCodec){
            LOGE("Failed to find %s codec\n",
                    av_get_media_type_string(type));
            return AVERROR(EINVAL);
        }

        *dec_ctx = avcodec_alloc_context3(avCodec);
        if(!*dec_ctx){
            LOGE("Failed to allocate the %s codec context\n",
                    av_get_media_type_string(type));
            return AVERROR(ENOMEM);
        }

        if((ret=avcodec_parameters_to_context(*dec_ctx,avStream->codecpar))<0){
            LOGE("Failed to copy %s codec parameters to decoder context\n",
                    av_get_media_type_string(type));
            return ret;
        }

        AVDictionary *opts = NULL;
        av_dict_set(&opts, "refcounted_frames", refcount ? "1" : "0", 0);
        if ((ret = avcodec_open2(*dec_ctx, avCodec, &opts)) < 0) {
            LOGE("Failed to open %s codec\n",
                    av_get_media_type_string(type));
            return ret;
        }
        *stream_idx = stream_index;
    }
    return 0;
}


void HyhPlayer::decode() {

    while(mediaStatus!=stop){
        if(videoFrameQueue.size()<MAX_VIDEO_FRAME&&audioFrameQueue.size()<MAX_AUDIO_FRAME)
        {
            if(av_read_frame(formatCtx,&pkt)>=0){
                if(pkt.stream_index == videoStreamIndex){

                    int ret = avcodec_send_packet(videoCodecCtx, &pkt);
                    while (ret >= 0) {
                        AVFrame *avFrame = av_frame_alloc();
                        ret = avcodec_receive_frame(videoCodecCtx, avFrame);
                        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                            break;
                        } else if (ret < 0) {
                            break;
                        }
                        unique_lock<mutex> lock(videoMutex);
                        videoFrameQueue.push(avFrame);
                        videoCv.notify_one();
                    }

                }else if(pkt.stream_index == audioStreamIndex){

                    int ret = avcodec_send_packet(audioCodecCtx, &pkt);
                    while (ret >= 0) {
                        AVFrame *avFrame = av_frame_alloc();
                        ret = avcodec_receive_frame(audioCodecCtx, avFrame);
                        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                            break;
                        } else if (ret < 0) {
                            break;
                        }
                        unique_lock<mutex> lock(audioMutex);
                        audioFrameQueue.push(avFrame);
                        audioCv.notify_one();
                    }
                }
                av_packet_unref(&pkt);
            }else{
                onComplete();
                break;
            }
        }
    }
}

// Invoked once by decode() when av_read_frame() stops returning packets
// (end of stream or read error). Currently only logs; a real implementation
// would notify the Java layer that playback finished.
void HyhPlayer::onComplete() {
    LOGE("onComplete");
}



/**
 * Blocks until a decoded audio frame is queued, then dequeues and returns it.
 * Caller owns the frame and must av_frame_free() it.
 */
AVFrame * HyhPlayer::getAudioFrame() {
    unique_lock<mutex> lock(audioMutex);
    // `while`, not `if`: a spurious wakeup with the original `if` would
    // pop from an empty queue — undefined behavior.
    while (audioFrameQueue.empty()) {
        audioCv.wait(lock);
    }
    AVFrame* frame = audioFrameQueue.front();
    audioFrameQueue.pop();
    return frame;
}

/**
 * Blocks until a decoded video frame is queued, then dequeues and returns it.
 * Caller owns the frame and must av_frame_free() it.
 */
AVFrame * HyhPlayer::getVideoFrame() {
    unique_lock<mutex> lock(videoMutex);
    // `while`, not `if`: guard against spurious condition-variable wakeups.
    while (videoFrameQueue.empty()) {
        videoCv.wait(lock);
    }
    AVFrame* frame = videoFrameQueue.front();
    videoFrameQueue.pop();
    return frame;
}



void HyhPlayer::playAudio() {

    if(audioStreamIndex==-1){
        return;
    }

    JNIEnv *env;
    if (javaVM->AttachCurrentThread(&env, NULL) != JNI_OK) {
        LOGE("获取JNIEnv失败");
        return;
    }
    //-----------------初始化音频参数Start---------------//

    uint8_t outChLayout = AV_CH_LAYOUT_STEREO;
    int outChLayoutNb=av_get_channel_layout_nb_channels(outChLayout);
    AVSampleFormat outSampleFmt = AV_SAMPLE_FMT_S16;
    int outSampleRate = audioCodecCtx->sample_rate;
    SwrContext *pSwrCtx = swr_alloc();
    swr_alloc_set_opts(pSwrCtx,
                       outChLayout, outSampleFmt, outSampleRate,
                       audioCodecCtx->channel_layout, audioCodecCtx->sample_fmt, audioCodecCtx->sample_rate,
                       0, NULL);
    swr_init(pSwrCtx);
    //-----------------初始化音频参数End---------------//

    //--------------通过反射获取音频播放器Start--------//
    jclass mediaPlay = env->GetObjectClass(instance);
    jmethodID createTrackId = env->GetMethodID(mediaPlay, "createTrack", "(II)V");
    env->CallVoidMethod(instance, createTrackId, outSampleRate, outChLayoutNb);
    jmethodID playTrackId = env->GetMethodID(mediaPlay, "playTrack", "([BI)V");
    //--------------通过反射获取音频播放器End--------//
    uint8_t *outBuffer=(uint8_t *) av_malloc(44100 * 2);
//    FILE *file = fopen("/storage/emulated/0/aaa.aac","w");
//    fwrite(outBuffer,1,size,file);
//    if(count++==500){
//        fclose(file);
//    }
    while (mediaStatus!=stop) {
        AVFrame *avFrame = getAudioFrame();

        int nb=swr_convert(pSwrCtx, &outBuffer, 44100 * 2,
                           (const uint8_t **) avFrame->data, avFrame->nb_samples);
        int size = av_samples_get_buffer_size(NULL, outChLayoutNb, avFrame->nb_samples,
                                              outSampleFmt, 1);

        jbyteArray byteArray = env->NewByteArray(size);
        env->SetByteArrayRegion(byteArray, 0, size, (const jbyte *) outBuffer);
        env->CallVoidMethod(instance, playTrackId, byteArray, size);
        env->DeleteLocalRef(byteArray);

        if (avFrame->pts != AV_NOPTS_VALUE){
            audio_clock = (avFrame->pts) * av_q2d(audioStream->time_base);
//            int data_size = avFrame->channels * nb * av_get_bytes_per_sample(outSampleFmt);
//            audio_clock += static_cast<double>(data_size) /
//                           (2 * stream->codec->channels * stream->codec->sample_rate);
        }else{
            LOGE("audio AV_NOPTS_VALUE");
        }
        av_frame_free(&avFrame);
    }

    free(outBuffer);
    free(env);
}

// av_gettime() returns microseconds as int64_t; storing it in a plain int
// truncated the value. (Initialized once at library load time.)
int64_t now = av_gettime();
void HyhPlayer::playVideo() {

    if(videoStreamIndex==-1){
        return;
    }

    ANativeWindow_setBuffersGeometry(pANativeWindow,videoCodecCtx->width,videoCodecCtx->height,WINDOW_FORMAT_RGBA_8888);//初始化窗口
    SwsContext * pSwsCtx = sws_getContext(videoCodecCtx->width,videoCodecCtx->height,videoCodecCtx->pix_fmt,
                                          videoCodecCtx->width,videoCodecCtx->height,AV_PIX_FMT_RGBA,
                                          SWS_BILINEAR,NULL,NULL,NULL);//初始化图形转化上下文
    int outSize=av_image_get_buffer_size(AV_PIX_FMT_RGBA,videoCodecCtx->width,videoCodecCtx->height,1);
    uint8_t * outBuffer = (uint8_t *) malloc(sizeof(uint8_t) * outSize);
    AVFrame * rgbFrame=av_frame_alloc();
    av_image_fill_arrays(rgbFrame->data,rgbFrame->linesize,
                         outBuffer,AV_PIX_FMT_RGBA,videoCodecCtx->width,videoCodecCtx->height,1);//初始化图片帧
    ANativeWindow_Buffer windowBuffer;


    while (mediaStatus!=stop) {
        AVFrame* avFrame = getVideoFrame();
        video_frame_count++;
        if (avFrame->pts != AV_NOPTS_VALUE){
            double timestamp = avFrame->pts*av_q2d(videoStream->time_base);
            if (timestamp > audio_clock) {
                usleep((unsigned long)((timestamp - audio_clock)*1000000));
            }
        }else{
            double frame_rate = av_q2d(videoStream->avg_frame_rate);
            frame_rate += avFrame->repeat_pict * (frame_rate * 0.5);
            usleep((unsigned long)(frame_rate * 1000));
        }
        if (ANativeWindow_lock(pANativeWindow, &windowBuffer, NULL) >= 0){
            sws_scale(pSwsCtx, (const uint8_t *const *) avFrame->data, avFrame->linesize,
                      0, avFrame->height,
                      rgbFrame->data, rgbFrame->linesize);
            uint8_t * firstWindow = (uint8_t *) windowBuffer.bits;
            uint8_t * firstFrame = rgbFrame->data[0];
            int windowLineByte = windowBuffer.stride * 4;
            int frameLineByte = rgbFrame->linesize[0];
            for(int i=0;i<videoCodecCtx->height;i++){
                memcpy(firstWindow+i*windowLineByte,firstFrame+i*frameLineByte,frameLineByte);
            }
            ANativeWindow_unlockAndPost(pANativeWindow);
        }
        av_frame_free(&avFrame);
    }

    free(pSwsCtx);
    free(outBuffer);
    free(rgbFrame);

}

/**
 * Frees every queued AVFrame and empties the queue.
 *
 * Takes the queue by reference: the original took it by value, so it freed
 * the frames but left the CALLER's queue full of now-dangling pointers.
 */
void clear(queue<AVFrame *> &q) {
    while (!q.empty()){
        AVFrame* frame = q.front();
        q.pop();
        av_frame_free(&frame);
    }
}

void HyhPlayer::close() {
    mediaStatus = stop;
    usleep(100000);
    avcodec_free_context(&audioCodecCtx);
    avcodec_free_context(&videoCodecCtx);
    avformat_close_input(&formatCtx);
//    if (video_dst_file)
//        fclose(video_dst_file);
//    if (audio_dst_file)
//        fclose(audio_dst_file);
    av_free(video_dst_data[0]);
    av_packet_unref(&pkt);
    clear(videoFrameQueue);
    clear(audioFrameQueue);
}



