#include "FVideo.h"

FVideo::FVideo() {
    // Null every raw pointer so members are in a defined state before
    // setCodecContext()/setANativeWindow() run: setANativeWindow() tests
    // this->codec_ctx and release() frees sws_ctx — reading them
    // uninitialized would be undefined behavior.
    this->window = 0;
    this->codec_ctx = 0;
    this->sws_ctx = 0;
    this->out_buf = 0;
}

// Destructor is intentionally empty: resource teardown is done explicitly
// through stop()/release(), so the owner controls when FFmpeg and window
// resources are freed.
FVideo::~FVideo() {
}

// Attach the decoder context for the video stream and prepare the
// pixel-format conversion state:
//  - sws_ctx converts from the decoder's native pix_fmt to RGBA at the
//    source resolution (scaling to the window happens on the surface),
//  - out_buf backs the converted RGBA frame used by the render thread.
void FVideo::setCodecContext(AVCodecContext *codecCtx, int stream_idx) {
    this->codec_ctx = codecCtx;
    this->stream_idx = stream_idx;

    const int src_w = codecCtx->width;
    const int src_h = codecCtx->height;

    // Decoder pix_fmt -> RGBA, same dimensions, bilinear filtering.
    this->sws_ctx = sws_getContext(src_w, src_h, codecCtx->pix_fmt,
                                   src_w, src_h, AV_PIX_FMT_RGBA,
                                   SWS_BILINEAR, NULL, NULL, NULL);

    // Size (in bytes) of one full RGBA picture, then the backing buffer.
    this->buf_size = avpicture_get_size(AV_PIX_FMT_RGBA, src_w, src_h);
    this->out_buf = (uint8_t *) av_malloc(buf_size);
}

// Start video playback: flag the stream as active, then spawn the worker
// thread that decodes and renders frames (playVideo receives `this`).
// Returns 0 unconditionally.
int FVideo::play() {
    // The flag must be set before the thread starts, since the worker
    // loops on it.
    this->isPlaying = 1;
    pthread_create(&this->p_tid, NULL, playVideo, this);
    return 0;
}

// Swap the render surface. Releases the previously held window (if any),
// configures the new window's buffer geometry to match the video, and
// stores it for the render thread.
// `window` may be NULL (e.g. the surface was destroyed) — in that case we
// just drop the old surface.
void FVideo::setANativeWindow(ANativeWindow *window) {
    if (this->window) {
        ANativeWindow_release(this->window);
        this->window = 0;
    }
    // Only touch the new surface when it exists, the video size is known,
    // and the window width is non-zero (guards the division below).
    if (window && this->codec_ctx) {
        int win_w = ANativeWindow_getWidth(window);
        int win_h = ANativeWindow_getHeight(window);
        if (win_w > 0) {
            // Recompute the buffer height from the video width and the
            // window's w:h ratio so the picture is not stretched vertically.
            ANativeWindow_setBuffersGeometry(window, this->codec_ctx->width,
                                             this->codec_ctx->width * win_h /
                                             win_w,
                                             WINDOW_FORMAT_RGBA_8888);
        }
    }
    this->window = window;
}

// Maintain the video clock. A usable pts (non-zero) drives the clock
// forward; a missing pts (0) is replaced by the clock we have been
// accumulating. Returns the (possibly corrected) presentation time of
// this frame, in seconds.
double FVideo::synchronize(AVFrame *frame, double play_ts) {
    if (play_ts == 0) {
        // No timestamp on this frame — fall back to the running clock.
        play_ts = clock;
    } else {
        clock = play_ts;
    }
    // Nominal duration of one frame, derived from the codec time base.
    double frame_delay = av_q2d(codec_ctx->time_base);
    double fps = 1 / frame_delay;
    // Advance the clock to when the NEXT frame is due: one frame period
    // plus any extra display time requested via repeat_pict (half-frame
    // units).
    clock += frame->repeat_pict / (2 * fps) + frame_delay;
    return play_ts;
}

void *playVideo(void *arg) {
    FVideo *fVideo = (FVideo *) arg;
    AVPacket *packet = (AVPacket *) av_mallocz(sizeof(AVPacket));
    // 解码后的 yuv 数据
    AVFrame *frame = av_frame_alloc();
    // 转换后的 rgb 数据
    AVFrame *rgb_frame = av_frame_alloc();
    avpicture_fill((AVPicture *) rgb_frame, fVideo->out_buf, AV_PIX_FMT_RGBA,
                   fVideo->codec_ctx->width,
                   fVideo->codec_ctx->height);
    ANativeWindow_Buffer outBuffer;
    double last_play_ts;    // 上一帧的播放时间
    double play_ts;         // 当前帧的播放时间
    double last_delay;      // 上一次播放视频的两帧的间隔
    double audio_clock;     // 音频的播放时间
    double delay;           // 两帧的播放间隔
    double actual_delay;    // 真正需要延迟的时间
    double sync_threshold;
    double start_ts;        // 第一帧开始的绝对时间
    double pts;
    double diff_ts;         // 音视频的播放时间差
    // 单位 秒
    start_ts = av_gettime() / (1000 * 1000);
    while (fVideo->isPlaying) {
        fVideo->pop(packet); // 从视频队列取出一帧的视频
        int got_frame = 0;
        avcodec_decode_video2(fVideo->codec_ctx, frame, &got_frame, packet);
        if (got_frame) {
            sws_scale(fVideo->sws_ctx,
                      (const uint8_t *const *) frame->data,
                      frame->linesize,
                      0,
                      frame->height,
                      rgb_frame->data,
                      rgb_frame->linesize);
            LOG_I("解码一帧视频");
            pts = av_frame_get_best_effort_timestamp(frame);
            if (pts == AV_NOPTS_VALUE) {
                pts = 0;
            }
            // 相对于第一帧的播放时间
            play_ts = pts * av_q2d(fVideo->time_base);
            // 纠正时间
            play_ts = fVideo->synchronize(frame, play_ts);
            delay = play_ts - last_play_ts;
            if (delay < 0 || delay > 1) {
                delay = last_delay;
            }
            last_play_ts = play_ts;
            last_delay = delay;
            // 音频的播放时间
            audio_clock = fVideo->fAudio->clock;
            // 算出音视频相差时间
            diff_ts = fVideo->clock - audio_clock;
            // 合理范围内
            sync_threshold = delay > 0.01 ? 0.01 : delay;
            // 如果音视频的播放间隔大于10s，我们认为没有音频数据了
            if (fabs(diff_ts) < 10) {
                if (diff_ts < -sync_threshold) {
                    // 视频慢了
                    delay = 0;
                } else if (diff_ts > sync_threshold) {
                    // 视频快了
                    delay = 2 * delay;
                }
            }
            start_ts += delay;
            // 排除掉上面代码执行的时间
            actual_delay = start_ts - av_gettime() / (1000 * 1000);
            if (actual_delay < 0.01) {
                actual_delay = 0.01;
            }
            // 单位 微妙
            av_usleep(actual_delay * 1000 * 1000 + 6000);
            if (fVideo->window) {
                ANativeWindow_lock(fVideo->window, &outBuffer, NULL);
                // 缓冲区的首地址
                uint8_t *dst = (uint8_t *) outBuffer.bits;
                // 每行的内存大小
                int dstStride = outBuffer.stride * 4;
                // rgb 的首地址
                uint8_t *src = rgb_frame->data[0];
                // rgb 每行的内存大小
                int srcStride = rgb_frame->linesize[0];
                int i = 0;
                for (; i < fVideo->codec_ctx->height; ++i) {
                    memcpy(dst + i * dstStride, src + i * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(fVideo->window);
                LOG_I("绘制一帧视频");
            }
        }
        av_packet_unref(packet);
    }
    pthread_exit(0);
}

// Stop playback by delegating to the FMedia base-class implementation.
void FVideo::stop() {
    FMedia::stop();
}

void FVideo::release() {
    FMedia::release();
    sws_freeContext(sws_ctx);
}