﻿#include "mediacapture.h"

MediaCapture::MediaCapture()
{
    // Start idle; the capture thread is only launched via setRunState(true).
    isRun = false;

    // Local render sinks for decoded audio samples / video frames.
    audioOutput = new AudioOutput();
    videoOutput = new VideoOutput();
}

MediaCapture::~MediaCapture()
{
    // Stop the capture thread BEFORE tearing down the output sinks:
    // run() dereferences videoOutput/audioOutput, so deleting them while the
    // thread is still alive is a use-after-free.
    //
    // NOTE: quit() only terminates an event loop started with exec(); run()
    // here spins on isRun instead, so isRun must be cleared or wait() would
    // block forever on a still-running read loop.
    isRun = false;
    quit();
    wait();

    delete videoOutput;
    videoOutput = nullptr;
    delete audioOutput;
    audioOutput = nullptr;
}

void MediaCapture::setRunState(bool start)
{
    // Record the requested state first; run() polls isRun to decide when
    // to leave its read loop.
    isRun = start;

    if (!start)
        return;

    // Kick off the QThread; execution continues in run().
    this->start();
}

/**
 * Deep-copies @p packet and hands the copy to @p pusher as a ReadyPacket.
 *
 * @param pusher  destination RTMP pusher (ownership of the ReadyPacket
 *                transfers to it via writePacket — presumably; verify against
 *                RtmpPusher)
 * @param packet  source packet; left untouched, caller still owns it
 * @param type    TypeVideo / TypeAudio tag stored on the ReadyPacket
 * @param pts/dts timestamps in milliseconds, already rescaled by the caller
 */
void MediaCapture::sendAVPacketToPusher(RtmpPusher *pusher, AVPacket *packet, AVPacketType type, uint32_t pts, uint32_t dts)
{
    if (pusher == nullptr || packet == nullptr || packet->size <= 0)
        return;

    // The payload must be allocated with av_malloc so FFmpeg can free it
    // (av_packet_from_data takes ownership), and it must carry
    // AV_INPUT_BUFFER_PADDING_SIZE zeroed bytes past the end — a documented
    // requirement of av_packet_from_data.
    int size = packet->size;
    uint8_t *data = (uint8_t *)av_malloc(size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (data == nullptr)
        return;
    memcpy(data, packet->data, size);
    memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    AVPacket *newPacket = av_packet_alloc();
    if (newPacket == nullptr)
    {
        av_free(data);
        return;
    }
    newPacket->pts = packet->pts;
    newPacket->dts = packet->dts;

    if (av_packet_from_data(newPacket, data, size) < 0)
    {
        // Ownership of data was NOT taken on failure — free both pieces.
        av_free(data);
        av_packet_free(&newPacket);
        return;
    }

    ReadyPacket *readyPacket = new ReadyPacket();
    readyPacket->packet = newPacket;
    readyPacket->type = type;
    readyPacket->ptsTime = pts;
    readyPacket->dtsTime = dts;

    pusher->writePacket(readyPacket);
}

void MediaCapture::run()
{
    std::string path = filePath.toStdString();
    bool hasVideo = false;
    bool hasAudio = false;
    int  result = 0;

    //打开输入
    AVFormatContext *iFmtCtx = avformat_alloc_context();
    result = avformat_open_input(&iFmtCtx, path.c_str(), NULL, NULL);
    if (result < 0)
    {
        qDebug() << "open input error" << filePath;
        return;
    }

    //获取流信息
    result = avformat_find_stream_info(iFmtCtx, NULL);
    if (result < 0)
    {
        qDebug() << "find stream info error";
        return;
    }

    /*************** 视频流操作 *****************/
    //1 获取视频流
    int videoIndex = av_find_best_stream(iFmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

    AVStream *videoStream = nullptr;
    AVCodecContext *videoCodecCtx = nullptr;
    if (videoIndex >= 0)
    {
        videoStream = iFmtCtx->streams[videoIndex];

        //2 获取视频流解码器, 或者指定解码器
        AVCodec *videoDecoder = avcodec_find_decoder(videoStream->codecpar->codec_id);
        if (videoDecoder == NULL)
        {
            qDebug() << "video decoder not found";
            return;
        }

        //3 初始化视频解码器上下文 解码就绪
        videoCodecCtx = avcodec_alloc_context3(videoDecoder);
        avcodec_parameters_to_context(videoCodecCtx, videoStream->codecpar);
        videoCodecCtx->lowres = videoDecoder->max_lowres;
        videoCodecCtx->flags2 |= AV_CODEC_FLAG2_FAST;
        //减少延时但会丢部分数据
//        videoCodecCtx->flags |= AVFMT_FLAG_NOBUFFER;
        result = avcodec_open2(videoCodecCtx, videoDecoder, NULL);
        if (result < 0)
        {
            qDebug() << "open video decoder error";
            return;
        }

        hasVideo = true;
    }
    else
    {
        qDebug() << "find video stream index error";
    }

    /*************** 音频流操作 *****************/
    //1 获取音频流
    int audioIndex = av_find_best_stream(iFmtCtx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);

    AVStream *audioStream = nullptr;
    AVCodecContext *audioCodecCtx = nullptr;
    if (audioIndex >= 0)
    {
        audioStream = iFmtCtx->streams[audioIndex];

        //2 获取音频流解码器, 或者指定解码器
        AVCodec *audioDecoder = avcodec_find_decoder(audioStream->codecpar->codec_id);
        if (audioDecoder == NULL)
        {
            qDebug() << "audio decoder not found";
            return;
        }

        //3 初始化音频解码器上下文 解码就绪
        audioCodecCtx = avcodec_alloc_context3(audioDecoder);
        avcodec_parameters_to_context(audioCodecCtx, audioStream->codecpar);
        audioCodecCtx->lowres = audioDecoder->max_lowres;
        audioCodecCtx->flags2 |= AV_CODEC_FLAG2_FAST; //设置加速解码
        result = avcodec_open2(audioCodecCtx, audioDecoder, NULL);
        if (result < 0)
        {
            qDebug() << "open audio decoder error";
            return;
        }

        hasAudio = true;
    }
    else
    {
        qDebug() << "find audio stream index error";
    }

    if(!hasAudio && !hasVideo)
    {
        qDebug() << "not find any stream";
        return;
    }

    //设置rtmp推流器
    pusher.filePath = filePath;
    pusher.hasAudio = hasAudio;
    pusher.hasVideo = hasVideo;
    FLVMetadataMsg *metadata = new FLVMetadataMsg();

    //封装信息
    if(hasAudio)
    {
        int channels = audioCodecCtx->channels;
        int sampleRate = audioCodecCtx->sample_rate;
        int sampleFmt = audioCodecCtx->sample_fmt;
        int audioBitRate = audioCodecCtx->bit_rate;
        int profile = audioCodecCtx->profile;
        qDebug() << "Audio:" << sampleRate << sampleFmt << channels << audioBitRate << profile;

        audioOutput->setAudioParm(sampleRate, sampleFmt, channels);

        metadata->has_audio = hasAudio;
        metadata->channles = channels;
        metadata->audiosamplerate = sampleRate;
        metadata->audiosamplesize = av_get_bytes_per_sample(audioCodecCtx->sample_fmt);
        metadata->audiodatarate = audioBitRate;
        pusher.setAudioSeqHeader((char *)audioCodecCtx->extradata, audioCodecCtx->extradata_size);
    }

    if(hasVideo)
    {
        int videoWidth = videoCodecCtx->width;
        int videoHeight = videoCodecCtx->height;
        int frameRate = videoStream->avg_frame_rate.num;
        int videoBitRate = videoCodecCtx->bit_rate;
        qDebug() << "Video:" << videoWidth << videoHeight << frameRate << videoBitRate;

        videoOutput->setVideoParm(videoWidth, videoHeight, false);

        metadata->has_video = hasVideo;
        metadata->width = videoWidth;
        metadata->height = videoHeight;
        metadata->framerate = frameRate;
        metadata->videodatarate = videoBitRate;
        pusher.setVideoSeqHeader((char *)videoCodecCtx->extradata, videoCodecCtx->extradata_size);
    }

    //启动推流
    pusher.setMetaMsg(metadata);
    pusher.setRunState(true);

    qDebug() << "开始读包";
    AVPacket *packet = av_packet_alloc();
    AVFrame *videoFrame = av_frame_alloc();
    AVFrame *audioFrame = av_frame_alloc();
    int64_t startTime = av_gettime();

    while (isRun)
    {
        //不断读取视频packet
        int ret = av_read_frame(iFmtCtx, packet);
        if (ret == AVERROR_EOF)
        {
            qDebug() << "read finish";
            break;
        }

        if(packet->stream_index == videoIndex)
        {
            //发送至推流器
            int64_t pts = getRealTimeByPTS(packet->pts, videoStream->time_base)/1000;
            int64_t dts = getRealTimeByPTS(packet->dts, videoStream->time_base)/1000;
            if(pts < 0)
                pts = dts = 0;

            sendAVPacketToPusher(&pusher, packet, TypeVideo, pts, dts);

            if(!hasAudio)
                calcAndDelay(startTime, packet->pts, videoStream->time_base);

            //解码本地渲染视频
            avcodec_send_packet(videoCodecCtx, packet);
            avcodec_receive_frame(videoCodecCtx, videoFrame);

            videoOutput->startDisplay(videoFrame);
            av_frame_unref(videoFrame);

        }
        else if(packet->stream_index == audioIndex)
        {
            //发送至推流器
            int64_t pts = getRealTimeByPTS(packet->pts, audioStream->time_base)/1000;
            if(pts < 0)
                pts = 0;

            sendAVPacketToPusher(&pusher, packet, TypeAudio, pts, pts);

            //延时等待
            calcAndDelay(startTime, packet->pts, audioStream->time_base);

            //解码本地播放音频
            avcodec_send_packet(audioCodecCtx, packet);
            while( avcodec_receive_frame(audioCodecCtx, audioFrame) == 0)
            {
//                audioOutput->startPlay(audioFrame);
                av_frame_unref(audioFrame);
            }
        }

        av_packet_unref(packet);
    }

    qDebug() << "结束取包";
    videoOutput->stopDisplay();
    pusher.setRunState(false);

    av_frame_free(&videoFrame);
    av_frame_free(&audioFrame);
    av_packet_free(&packet);
    avcodec_free_context(&videoCodecCtx);
    avcodec_free_context(&audioCodecCtx);
    avformat_close_input(&iFmtCtx);
}
