#include "ffmpegdecode.h"
#include "AVCodecParamsSerializer.h"
#include <QDebug>
#include <QImage>
#include <QHostAddress>
#include <QThread>
#include <QAudioOutput>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libswresample/swresample.h>
#include <libavutil/opt.h>
}

// Indices used by the remote sender to tag packets in the shared buffer.
// Assumed ordering: audio first, then video — TODO confirm against the server.
enum StreamIdx {
    AUDIO_STREAM = 0,
    VIDEO_STREAM = 1,
};

FFmpegDecode::FFmpegDecode(const QString &url, shared_ptr<RingBuffer> pSharedBuffer,  QObject *parent)
    : QObject(parent)
    , _url(url)
    , _pTcpConnection(nullptr)
    , _pVideoCodec(nullptr)
    , _pVideoCodecCtx(nullptr)
    , _pAudioCodec(nullptr)
    , _pAudioCodecCtx(nullptr)
    , _pSharedBuffer(pSharedBuffer)
    , _isWorking(false)
{

}

/**
 * @brief Stops the decode loop and closes the TCP connection (via stop()).
 *
 * Removed the leftover "lyncheer" debug traces that were left in from
 * development.
 */
FFmpegDecode::~FFmpegDecode()
{
    this->stop();
}

void FFmpegDecode::start()
{
    _pTcpConnection = new MyTcpSocket(this);
    // 一旦连接到服务器，就发送查看RTSP实时流的url
    connect(_pTcpConnection, &MyTcpSocket::connected, [this]() {
        _pTcpConnection->sendTLV(TASK_GET_CODEC_PARAMS, _url.size(), _url.toUtf8());
    });
    // 一旦收到完整的TLV包，就处理收到的解码器参数
    connect(_pTcpConnection, &MyTcpSocket::readPacketFinished, this, &FFmpegDecode::handleCodecParams);

    // 向服务器发起TCP连接
    _pTcpConnection->connectToHost(QHostAddress("192.168.105.108"), 8000);
    _isWorking = true;
}

void FFmpegDecode::process()
{
    qDebug() << "FFmpegDecode::process()";
    if (!_pVideoCodecCtx) {
        qCritical() << "Video codec context is uninitialized.";
        return;
    }

    if (!_pAudioCodecCtx) {
        qCritical() << "Audio codec context is uninitialized.";
        return;
    }

    // 创建并配置图像转换（YUV转RGB32）上下文结构体
    struct SwsContext *pImgConvertCtx = sws_getContext(_pVideoCodecCtx->width, _pVideoCodecCtx->height, _pVideoCodecCtx->pix_fmt,
                                                            _pVideoCodecCtx->width, _pVideoCodecCtx->height, AV_PIX_FMT_RGB32,
                                                            SWS_BICUBIC, nullptr, nullptr, nullptr);

    // 准备源和目标数据缓冲区
    int numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, _pVideoCodecCtx->width, _pVideoCodecCtx->height);
    uint8_t *pOutBuffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    AVFrame *pFrameRGB = av_frame_alloc();
    avpicture_fill((AVPicture *)pFrameRGB, pOutBuffer, AV_PIX_FMT_RGB32, _pVideoCodecCtx->width, _pVideoCodecCtx->height);

    AVFrame *pVideoFrame = av_frame_alloc();

    // ---------------------------音频-------------------------------
    // 初始化音频重采样上下文
    SwrContext *pSwrCtx = swr_alloc();
    if (!pSwrCtx) {
        qCritical() << "Allocate SwrContext failed.";
        avcodec_free_context(&_pAudioCodecCtx);
        return;
    }
    // 设置重采样参数
    av_opt_set_int(pSwrCtx, "in_channel_count", _pAudioCodecCtx->channels, 0);
//    av_opt_set_int(pSwrCtx, "out_channel_count", 2, 0);
    av_opt_set_int(pSwrCtx, "in_channel_layout", av_get_default_channel_layout(_pAudioCodecCtx->channels), 0);
    av_opt_set_int(pSwrCtx, "out_channel_layout", av_get_default_channel_layout(2), 0); // 输出为立体声
//    av_opt_set_int(pSwrCtx, "out_channel_layout", av_get_default_channel_layout(AV_CH_LAYOUT_STEREO), 0);
    av_opt_set_int(pSwrCtx, "in_sample_rate", _pAudioCodecCtx->sample_rate, 0);
    av_opt_set_int(pSwrCtx, "out_sample_rate", 44100, 0); // 输出采样率为44100Hz
    av_opt_set_sample_fmt(pSwrCtx, "in_sample_fmt", _pAudioCodecCtx->sample_fmt, 0);
    av_opt_set_sample_fmt(pSwrCtx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); // 输出为16位有符号整数
    // 初始化重采样上下文
    int ret = swr_init(pSwrCtx);
    if (ret < 0) {
        qCritical() << "Init SwrContext failed.";
        swr_free(&pSwrCtx);
        avcodec_free_context(&_pAudioCodecCtx);
        return;
    }

    // 初始化Qt音频输出
    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(2);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setByteOrder(QAudioFormat::LittleEndian);
    format.setSampleType(QAudioFormat::SignedInt);

    QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice());
    if (!info.isFormatSupported(format)) {
        qWarning() << "Default audio format not supported, trying to use the nearest";
        format = info.nearestFormat(format);
    }

    QAudioOutput *pAudioOutput = new QAudioOutput(format);
    QIODevice *pAudioIO = pAudioOutput->start();
    if (!pAudioIO) {
        qWarning() << "Start audio output failed.";
        // 继续处理视频，但不处理音频
        pAudioOutput->stop();
        delete pAudioOutput;
        pAudioOutput = nullptr;
        pAudioIO = nullptr;
    }

    AVFrame *pAudioFrame = av_frame_alloc();

    while (_isWorking) {
        if (!_isWorking) {
            break;
        }
        // 若共享缓冲区为空，睡眠3ms防止空转
        if (_pSharedBuffer->empty()) {
            QThread::msleep(3);
            if (!_isWorking) {
                break;
            }
            continue;
        }
        // 从共享缓冲区中取出一个AVPacket
        AVPacket *pPacket = _pSharedBuffer->pop();
        if (!pPacket) {
            continue;
        }

        qDebug() << pPacket->stream_index;
//        qDebug() << pPacket->data;
        if (pPacket->stream_index == VIDEO_STREAM) {
            // 发送包到视频解码器
            int ret = avcodec_send_packet(_pVideoCodecCtx, pPacket);
            if (ret < 0) {
                char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
                av_make_error_string(errbuf, AV_ERROR_MAX_STRING_SIZE, ret);
                qWarning() << "Send packet to video decoder failed:" << errbuf;
                av_packet_unref(pPacket);
                continue;
            }

            // 接收解码后的视频帧
            while (ret >= 0) {
                ret = avcodec_receive_frame(_pVideoCodecCtx, pVideoFrame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;
                }
                else if (ret < 0) {
                    char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
                    av_make_error_string(errbuf, AV_ERROR_MAX_STRING_SIZE, ret);
                    qWarning() << "Receive frame from video decoder failed:" << errbuf;
                    break;
                }

                // 转换视频帧格式
                sws_scale(pImgConvertCtx, pVideoFrame->data, pVideoFrame->linesize, 0,
                          _pVideoCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                // 使用QImage加载该RGB数据
                QImage tmpImg(pOutBuffer, _pVideoCodecCtx->width, _pVideoCodecCtx->height, QImage::Format_RGB32);
                // 复制一份图像传给界面显示
                QImage image = tmpImg.copy();
                emit sigGetOneFrame(image); // 发送信号

                av_frame_unref(pVideoFrame);
            }
        }
        // 处理音频流
        if (pPacket->stream_index == AUDIO_STREAM) {
            // 发送包到音频解码器
            ret = avcodec_send_packet(_pAudioCodecCtx, pPacket);
            if (ret < 0) {
                char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
                av_make_error_string(errbuf, AV_ERROR_MAX_STRING_SIZE, ret);
                qWarning() << "Send packet to audio decoder failed:" << errbuf;
                av_packet_unref(pPacket);
                continue;
            }
            // 接收解码后的音频帧
            while (ret >= 0) {
                ret = avcodec_receive_frame(_pAudioCodecCtx, pAudioFrame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;
                }
                else if (ret < 0) {
                    char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
                    av_make_error_string(errbuf, AV_ERROR_MAX_STRING_SIZE, ret);
                    qWarning() << "Receive frame from audio decoder failed:" << errbuf;
                    break;
                }

                // 重采样音频数据
                int out_samples = av_rescale_rnd(swr_get_delay(pSwrCtx, _pAudioCodecCtx->sample_rate) + pAudioFrame->nb_samples,
                                                 44100, _pAudioCodecCtx->sample_rate, AV_ROUND_UP); // 考虑采样率的转换和延迟
                int audioBufferSize = av_samples_get_buffer_size(nullptr, 2, out_samples, AV_SAMPLE_FMT_S16, 1);
                if (audioBufferSize < 0) {
                    qWarning() << "av_samples_get_buffer_size error.";
                    av_frame_unref(pAudioFrame);
                    continue;
                }
                uint8_t *pAudioBuffer = new uint8_t[audioBufferSize];
                int real_samples = swr_convert(pSwrCtx, &pAudioBuffer, out_samples,
                                               (const uint8_t **)pAudioFrame->data, pAudioFrame->nb_samples);
                if (real_samples > 0 && pAudioIO) {
                    // 将音频数据写入音频设备
                    pAudioIO->write((const char *)pAudioBuffer, real_samples * 2 * 2); // 2通道*2字节（16位）
                }
                delete [] pAudioBuffer;
                av_frame_unref(pAudioFrame);
            }
        }
        // 清理AVPacket堆对象
        av_packet_free(&pPacket);
    }

    // 资源释放
    if (pAudioFrame) {
        av_frame_free(&pAudioFrame);
    }
    if (pAudioOutput) {
        pAudioOutput->stop();
        delete pAudioOutput;
    }
    if (pSwrCtx) {
        swr_free(&pSwrCtx);
    }
    if (pVideoFrame) {
        av_frame_free(&pVideoFrame);
    }
    if (pFrameRGB) {
        av_frame_free(&pFrameRGB);
    }
    av_free(pOutBuffer);
    sws_freeContext(pImgConvertCtx);
    if (_pAudioCodecCtx) {
        avcodec_free_context(&_pAudioCodecCtx);
    }
    if (_pVideoCodecCtx) {
        avcodec_free_context(&_pVideoCodecCtx);
    }
}

/**
 * @brief Handles a complete TLV packet from the server: deserializes the
 *        codec parameters it carries and initializes the matching decoder
 *        (video or audio, chosen by the packet type).
 */
void FFmpegDecode::handleCodecParams(Packet pkt)
{
    // Only the two "OK" response types carry serialized codec parameters.
    if (pkt._type != TASK_GET_CODEC_PARAMS_RESP_OK && pkt._type != TASK_GET_AUDIO_CODEC_PARAMS_RESP_OK) {
        qWarning() << "Get codec params failed.";
        return;
    }
    AVCodecParameters *pCodecPar =
        AVCodecParamsSerializer::deserialize((const uint8_t *)pkt._msg.constData(), pkt._length);
    // Fix: guard against a malformed payload — the original dereferenced
    // the result unchecked in initVideoCodec()/initAudioCodec().
    if (!pCodecPar) {
        qWarning() << "Deserialize codec params failed.";
        return;
    }
    // Initialize the appropriate decoder.
    if (pkt._type == TASK_GET_CODEC_PARAMS_RESP_OK) {
        initVideoCodec(pCodecPar);
    }
    else if (pkt._type == TASK_GET_AUDIO_CODEC_PARAMS_RESP_OK) {
        initAudioCodec(pCodecPar);
    }
    // Release the deserialized parameters.
    avcodec_parameters_free(&pCodecPar);
}

/**
 * @brief Stops the decode loop and closes the TCP connection if it is open.
 *
 * Safe to call at any time, including before start() and from the destructor.
 */
void FFmpegDecode::stop()
{
    // Stop the decode loop first so no more packets are consumed.
    if (_isWorking) {
        _isWorking = false;
        _pSharedBuffer->clear();
    }
    // Fix: _pTcpConnection is only created in start(); the destructor calls
    // stop() unconditionally, so guard against a null pointer here.
    if (_pTcpConnection && _pTcpConnection->state() == QAbstractSocket::ConnectedState) {
        _pTcpConnection->close();
    }
}

/**
 * @brief Finds, configures and opens the video decoder from the received
 *        codec parameters. On any failure the context is freed and
 *        _pVideoCodecCtx is left null.
 */
void FFmpegDecode::initVideoCodec(AVCodecParameters *pVideoCodecPar)
{
    qDebug() << "Init video codec...";
    // Look up the decoder matching the received parameters.
    _pVideoCodec = avcodec_find_decoder(pVideoCodecPar->codec_id);
    if (!_pVideoCodec) {
        qCritical() << "Video codec not found.";
        return;
    }
    // Allocate the video decoder context.
    _pVideoCodecCtx = avcodec_alloc_context3(_pVideoCodec);
    if (!_pVideoCodecCtx) {
        qCritical() << "Allocate video CodecContext failed.";
        return;
    }
    // Copy the decoder parameters into the context.
    int ret = avcodec_parameters_to_context(_pVideoCodecCtx, pVideoCodecPar);
    if (ret < 0) {
        qCritical() << "Copy video codec parameters failed.";
        avcodec_free_context(&_pVideoCodecCtx); // also resets the pointer to nullptr
        return; // fix: was missing — fell through and dereferenced the freed context
    }
    // Preset decoder fields.
    _pVideoCodecCtx->bit_rate = 0;      // average bit rate (unknown here)
    _pVideoCodecCtx->time_base.num = 1; // time base numerator
    _pVideoCodecCtx->time_base.den = 25; // time base denominator (assumes 25 fps — TODO confirm)
    // NOTE(review): frame_number is a decoder-maintained counter; setting it
    // before open likely has no effect — confirm the intent.
    _pVideoCodecCtx->frame_number = 1;
    // Open the video decoder.
    ret = avcodec_open2(_pVideoCodecCtx, _pVideoCodec, nullptr);
    if (ret < 0) {
        char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_strerror(ret, errbuf, sizeof(errbuf));
        qCritical() << "Open video codec failed:" << errbuf;
        avcodec_free_context(&_pVideoCodecCtx);
        return; // fix: was missing — logged "success" even after a failed open
    }
    qDebug() << "Init video codec success.";
}


/**
 * @brief Finds, configures and opens the audio decoder from the received
 *        codec parameters. On success (audio is initialized last) it closes
 *        the TCP connection and emits initCodecFinished() to notify the
 *        video window. On any failure the context is freed, _pAudioCodecCtx
 *        is left null, and no signal is emitted.
 */
void FFmpegDecode::initAudioCodec(AVCodecParameters *pAudioCodecPar)
{
    qDebug() << "Init audio codec...";
    // Look up the decoder matching the received parameters.
    _pAudioCodec = avcodec_find_decoder(pAudioCodecPar->codec_id);
    if (!_pAudioCodec) {
        qCritical() << "Audio codec not found.";
        return;
    }
    // Allocate the audio decoder context.
    _pAudioCodecCtx = avcodec_alloc_context3(_pAudioCodec);
    if (!_pAudioCodecCtx) {
        qCritical() << "Allocate audio CodecContext failed.";
        return;
    }
    // Copy the decoder parameters into the context.
    int ret = avcodec_parameters_to_context(_pAudioCodecCtx, pAudioCodecPar);
    if (ret < 0) {
        qCritical() << "Copy audio codec parameters failed.";
        avcodec_free_context(&_pAudioCodecCtx); // also resets the pointer to nullptr
        return; // fix: was missing — fell through to avcodec_open2() on a null context
    }
    // Open the audio decoder.
    ret = avcodec_open2(_pAudioCodecCtx, _pAudioCodec, nullptr);
    if (ret < 0) {
        char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_strerror(ret, errbuf, sizeof(errbuf));
        qCritical() << "Open audio codec failed:" << errbuf;
        avcodec_free_context(&_pAudioCodecCtx);
        return; // fix: was missing — reported success and emitted the signal anyway
    }
    // Both decoders are ready: close the TCP connection and notify the window.
    // (Removed the leftover "111" debug traces.)
    qDebug() << "Init audio codec success.";
    _pTcpConnection->close();
    emit initCodecFinished();
}
