#include "playbackvideowidget.h"
#include <QThread>
#include <QDateTime>
#include <QJsonObject>
#include <QPainter>
#include <QJsonDocument>

// Construct the playback window. Workers and their threads are created
// lazily in handleStartPlay(); only the shared packet buffer and the
// bitrate overlay label exist up front.
PlaybackVideoWidget::PlaybackVideoWidget(QWidget *parent)
    : QWidget(parent)
    , _jsonStr()
    , _pFFmpegDecode(nullptr)
    , _pRecvAVPacket(nullptr)
    , _pDecodeThread(nullptr)
    , _pRecvPktThread(nullptr)
    , _pSharedBuffer(new RingBuffer(6000))
    , _image()
    , _label(this)
{
    // Bitrate overlay: 70x20 px, anchored at the bottom-left corner.
    _label.setText("0KB/s");
    _label.setStyleSheet("color: white; background-color: rgba(0, 0, 0, 100);");
    _label.setGeometry(0, height() - 20, 70, 20);
}

PlaybackVideoWidget::~PlaybackVideoWidget()
{
    // Join and destroy the worker threads first so nobody is still
    // touching the shared buffer when we free it.
    stopPlay();
    // Fix: _pSharedBuffer is new'ed in the constructor but was never
    // released, leaking one RingBuffer per widget.
    delete _pSharedBuffer;
    _pSharedBuffer = nullptr;
}

// Paint the most recent decoded frame, scaled to fit the widget and
// centered (letterbox/pillarbox). Paints plain white while no frame
// has arrived yet.
void PlaybackVideoWidget::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);
    QPainter painter(this);
    // Clear to white first so stale content never shows through.
    painter.setBrush(Qt::white);
    painter.drawRect(0, 0, this->width(), this->height());
    if (_image.size().width() <= 0) {
        return; // No decoded frame yet.
    }

    // Scale the frame to the widget while preserving its aspect ratio.
    QImage img = _image.scaled(QSize(this->width(), this->height()), Qt::KeepAspectRatio);

    // Fix: the original computed centering offsets but drew at (0, 0);
    // center the scaled frame as intended.
    const int x = (this->width() - img.width()) / 2;
    const int y = (this->height() - img.height()) / 2;
    painter.drawImage(QPoint(x, y), img);
}



// Begin playback of recorded footage for (cameraId, channel) starting at
// startTime (milliseconds since epoch). Any previous session on this window
// is torn down first. Only the decode worker/thread is started here; the
// packet-receiving worker is started later from handleInitCodecFinished(),
// once the decoder reports it is initialised.
void PlaybackVideoWidget::handleStartPlay(qlonglong startTime, int cameraId, int channel)
{
    // Stop whatever this window is currently playing.
    stopPlay();
    _pSharedBuffer->clear();
    // Build the playback-request JSON sent to the server.
    // endTime = 23:59:59 of the same calendar day as startTime.
    qint64 endTime = QDateTime(QDateTime::fromMSecsSinceEpoch(startTime).date(), QTime(23, 59, 59)).toMSecsSinceEpoch();
    QJsonObject jsonObj;
    jsonObj["start_time"] = startTime;
    jsonObj["end_time"] = endTime; // timestamp of the day's last second (ms)
    jsonObj["camera_id"] = cameraId;
    jsonObj["channel"] = channel;
    _jsonStr = QJsonDocument(jsonObj).toJson(QJsonDocument::Compact);
    // Create the decoder worker and move it onto its own thread.
    _pFFmpegDecode = new FFmpegDecode(_jsonStr, _pSharedBuffer);
    _pDecodeThread = new QThread(this);
    _pFFmpegDecode->moveToThread(_pDecodeThread);
    connect(_pDecodeThread, &QThread::started, _pFFmpegDecode, &FFmpegDecode::startPlayback);
    // Once codec init finishes, start the packet receiver (queued: crosses threads)
    // and let the worker begin its decode loop.
    connect(_pFFmpegDecode, &FFmpegDecode::initCodecFinished, this, &PlaybackVideoWidget::handleInitCodecFinished, Qt::QueuedConnection);
    connect(_pFFmpegDecode, &FFmpegDecode::initCodecFinished, _pFFmpegDecode, &FFmpegDecode::process);
    // Each decoded frame triggers a repaint on the GUI thread.
    connect(_pFFmpegDecode, &FFmpegDecode::sigGetOneFrame, this, &PlaybackVideoWidget::handleGetOneFrame, Qt::QueuedConnection);
    // Delete the decoder worker automatically when its thread finishes.
    connect(_pDecodeThread, &QThread::finished, _pFFmpegDecode, &QObject::deleteLater);

    // Start the decode thread. The worker requests codec parameters from the
    // server, initialises the decoder, then emits initCodecFinished; after
    // that it keeps pulling AVPackets from the shared buffer, decoding and
    // converting them, emitting sigGetOneFrame per finished frame.
    _pDecodeThread->start();
}

// Slot: runs after the decoder reports successful codec initialisation.
// Creates the packet-receiving worker on its own thread; the worker feeds
// AVPackets from the server into the shared ring buffer for the decoder.
void PlaybackVideoWidget::handleInitCodecFinished()
{
    qDebug() << "handleInitCodecFinished()";
    // Create the packet-receiver worker and move it onto its own thread.
    _pRecvAVPacket = new RecvAVPacket(_jsonStr, TASK_STOP_RTMP, _pSharedBuffer);
    _pRecvPktThread = new QThread(this);
    _pRecvAVPacket->moveToThread(_pRecvPktThread);
    connect(_pRecvPktThread, &QThread::started, _pRecvAVPacket, &RecvAVPacket::startPlayback);
    // Bitrate updates land on the GUI thread (queued: crosses threads).
    connect(_pRecvAVPacket, &RecvAVPacket::updateBitRate, this, &PlaybackVideoWidget::handleUpdateBitRate, Qt::QueuedConnection);
//    connect(_pRecvAVPacket, &RecvAVPacket::sigTcpDisconnected, this, &VideoWidget::stopPlay, Qt::QueuedConnection);
    // Delete the receiver worker automatically when its thread finishes.
    connect(_pRecvPktThread, &QThread::finished, _pRecvAVPacket, &QObject::deleteLater);

    // Start the receive thread; it continuously fetches AVPackets from the
    // server and pushes them into the shared buffer.
    _pRecvPktThread->start();
}

// Slot: receives one decoded frame from the decode thread.
// Caches it and schedules a repaint; paintEvent() does the drawing.
void PlaybackVideoWidget::handleGetOneFrame(QImage img)
{
    _image = img;   // QImage is implicitly shared; this is cheap
    update();       // queue a paintEvent() on the GUI thread
}

void PlaybackVideoWidget::handleUpdateBitRate(double bitRate)
{
    _label.setText(QString::number(bitRate, 'f', 2) + "KB/s"); // 保留2位小数
}

// Stop playback: signal both workers to stop, join and delete their
// threads, then drop the worker pointers (the worker objects themselves
// are freed via the deleteLater connections on QThread::finished).
// Safe to call when nothing is playing — all members are null-checked.
void PlaybackVideoWidget::stopPlay()
{
    // Ask the workers to break out of their loops before joining threads.
    if (_pFFmpegDecode) {
        _pFFmpegDecode->stop();
    }
    if (_pRecvAVPacket) {
//        _pRecvAVPacket->stop();
        // NOTE(review): stop() intentionally disabled in original code —
        // presumably the receiver halts via its TASK_STOP_RTMP task type;
        // confirm before re-enabling.
    }
    if (_pDecodeThread) {
        _pDecodeThread->quit();
        _pDecodeThread->wait(); // join; deleteLater for the worker fires on finish
        delete _pDecodeThread;
        _pDecodeThread = nullptr;
    }
    if (_pRecvPktThread) {
        _pRecvPktThread->quit();
        _pRecvPktThread->wait();
        delete _pRecvPktThread;
        _pRecvPktThread = nullptr;
    }
    // Workers are owned by the deleteLater connections; just drop our pointers.
    _pFFmpegDecode = nullptr;
    _pRecvAVPacket = nullptr;
}
