#include "cameramanager.h"
#include <QDebug>
#include <QApplication>
#include <QDir>
#include <QStandardPaths>
#include <QDateTime>
#include <QImageWriter>
#include <QVideoFrame>
#include <QVideoSurfaceFormat>

CameraManager::CameraManager(QObject *parent)
    : QObject(parent)
    , m_camera(nullptr)
    , m_imageCapture(nullptr)
    , m_videoSurface(nullptr)
    , m_displayLabel(nullptr)
    , m_frameRateTimer(new QTimer(this))
    , m_frameCount(0)
    , m_currentFPS(0)
    , m_isActive(false)
    , m_resolution(640, 480)
{
    // The FPS timer fires once per second; updateFrameRate() then publishes
    // how many frames were presented during that second.
    const int fpsIntervalMs = 1000;
    m_frameRateTimer->setInterval(fpsIntervalMs);
    connect(m_frameRateTimer, &QTimer::timeout, this, &CameraManager::updateFrameRate);

    setupCamera();
}

CameraManager::~CameraManager()
{
    // Shut the pipeline down before tearing the member objects down.
    stopCamera();

    // All three objects are parented to this manager, but they are released
    // explicitly here in reverse dependency order (capture -> surface -> camera)
    // rather than waiting for QObject child cleanup. `delete nullptr` is a no-op.
    delete m_imageCapture;
    m_imageCapture = nullptr;

    delete m_videoSurface;
    m_videoSurface = nullptr;

    delete m_camera;
    m_camera = nullptr;
}

void CameraManager::setupCamera()
{
    // 获取默认摄像头
    QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
    if (cameras.isEmpty()) {
        m_lastError = "没有找到可用的摄像头设备";
        qWarning() << m_lastError;
        return;
    }
    
    // 使用第一个可用的摄像头
    QCameraInfo cameraInfo = cameras.first();
    m_camera = new QCamera(cameraInfo, this);
    
    // 连接信号
    connect(m_camera, QOverload<QCamera::State>::of(&QCamera::stateChanged),
            this, &CameraManager::onCameraStateChanged);
    connect(m_camera, QOverload<QCamera::Status>::of(&QCamera::statusChanged),
            this, &CameraManager::onCameraStatusChanged);
    connect(m_camera, QOverload<QCamera::Error>::of(&QCamera::error),
            this, &CameraManager::onCameraError);
    
    setupImageCapture();
    
    qDebug() << "摄像头初始化完成:" << cameraInfo.description();
}

void CameraManager::setupImageCapture()
{
    if (!m_camera) return;
    
    m_imageCapture = new QCameraImageCapture(m_camera, this);
    
    // 连接拍照相关信号
    connect(m_imageCapture, &QCameraImageCapture::imageCaptured,
            this, &CameraManager::onImageCaptured);
    connect(m_imageCapture, &QCameraImageCapture::imageSaved,
            this, &CameraManager::onImageSaved);
    
    // 设置拍照格式
    QImageEncoderSettings settings;
    settings.setCodec("image/jpeg");
    settings.setQuality(QMultimedia::HighQuality);
    settings.setResolution(m_resolution);
    m_imageCapture->setEncodingSettings(settings);
}

// Starts the camera stream and the FPS sampling timer.
// Returns true when the start request was issued (actual activation is
// reported asynchronously via cameraStatusChanged()); false on setup errors.
bool CameraManager::startCamera()
{
    if (!m_camera) {
        m_lastError = "摄像头未初始化";
        emit errorOccurred(m_lastError);
        return false;
    }

    if (m_isActive) {
        qDebug() << "摄像头已经在运行中";
        return true;
    }

    try {
        // Lazily create the video surface once a display label is known.
        if (m_displayLabel && !m_videoSurface) {
            m_videoSurface = new VideoSurface(m_displayLabel, this);
            // Count every presented frame; updateFrameRate() samples the
            // counter once per second.
            connect(m_videoSurface, &VideoSurface::frameAvailable,
                    this, [this]() { m_frameCount++; });
        }

        // Route the camera's viewfinder output through our surface.
        if (m_videoSurface) {
            m_camera->setViewfinder(m_videoSurface);
        }

        m_camera->start();

        // Reset the counter BEFORE starting the timer: the original reset it
        // afterwards, discarding any frames presented in between.
        m_frameCount = 0;
        m_frameRateTimer->start();

        qDebug() << "正在启动摄像头...";
        return true;

    } catch (const std::exception &e) {
        // Qt itself does not throw here; kept as a guard against backend plugins.
        m_lastError = QString("启动摄像头失败: %1").arg(e.what());
        emit errorOccurred(m_lastError);
        return false;
    }
}

void CameraManager::stopCamera()
{
    if (!m_camera || !m_isActive) {
        return;
    }
    
    m_frameRateTimer->stop();
    m_camera->stop();
    
    if (m_videoSurface) {
        delete m_videoSurface;
        m_videoSurface = nullptr;
    }
    
    m_isActive = false;
    m_currentFPS = 0;
    
    emit cameraStatusChanged(false);
    emit frameRateChanged(0);
    
    qDebug() << "摄像头已停止";
}

// Returns true while the camera is considered active (streaming); the flag is
// maintained by the state/status/error slots below.
bool CameraManager::isActive() const
{
    return m_isActive;
}

void CameraManager::captureImage()
{
    if (!m_imageCapture || !m_isActive) {
        emit errorOccurred("摄像头未启动，无法拍照");
        return;
    }
    
    // 设置保存路径
    QString saveDirPath = QDir::current().filePath("camera");
    QDir saveDir(saveDirPath);
    if (!saveDir.exists()) {
        // 尝试创建 ./camera 文件夹
        if (!saveDir.mkpath(".")) {
            emit errorOccurred("无法创建 ./camera 目录，使用当前目录保存");
            saveDirPath = QDir::currentPath();
        }
    }

    QString timestamp = QDateTime::currentDateTime().toString("yyyyMMdd_hhmmss");
    QString fileName = QString("snapshot_%1.jpg").arg(timestamp);
    fileName = QDir(saveDirPath).filePath(fileName);
    
    // 拍照
    m_imageCapture->capture(fileName);
    
    qDebug() << "正在拍照，保存到:" << fileName;
}

// Registers the QLabel that will display viewfinder frames. May be null to
// detach; the label is configured to scale and center its pixmap.
void CameraManager::setDisplayLabel(QLabel *label)
{
    m_displayLabel = label;
    if (!m_displayLabel) {
        return;
    }

    m_displayLabel->setScaledContents(true);
    m_displayLabel->setAlignment(Qt::AlignCenter);
    qDebug() << "设置显示标签，尺寸:" << m_displayLabel->size();
}

// Returns the human-readable descriptions of all attached camera devices,
// in the order the platform reports them (matches setCamera() indexing).
QStringList CameraManager::getAvailableCameras() const
{
    const QList<QCameraInfo> devices = QCameraInfo::availableCameras();

    QStringList names;
    names.reserve(devices.size());
    for (const QCameraInfo &info : devices) {
        names.append(info.description());
    }
    return names;
}

bool CameraManager::setCamera(int index)
{
    QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
    if (index < 0 || index >= cameras.size()) {
        return false;
    }
    
    bool wasActive = m_isActive;
    if (wasActive) {
        stopCamera();
    }
    
    // 重新创建摄像头
    if (m_camera) {
        delete m_camera;
    }
    
    QCameraInfo cameraInfo = cameras.at(index);
    m_camera = new QCamera(cameraInfo, this);
    
    // 重新连接信号
    connect(m_camera, QOverload<QCamera::State>::of(&QCamera::stateChanged),
            this, &CameraManager::onCameraStateChanged);
    connect(m_camera, QOverload<QCamera::Status>::of(&QCamera::statusChanged),
            this, &CameraManager::onCameraStatusChanged);
    connect(m_camera, QOverload<QCamera::Error>::of(&QCamera::error),
            this, &CameraManager::onCameraError);
    
    setupImageCapture();
    
    if (wasActive) {
        return startCamera();
    }
    
    return true;
}

// Maps the camera's QCamera::Status to a short human-readable (Chinese) label.
QString CameraManager::getCameraStatus() const
{
    if (!m_camera) {
        return "未初始化";
    }

    const QCamera::Status status = m_camera->status();
    switch (status) {
    case QCamera::UnavailableStatus: return "不可用";
    case QCamera::UnloadedStatus:    return "离线";
    case QCamera::LoadingStatus:     return "加载中";
    case QCamera::UnloadingStatus:   return "卸载中";
    case QCamera::LoadedStatus:      return "已加载";
    case QCamera::StandbyStatus:     return "待机";
    case QCamera::StartingStatus:    return "启动中";
    case QCamera::StoppingStatus:    return "停止中";
    case QCamera::ActiveStatus:      return "在线";
    default:                         return "未知状态";
    }
}

// Returns the configured capture resolution formatted as "WIDTHxHEIGHT".
QString CameraManager::getResolution() const
{
    const int w = m_resolution.width();
    const int h = m_resolution.height();
    return QString("%1x%2").arg(w).arg(h);
}

// Returns the most recently sampled frames-per-second value (refreshed once
// per second by updateFrameRate(); 0 while the camera is stopped).
int CameraManager::getFrameRate() const
{
    return m_currentFPS;
}

// Slot: tracks QCamera::State transitions and keeps m_isActive / the
// cameraStatusChanged() signal in sync with them.
void CameraManager::onCameraStateChanged(QCamera::State state)
{
    qDebug() << "摄像头状态变化:" << state;

    if (state == QCamera::ActiveState) {
        m_isActive = true;
        emit cameraStatusChanged(true);
    } else if (state == QCamera::LoadedState || state == QCamera::UnloadedState) {
        // Only signal deactivation once.
        if (m_isActive) {
            m_isActive = false;
            emit cameraStatusChanged(false);
        }
    }
}

// Slot: latches the active flag the first time the backend reports
// ActiveStatus (complements onCameraStateChanged()).
void CameraManager::onCameraStatusChanged(QCamera::Status status)
{
    qDebug() << "摄像头状态:" << status;

    const bool becameActive = (status == QCamera::ActiveStatus) && !m_isActive;
    if (becameActive) {
        m_isActive = true;
        emit cameraStatusChanged(true);
    }
}

// Slot: records the error text, forwards it via errorOccurred(), and marks
// the camera inactive if it was running.
void CameraManager::onCameraError(QCamera::Error error)
{
    m_lastError = getErrorString(error);

    qWarning() << "摄像头错误:" << m_lastError;
    emit errorOccurred(m_lastError);

    if (m_isActive) {
        m_isActive = false;
        emit cameraStatusChanged(false);
    }
}

// Slot: a frame has been captured into memory (not yet written to disk).
// `id` is the capture request id from QCameraImageCapture::capture(); unused.
void CameraManager::onImageCaptured(int id, const QImage &image)
{
    Q_UNUSED(id)
    qDebug() << "图像已捕获，尺寸:" << image.size();
}

// Slot: the captured frame has been written to disk; relays the final file
// path to listeners via the imageCaptured() signal.
void CameraManager::onImageSaved(int id, const QString &fileName)
{
    Q_UNUSED(id)
    qDebug() << "图像已保存:" << fileName;
    emit imageCaptured(fileName);
}

// Timer slot (fires once per second): samples and resets the per-second
// frame counter, then publishes the new FPS value.
void CameraManager::updateFrameRate()
{
    const int fps = m_frameCount;
    m_frameCount = 0;
    m_currentFPS = fps;
    emit frameRateChanged(fps);
}

// Translates a QCamera error code into user-facing (Chinese) text.
QString CameraManager::getErrorString(QCamera::Error error) const
{
    switch (error) {
    case QCamera::NoError:                  return "无错误";
    case QCamera::CameraError:              return "摄像头通用错误";
    case QCamera::InvalidRequestError:      return "无效请求";
    case QCamera::ServiceMissingError:      return "摄像头服务缺失";
    case QCamera::NotSupportedFeatureError: return "不支持的功能";
    default:                                return "未知错误";
    }
}

// VideoSurface 实现
// VideoSurface implementation
VideoSurface::VideoSurface(QLabel *label, QObject *parent)
    : QAbstractVideoSurface(parent)
    , m_targetLabel(label)
    , m_targetSize(320, 240)  // fallback scaling target when no label is given
{
    // Prefer the label's current size as the scaling target for frames.
    if (m_targetLabel != nullptr) {
        m_targetSize = m_targetLabel->size();
    }
}

// Advertises every pixel format present() is willing to accept (directly or
// via its fallback path) so the backend can deliver its native format.
QList<QVideoFrame::PixelFormat> VideoSurface::supportedPixelFormats(
    QAbstractVideoBuffer::HandleType handleType) const
{
    Q_UNUSED(handleType)
    return {
        QVideoFrame::Format_ARGB32,
        QVideoFrame::Format_ARGB32_Premultiplied,
        QVideoFrame::Format_RGB32,
        QVideoFrame::Format_RGB24,
        QVideoFrame::Format_RGB565,
        QVideoFrame::Format_RGB555,
        QVideoFrame::Format_ARGB8565_Premultiplied,
        QVideoFrame::Format_BGRA32,
        QVideoFrame::Format_BGRA32_Premultiplied,
        QVideoFrame::Format_BGR32,
        QVideoFrame::Format_BGR24,
        QVideoFrame::Format_BGR565,
        QVideoFrame::Format_BGR555,
        QVideoFrame::Format_BGRA5658_Premultiplied,
        QVideoFrame::Format_AYUV444,
        QVideoFrame::Format_AYUV444_Premultiplied,
        QVideoFrame::Format_YUV444,
        QVideoFrame::Format_YUV420P,
        QVideoFrame::Format_YV12,
        QVideoFrame::Format_UYVY,
        QVideoFrame::Format_YUYV,
        QVideoFrame::Format_NV12,
        QVideoFrame::Format_NV21,
        QVideoFrame::Format_IMC1,
        QVideoFrame::Format_IMC2,
        QVideoFrame::Format_IMC3,
        QVideoFrame::Format_IMC4,
        QVideoFrame::Format_Y8,
        QVideoFrame::Format_Y16,
        QVideoFrame::Format_Jpeg,
        QVideoFrame::Format_CameraRaw,
        QVideoFrame::Format_AdobeDng
    };
}

// Receives one video frame from the camera, converts it to RGB, scales it to
// the target label and shows it. Returns false when the frame cannot be
// mapped or converted. Emits frameAvailable() for each displayed frame.
bool VideoSurface::present(const QVideoFrame &frame)
{
    if (!m_targetLabel || !frame.isValid()) {
        return false;
    }

    QVideoFrame cloneFrame(frame);
    if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly)) {
        return false;
    }

    QImage image;

    // Convert according to the pixel format. IMPORTANT: the QImage(bits, ...)
    // constructor does NOT copy the pixel data, so every wrapping path calls
    // .copy() — the original code unmapped the frame first and then read the
    // freed/invalid buffer via image.scaled() (use-after-unmap).
    switch (cloneFrame.pixelFormat()) {
    case QVideoFrame::Format_RGB32:
        image = QImage(cloneFrame.bits(),
                      cloneFrame.width(),
                      cloneFrame.height(),
                      cloneFrame.bytesPerLine(),
                      QImage::Format_RGB32).copy();
        break;
    case QVideoFrame::Format_ARGB32:
        image = QImage(cloneFrame.bits(),
                      cloneFrame.width(),
                      cloneFrame.height(),
                      cloneFrame.bytesPerLine(),
                      QImage::Format_ARGB32).copy();
        break;
    case QVideoFrame::Format_RGB24:
        image = QImage(cloneFrame.bits(),
                      cloneFrame.width(),
                      cloneFrame.height(),
                      cloneFrame.bytesPerLine(),
                      QImage::Format_RGB888).copy();
        break;
    case QVideoFrame::Format_YUYV:
        // Conversion helpers return deep copies, no .copy() needed.
        image = convertYUYVToRGB(cloneFrame);
        break;
    case QVideoFrame::Format_UYVY:
        image = convertUYVYToRGB(cloneFrame);
        break;
    case QVideoFrame::Format_NV12:
        image = convertNV12ToRGB(cloneFrame);
        break;
    default:
        qDebug() << "不支持的像素格式:" << cloneFrame.pixelFormat();
        // Last-ditch attempt: reinterpret the raw bytes as RGB888. Only
        // correct for formats that really are 24-bit RGB.
        image = QImage(cloneFrame.bits(),
                      cloneFrame.width(),
                      cloneFrame.height(),
                      cloneFrame.bytesPerLine(),
                      QImage::Format_RGB888).copy();
        if (image.isNull()) {
            cloneFrame.unmap();
            return false;
        }
        break;
    }

    cloneFrame.unmap();

    if (!image.isNull()) {
        // Scale to fit the label while preserving the aspect ratio.
        QPixmap pixmap = QPixmap::fromImage(
            image.scaled(m_targetSize, Qt::KeepAspectRatio, Qt::SmoothTransformation));
        m_targetLabel->setPixmap(pixmap);

        emit frameAvailable();
        return true;
    }

    return false;
}

// Converts a mapped YUYV (YUY2, packed 4:2:2) frame to a deep-copied RGB888
// QImage. Two horizontally adjacent pixels share one U/V pair.
// Uses the integer BT.601 limited-range conversion (coefficients x256,
// >>8 to rescale). NOTE(review): for odd widths the final iteration reads
// srcLine[srcIndex+2..3] one macropixel past the last column — assumed safe
// because bytesPerLine() covers whole macropixels; confirm with the backend.
QImage VideoSurface::convertYUYVToRGB(const QVideoFrame &frame)
{
    int width = frame.width();
    int height = frame.height();
    const uchar *src = frame.bits();
    
    QImage image(width, height, QImage::Format_RGB888);
    
    for (int y = 0; y < height; ++y) {
        // Use the mapped stride for the source row (may include padding).
        const uchar *srcLine = src + y * frame.bytesPerLine();
        uchar *dstLine = image.scanLine(y);
        
        for (int x = 0; x < width; x += 2) {
            int srcIndex = x * 2;  // 2 bytes per pixel in packed 4:2:2
            int dstIndex = x * 3;  // 3 bytes per pixel in RGB888
            
            // YUYV: Y0 U Y1 V
            int y0 = srcLine[srcIndex];
            int u = srcLine[srcIndex + 1];
            int y1 = srcLine[srcIndex + 2];
            int v = srcLine[srcIndex + 3];
            
            // YUV to RGB conversion (BT.601, limited range: Y-16, U/V-128)
            int c0 = y0 - 16;
            int c1 = y1 - 16;
            int d = u - 128;
            int e = v - 128;
            
            // Pixel 0
            int r0 = (298 * c0 + 409 * e + 128) >> 8;
            int g0 = (298 * c0 - 100 * d - 208 * e + 128) >> 8;
            int b0 = (298 * c0 + 516 * d + 128) >> 8;
            
            // Pixel 1 (shares the same U/V pair)
            int r1 = (298 * c1 + 409 * e + 128) >> 8;
            int g1 = (298 * c1 - 100 * d - 208 * e + 128) >> 8;
            int b1 = (298 * c1 + 516 * d + 128) >> 8;
            
            // Clamp values to the valid 8-bit range
            r0 = qBound(0, r0, 255);
            g0 = qBound(0, g0, 255);
            b0 = qBound(0, b0, 255);
            r1 = qBound(0, r1, 255);
            g1 = qBound(0, g1, 255);
            b1 = qBound(0, b1, 255);
            
            // Set RGB values
            dstLine[dstIndex] = r0;
            dstLine[dstIndex + 1] = g0;
            dstLine[dstIndex + 2] = b0;
            
            // Guard the second pixel against odd widths
            if (x + 1 < width) {
                dstLine[dstIndex + 3] = r1;
                dstLine[dstIndex + 4] = g1;
                dstLine[dstIndex + 5] = b1;
            }
        }
    }
    
    return image;
}

// Converts a mapped UYVY (packed 4:2:2) frame to a deep-copied RGB888 QImage.
// Identical math to convertYUYVToRGB(); only the byte order per macropixel
// differs (U Y0 V Y1 instead of Y0 U Y1 V). Integer BT.601 limited-range
// conversion. NOTE(review): same odd-width read caveat as the YUYV converter.
QImage VideoSurface::convertUYVYToRGB(const QVideoFrame &frame)
{
    int width = frame.width();
    int height = frame.height();
    const uchar *src = frame.bits();
    
    QImage image(width, height, QImage::Format_RGB888);
    
    for (int y = 0; y < height; ++y) {
        // Use the mapped stride for the source row (may include padding).
        const uchar *srcLine = src + y * frame.bytesPerLine();
        uchar *dstLine = image.scanLine(y);
        
        for (int x = 0; x < width; x += 2) {
            int srcIndex = x * 2;  // 2 bytes per pixel in packed 4:2:2
            int dstIndex = x * 3;  // 3 bytes per pixel in RGB888
            
            // UYVY: U Y0 V Y1
            int u = srcLine[srcIndex];
            int y0 = srcLine[srcIndex + 1];
            int v = srcLine[srcIndex + 2];
            int y1 = srcLine[srcIndex + 3];
            
            // YUV to RGB conversion (same as YUYV)
            int c0 = y0 - 16;
            int c1 = y1 - 16;
            int d = u - 128;
            int e = v - 128;
            
            // Pixel 0
            int r0 = (298 * c0 + 409 * e + 128) >> 8;
            int g0 = (298 * c0 - 100 * d - 208 * e + 128) >> 8;
            int b0 = (298 * c0 + 516 * d + 128) >> 8;
            
            // Pixel 1 (shares the same U/V pair)
            int r1 = (298 * c1 + 409 * e + 128) >> 8;
            int g1 = (298 * c1 - 100 * d - 208 * e + 128) >> 8;
            int b1 = (298 * c1 + 516 * d + 128) >> 8;
            
            // Clamp values to the valid 8-bit range
            r0 = qBound(0, r0, 255);
            g0 = qBound(0, g0, 255);
            b0 = qBound(0, b0, 255);
            r1 = qBound(0, r1, 255);
            g1 = qBound(0, g1, 255);
            b1 = qBound(0, b1, 255);
            
            // Set RGB values
            dstLine[dstIndex] = r0;
            dstLine[dstIndex + 1] = g0;
            dstLine[dstIndex + 2] = b0;
            
            // Guard the second pixel against odd widths
            if (x + 1 < width) {
                dstLine[dstIndex + 3] = r1;
                dstLine[dstIndex + 4] = g1;
                dstLine[dstIndex + 5] = b1;
            }
        }
    }
    
    return image;
}

// Converts a mapped NV12 frame (full-resolution Y plane followed by an
// interleaved UV plane at half vertical resolution) to a deep-copied RGB888
// QImage, using the same integer BT.601 limited-range math as the
// YUYV/UYVY converters.
QImage VideoSurface::convertNV12ToRGB(const QVideoFrame &frame)
{
    const int width = frame.width();
    const int height = frame.height();

    // Use the mapped stride, not the pixel width: the original used `width`
    // for row and plane offsets, which skews the image on backends that pad
    // rows. NOTE(review): assumes the UV plane starts right after the Y plane
    // and shares its stride (standard NV12 layout) — confirm with the backend.
    const int bpl = frame.bytesPerLine();
    const int stride = (bpl > 0) ? bpl : width;
    const uchar *srcY = frame.bits();
    const uchar *srcUV = srcY + stride * height; // start of the UV plane

    QImage image(width, height, QImage::Format_RGB888);

    for (int y = 0; y < height; ++y) {
        uchar *dstLine = image.scanLine(y);
        const uchar *pY = srcY + y * stride;
        // Each UV row covers two Y rows (4:2:0 vertical subsampling).
        const uchar *pUV = srcUV + (y / 2) * stride;

        for (int x = 0; x < width; x += 2) {
            // U and V are stored interleaved: U V U V ...
            const int U = pUV[x] - 128;
            const int V = pUV[x + 1] - 128;

            // First pixel of the pair.
            const int C1 = pY[x] - 16;
            int R1 = (298 * C1 + 409 * V + 128) >> 8;
            int G1 = (298 * C1 - 100 * U - 208 * V + 128) >> 8;
            int B1 = (298 * C1 + 516 * U + 128) >> 8;

            const int dstIdx = x * 3;
            dstLine[dstIdx]     = qBound(0, R1, 255);
            dstLine[dstIdx + 1] = qBound(0, G1, 255);
            dstLine[dstIdx + 2] = qBound(0, B1, 255);

            // Second pixel shares the same U/V pair. Guard odd widths: the
            // original read pY[x+1] and wrote one pixel past the row end,
            // unlike the guarded YUYV/UYVY converters.
            if (x + 1 < width) {
                const int C2 = pY[x + 1] - 16;
                int R2 = (298 * C2 + 409 * V + 128) >> 8;
                int G2 = (298 * C2 - 100 * U - 208 * V + 128) >> 8;
                int B2 = (298 * C2 + 516 * U + 128) >> 8;
                dstLine[dstIdx + 3] = qBound(0, R2, 255);
                dstLine[dstIdx + 4] = qBound(0, G2, 255);
                dstLine[dstIdx + 5] = qBound(0, B2, 255);
            }
        }
    }

    return image;
}

 