﻿#include "rtcclient.h"
#include <QBuffer>
#include <QDebug>
#include <QMetaObject>
#include <QSet>
#include "utils.h"
#include "alirtceventhandler.h"
#include "serialization.h"

// Constructs the RTC IPC client in "client mode": no sockets are opened here;
// call connectToTeacher() to establish the two local-socket channels.
// All pointer members are nulled so the destructor can tear down safely even
// if the engine/sockets were never created.
RTCClient::RTCClient(QObject *parent)
    : QObject(parent), requestSocket(nullptr), callbackSocket(nullptr),
      engine(nullptr), audioDeviceManager(nullptr), videoDeviceManager(nullptr),
      audioMixingManager(nullptr), eventhandler(nullptr), m_audioFrameObserver(nullptr)
{
    qInfo() << "[RTC] RTCClient initializing for ZDMMService...";
    
    qRegisterMetaType<RTCEngineCallback>("RTCEngineCallback");
    
    // Register the audio-frame type so it can travel through queued
    // signal/slot connections (used by registerAudioFrameObserver(), which
    // forwards frames via QMetaObject::invokeMethod with Qt::QueuedConnection).
    // Registered under both the qualified and unqualified names so Q_ARG
    // lookups succeed regardless of how the type is spelled.
    qRegisterMetaType<ding::rtc::RtcEngineAudioFrame>("ding::rtc::RtcEngineAudioFrame");
    qRegisterMetaType<ding::rtc::RtcEngineAudioFrame>("RtcEngineAudioFrame");
    
    qInfo() << "[RTC] RTCClient initialized as client mode";
}

// Tears down both IPC sockets, destroys the RTC engine, and releases the
// event handler and audio-frame observer. Order matters: the engine's event
// listener is detached before Destroy(), and the handler objects are only
// deleted after the engine is gone.
RTCClient::~RTCClient()
{
    qInfo() << "RTCClient shutting down...";

    // Identical teardown for both local sockets: disconnect, schedule
    // deletion on the event loop, and null the member.
    const auto dropSocket = [](QLocalSocket *&sock) {
        if (!sock)
            return;
        sock->disconnectFromServer();
        sock->deleteLater();
        sock = nullptr;
    };
    dropSocket(requestSocket);
    dropSocket(callbackSocket);

    if (engine) {
        engine->SetEngineEventListener(nullptr);
        // NOTE: Destroy() is synchronous; it blocks until the engine's
        // internal resources are released, so nothing else may touch the
        // engine afterwards.
        RTC3::RtcEngine::Destroy(engine);
        engine = nullptr;
    }

    // Device managers are owned by the engine; just drop the references.
    audioDeviceManager = nullptr;
    videoDeviceManager = nullptr;

    delete eventhandler;
    eventhandler = nullptr;

    delete m_audioFrameObserver;
    m_audioFrameObserver = nullptr;

    qInfo() << "RTCClient shutdown complete";
}

// Establishes the two local-socket channels to the Teacher process:
//  - "RTCService":          request/response command channel
//  - "RTCService_Callback": channel used to push engine callbacks back
// Both connectToServer() calls are asynchronous; rtcFullyConnected() is
// emitted once BOTH sockets report connected (see onRequestConnected() /
// onCallbackConnected()).
void RTCClient::connectToTeacher()
{
    qInfo() << "[RTC] RTCClient connecting to Teacher RTCService...";

    // NOTE: operator new throws std::bad_alloc on failure and never returns
    // nullptr, so the former `if (!requestSocket)` / `if (!callbackSocket)`
    // failure branches were unreachable dead code and have been removed.
    qInfo() << "[RTC] Creating request socket...";
    requestSocket = new QLocalSocket(this);

    connect(requestSocket, &QLocalSocket::connected, this, &RTCClient::onRequestConnected);
    connect(requestSocket, &QLocalSocket::readyRead, this, &RTCClient::onRequestReadyRead);
    connect(requestSocket, &QLocalSocket::disconnected, this, &RTCClient::onRequestDisconnected);

    qInfo() << "[RTC] Creating callback socket...";
    callbackSocket = new QLocalSocket(this);

    connect(callbackSocket, &QLocalSocket::connected, this, &RTCClient::onCallbackConnected);
    connect(callbackSocket, &QLocalSocket::readyRead, this, &RTCClient::onCallbackReadyRead);
    connect(callbackSocket, &QLocalSocket::disconnected, this, &RTCClient::onCallbackDisconnected);

    // Kick off both connection attempts; completion is reported via the
    // connected()/disconnected() signals wired above.
    qInfo() << "[RTC] Attempting to connect request socket to 'RTCService'...";
    requestSocket->connectToServer("RTCService");

    qInfo() << "[RTC] Attempting to connect callback socket to 'RTCService_Callback'...";
    callbackSocket->connectToServer("RTCService_Callback");

    qInfo() << "[RTC] RTCClient connection requests sent to Teacher";
}

// True only when BOTH IPC channels to the Teacher are fully connected.
bool RTCClient::isConnected() const
{
    const bool requestUp = requestSocket
            && requestSocket->state() == QLocalSocket::ConnectedState;
    const bool callbackUp = callbackSocket
            && callbackSocket->state() == QLocalSocket::ConnectedState;
    return requestUp && callbackUp;
}

// Slot: the request channel came up. Announce full connectivity only when
// the callback channel is already up as well.
void RTCClient::onRequestConnected()
{
    qInfo() << "[RTC] Request socket connected to Teacher RTCService";
    if (!callbackSocket || callbackSocket->state() != QLocalSocket::ConnectedState)
        return;
    qInfo() << "[RTC] Both sockets connected - RTC fully connected";
    emit rtcFullyConnected();
}

void RTCClient::onRequestReadyRead()
{
    if (!requestSocket) return;
    
    requestBuffer.append(requestSocket->readAll());
    processRequestBuffer();
}

void RTCClient::onRequestDisconnected()
{
    qInfo() << "[RTC] Request socket disconnected from Teacher RTCService";
    
    // 检查socket状态
    if (requestSocket) {
        qInfo() << "Request socket state:" << requestSocket->state();
        qInfo() << "Request socket error:" << requestSocket->error() << requestSocket->errorString();
    }
    
    qInfo() << "Emitting exit signal...";
    emit exit();
    qInfo() << "Exit signal emitted";
    
    // 直接通知父对象（MMService）
    if (parent()) {
        qInfo() << "Calling parent's onRTCClientDisconnected method...";
        QMetaObject::invokeMethod(parent(), "onRTCClientDisconnected", Qt::QueuedConnection);
    }
}

// Slot: the callback channel came up. Announce full connectivity only when
// the request channel is already up as well.
void RTCClient::onCallbackConnected()
{
    qInfo() << "[RTC] Callback socket connected to Teacher RTCService_Callback";

    if (!requestSocket || requestSocket->state() != QLocalSocket::ConnectedState)
        return;
    qInfo() << "[RTC] Both sockets connected - RTC fully connected";
    emit rtcFullyConnected();
}

void RTCClient::onCallbackReadyRead()
{
    if (!callbackSocket) return;
    
    callbackBuffer.append(callbackSocket->readAll());
    processCallbackBuffer();
}

void RTCClient::onCallbackDisconnected()
{
    qInfo() << "[RTC] Callback socket disconnected from Teacher RTCService_Callback";
    
    // 检查socket状态
    if (callbackSocket) {
        qInfo() << "Callback socket state:" << callbackSocket->state();
        qInfo() << "Callback socket error:" << callbackSocket->error() << callbackSocket->errorString();
    }
    
    // 任何一个socket断开都应该触发退出
    qInfo() << "Emitting exit signal...";
    emit exit();
    qInfo() << "Exit signal emitted";
    
    // 直接通知父对象（MMService）
    if (parent()) {
        qInfo() << "Calling parent's onRTCClientDisconnected method...";
        QMetaObject::invokeMethod(parent(), "onRTCClientDisconnected", Qt::QueuedConnection);
    }
}

void RTCClient::processRequestBuffer()
{
    static const int headerSize = 1 + sizeof(qint32);

    while (requestBuffer.size() >= headerSize) {
        QDataStream stream(requestBuffer);
        stream.setVersion(QDataStream::Qt_5_0);

        qint8 flag;
        qint32 packetSize;

        // 预检数据头
        stream >> flag >> packetSize;

        if (requestBuffer.size() < headerSize + packetSize) {
            qDebug() << "[RTC] Not enough data to read entire packet, waiting for more data.";
            return; // 数据不足以处理完整包，等待下次读取
        }

        requestBuffer.remove(0, headerSize); // 移除头部
        QByteArray packetData = requestBuffer.mid(0, packetSize);
        requestBuffer.remove(0, packetSize); // 移除已处理的包

        // 处理数据包
        RTCEngineRequest request = deserializeRequest(packetData);
        RTCEngineResponse response;
        processRequest(requestSocket, request, response);

        QByteArray serializedResponse = serializeResponse(response);

        if(requestSocket->write(serializedResponse) != serializedResponse.size()){
            qDebug() << "[RTC] Could not send all data to Teacher!";
        }

        requestSocket->flush();
        qDebug() << "[RTC] Processed request[" << RTCEngineCommand::commandToString(request.command) << "_" << request << "]" << "and sent response ["<< response << "]";
    }
}

// Intentionally a no-op. Callbacks normally flow FROM this process TO the
// Teacher (see processCallback()); nothing is expected to arrive on the
// callback channel, so any buffered data is simply ignored.
void RTCClient::processCallbackBuffer()
{
}

// Frames a response for the wire: [qint8 flag][qint32 payloadSize][payload].
// The payload is serialized first so its size is known up front, which
// avoids the seek-back-and-patch dance on the header. Output bytes are
// identical to the previous implementation.
QByteArray RTCClient::serializeResponse(const RTCEngineResponse &response, int flag) const {
    QByteArray payload;
    {
        QDataStream body(&payload, QIODevice::WriteOnly);
        body.setVersion(QDataStream::Qt_5_0);
        body << response;
    }

    QByteArray packet;
    QDataStream header(&packet, QIODevice::WriteOnly);
    header.setVersion(QDataStream::Qt_5_0);
    header << (qint8)flag << (qint32)payload.size();

    packet.append(payload);
    return packet;
}

// Decodes a request payload. Returns an Engine_Invalid request when the
// input is empty or the stream fails to parse, so callers never see a
// half-initialized request.
RTCEngineRequest RTCClient::deserializeRequest(const QByteArray &requestData) const
{
    const RTCEngineRequest invalid(RTCEngineCommand::Engine_Invalid, QVector<QVariant>());

    if (requestData.isEmpty()) {
        qDebug() << "[RTC] Invalid request data: empty data";
        return invalid;
    }

    QDataStream in(requestData);
    in.setVersion(QDataStream::Qt_5_0);

    RTCEngineRequest request;
    in >> request; // uses the project's operator>> overload

    if (in.status() != QDataStream::Ok) {
        qDebug() << "[RTC] Failed to deserialize request: invalid data format";
        return invalid;
    }

    return request;
}

// Dispatches one decoded request from the Teacher to the local RTC engine or
// its device managers, appending any result values to `response.results`.
// Cases with an empty body are commands this client does not (yet) support;
// they return a response carrying only the echoed command.
//
// NOTE(review): `socket` and `errorMessage` are currently unused.
// NOTE(review): many cases dereference `engine`/`audioDeviceManager`/
// `videoDeviceManager` without a null check (e.g. Engine_GetSDKVersion,
// Engine_LeaveChannel, all the *DM_* cases), while Engine_JoinChannel does
// guard on `engine`. If a command arrives before Engine_Create succeeds this
// will crash — presumably the Teacher always sends Engine_Create first, but
// confirm against the caller.
void RTCClient::processRequest(QLocalSocket *socket, const RTCEngineRequest &request, RTCEngineResponse& response)
{
    QString errorMessage;
    int ret = -1;

    // Echo the command so the Teacher can correlate the response.
    response.command = request.command;

    // Handle the request — mirrors the dispatch logic of RTCService.
    switch (request.command) {
        // RtcEngine interface
        case RTCEngineCommand::Engine_Create:
            engine = RTC3::RtcEngine::Create(nullptr);
            if (engine) {
                eventhandler = new AliRtcEventHandler(this);
                engine->SetEngineEventListener(eventhandler);
                audioDeviceManager = engine->GetAudioDeviceManager();
                videoDeviceManager = engine->GetVideoDeviceManager();
                
                // After the engine is up, register the audio frame observer
                // (registered but not enabled — see Engine_EnableAudioFrameObserver).
                qInfo() << "RTC engine created successfully, registering audio frame observer";
                registerAudioFrameObserver();
            }
            break;
        case RTCEngineCommand::Engine_Destroy:
            break;
        case RTCEngineCommand::Engine_GetSDKVersion:
            response.results.append(QString(engine->GetSDKVersion()));
            break;
        case RTCEngineCommand::Engine_SetLogDirPath:
            ret = engine->SetLogDirPath(RETURN_STRING(request).toStdString().c_str());
            response.results.append(ret);
            break;
        case RTCEngineCommand::Engine_GetLogDirPath:
            break;
        case RTCEngineCommand::Engine_SetLogLevel:
            break;
        case RTCEngineCommand::Engine_GetErrorDescription:
            break;
        case RTCEngineCommand::Engine_SetEngineEventListener:
            break;
        case RTCEngineCommand::Engine_GetAudioMixingManager:
            break;
        case RTCEngineCommand::Engine_GetAudioDeviceManager:
            break;
        case RTCEngineCommand::Engine_GetVideoDeviceManager:
            break;
        case RTCEngineCommand::Engine_GetCurrentConnectionStatus:
            break;
        case RTCEngineCommand::Engine_JoinChannel:
            // Auth info arrives as a JSON string; second argument (user data)
            // is deliberately empty.
            if (engine){
                ret = engine->JoinChannel(desRtcEngineAuthInfo(RETURN_STRING(request)), "");
                response.results.append(ret);
            }
            break;
        case RTCEngineCommand::Engine_LeaveChannel:
            engine->LeaveChannel();
            break;
        case RTCEngineCommand::Engine_GetOnlineRemoteUsers:
            break;
        case RTCEngineCommand::Engine_GetUserInfo:
            break;
        case RTCEngineCommand::Engine_IsUserOnline:
            break;
        case RTCEngineCommand::Engine_IsInCall:
            response.results.append(engine->IsInCall());
            break;
        case RTCEngineCommand::Engine_PublishLocalVideoStream:
            response.results.append(engine->PublishLocalVideoStream(RETURN_BOOL(request)));
            break;
        case RTCEngineCommand::Engine_PublishLocalAudioStream:
            response.results.append(engine->PublishLocalAudioStream(RETURN_BOOL(request)));
            break;
        case RTCEngineCommand::Engine_IsLocalVideoStreamPublished:
            response.results.append(engine->IsLocalVideoStreamPublished());
            break;
        case RTCEngineCommand::Engine_IsScreenSharePublished:
            break;
        case RTCEngineCommand::Engine_IsLocalAudioStreamPublished:
            response.results.append(engine->IsLocalAudioStreamPublished());
            break;
        case RTCEngineCommand::Engine_MuteLocalVideo:
            break;
        case RTCEngineCommand::Engine_MuteLocalAudio:
            response.results.append(engine->MuteLocalAudio(RETURN_BOOL(request)));
            break;
        case RTCEngineCommand::Engine_StartScreenShareByDesktopId:
            break;
        case RTCEngineCommand::Engine_StartScreenShareByWindowId:
            break;
        case RTCEngineCommand::Engine_UpdateScreenShareConfig:
            break;
        case RTCEngineCommand::Engine_StopScreenShare:
            break;
        case RTCEngineCommand::Engine_GetScreenShareSourceInfo:
            break;
        case RTCEngineCommand::Engine_GetCurrentScreenShareSourceId:
            break;
        case RTCEngineCommand::Engine_SubscribeAllRemoteAudioStreams:
            response.results.append(engine->SubscribeAllRemoteAudioStreams(RETURN_BOOL(request)));
            break;
        case RTCEngineCommand::Engine_SubscribeAllRemoteVideoStreams:
            response.results.append(engine->SubscribeAllRemoteVideoStreams(RETURN_BOOL(request)));
            break;
        case RTCEngineCommand::Engine_SubscribeRemoteVideoStream:
            // args: [0] user id, [1] JSON-encoded video track enum, [2] subscribe flag
            engine->SubscribeRemoteVideoStream(RETURN_STRING(request).toStdString().c_str(),
                                               desFromJSONStringEnum(RETURN_STRING_INDEX(request,1),desRtcEngineVideoTrack),RETURN_BOOL_INDEX(request,2));
            break;
        case RTCEngineCommand::Engine_SetRemoteVideoStreamType:
            break;
        case RTCEngineCommand::Engine_SetRemoteDefaultVideoStreamType:
            break;
        case RTCEngineCommand::Engine_EnableLocalVideo:
            break;
        case RTCEngineCommand::Engine_EnableBeautyFace:
        {
            // args: [0] enable flag, [1] JSON-encoded beauty-face options
            RtcEngineBeautyFaceOptions opt = desFromJSONString(RETURN_STRING_INDEX(request,1),desRtcEngineBeautyFaceOptions);
            ret = engine->EnableBeautyFace(RETURN_BOOL(request),opt);
            response.results.append(ret);
        }
            break;
        case RTCEngineCommand::Engine_EnableVirtualBackground:
            break;
        case RTCEngineCommand::Engine_SetVideoEnhance:
            break;
        case RTCEngineCommand::Engine_SetVideoDenoise:
            break;
        case RTCEngineCommand::Engine_SetAudioDenoise:
            break;
        case RTCEngineCommand::Engine_SetVideoEncoderConfiguration:
        {
                RtcEngineVideoEncoderConfiguration config = desFromJSONString(RETURN_STRING(request),desRtcEngineVideoEncoderConfiguration);
                engine->SetVideoEncoderConfiguration(config);
        }
            break;
        case RTCEngineCommand::Engine_SetScreenShareEncoderConfiguration:
            break;
        case RTCEngineCommand::Engine_SetLocalViewConfig:
            // args: [0] JSON video canvas, [1] JSON video track enum
            ret = engine->SetLocalViewConfig(desFromJSONString(RETURN_STRING(request),desRtcEngineVideoCanvas),
                   desFromJSONStringEnum(RETURN_STRING_INDEX(request,1),desRtcEngineVideoTrack));
            response.results.append(ret);
            break;
        case RTCEngineCommand::Engine_SetRemoteViewConfig:
            // args: [0] JSON video canvas, [1] user id, [2] JSON video track enum
            ret = engine->SetRemoteViewConfig(desFromJSONString(RETURN_STRING(request),desRtcEngineVideoCanvas),
                                    RETURN_STRING_INDEX(request,1).toStdString().c_str(),
                                    desFromJSONStringEnum(RETURN_STRING_INDEX(request,2),desRtcEngineVideoTrack));
            response.results.append(ret);
            break;
        case RTCEngineCommand::Engine_UpdateViewConfig:
            break;
        case RTCEngineCommand::Engine_StartPreview:
            response.results.append(engine->StartPreview());
            break;
        case RTCEngineCommand::Engine_StopPreview:
            response.results.append(engine->StopPreview());
            break;
        case RTCEngineCommand::Engine_RegisterVideoFrameObserver:
            break;
        case RTCEngineCommand::Engine_EnableVideoFrameObserver:
            break;
        case RTCEngineCommand::Engine_SetAudioProfile:
            // NOTE(review): both arguments read index 1 — the profile is
            // presumably meant to come from index 0 (RETURN_STRING(request));
            // verify against the Teacher-side sender before changing.
            ret = engine->SetAudioProfile(desFromJSONStringEnum(RETURN_STRING_INDEX(request,1),desRtcEngineAudioProfile),
                                    desFromJSONStringEnum(RETURN_STRING_INDEX(request,1),desRtcEngineAudioScenario));
            response.results.append(ret);
            break;
        case RTCEngineCommand::Engine_EnableAudioVolumeIndication:
            break;
        case RTCEngineCommand::Engine_RegisterAudioFrameObserver:
            break;
        case RTCEngineCommand::Engine_EnableAudioFrameObserver:
            {
                bool enabled = RETURN_BOOL(request);
                enableAudioFrameObserver(enabled);
                response.results.append(0); // 0 == success
                break;
            }
        case RTCEngineCommand::Engine_SetExternalAudioSource:
            break;
        case RTCEngineCommand::Engine_PushExternalAudioFrame:
            break;
        case RTCEngineCommand::Engine_SetExternalAudioRender:
            break;
        case RTCEngineCommand::Engine_PushExternalAudioRenderFrame:
            break;
        case RTCEngineCommand::Engine_EnableCustomAudioCapture:
            break;
        case RTCEngineCommand::Engine_SendCustomAudioCaptureFrame:
            break;
        case RTCEngineCommand::Engine_EnableCustomAudioRender:
            break;
        case RTCEngineCommand::Engine_GetCustomAudioRenderFrame:
            break;
        case RTCEngineCommand::Engine_SetPlayoutSignalVolume:
            break;
        case RTCEngineCommand::Engine_GetPlayoutSignalVolume:
            break;
        case RTCEngineCommand::Engine_SetExternalVideoSource:
            break;
        case RTCEngineCommand::Engine_PushExternalVideoFrame:
            break;
        case RTCEngineCommand::Engine_StartRecord:
            // args: [0] JSON record options, [1] path/id, [2] path/id —
            // exact semantics of [1]/[2] depend on the engine API; confirm
            // against RTC3::RtcEngine::StartRecord documentation.
            ret = engine->StartRecord(desFromJSONString(RETURN_STRING(request),desRtcEngineRecordOptions),RETURN_STRING_INDEX(request,1).toStdString().c_str(),
                                RETURN_STRING_INDEX(request,2).toStdString().c_str());
            response.results.append(ret);
            break;
        case RTCEngineCommand::Engine_UpdateRecordLayout:
            break;
        case RTCEngineCommand::Engine_PauseRecord:
            break;
        case RTCEngineCommand::Engine_ResumeRecord:
            break;
        case RTCEngineCommand::Engine_StopRecord:
            engine->StopRecord();
            break;
        case RTCEngineCommand::Engine_SnapshotVideo:
            break;
        case RTCEngineCommand::Engine_SendMediaExtensionMsg:
            break;

        // RtcEngineVideoDeviceManager interface
        case RTCEngineCommand::VideoDM_GetCameraList:
            response.results.append(sToJSONString(videoDeviceManager->GetCameraList(),sRtcEngineDeviceInfoList));
            break;
        case RTCEngineCommand::VideoDM_GetCurrentCameraName:
            break;
        case RTCEngineCommand::VideoDM_GetCurrentCameraID:
            break;
        case RTCEngineCommand::VideoDM_SetCurrentCameraName:
            response.results.append(videoDeviceManager->SetCurrentCameraName(RETURN_STRING(request).toStdString().c_str()));
            break;
        case RTCEngineCommand::VideoDM_SetCurrentCameraID:
            response.results.append(videoDeviceManager->SetCurrentCameraID(RETURN_STRING(request).toStdString().c_str()));
            break;
        case RTCEngineCommand::VideoDM_IsCameraOn:
            response.results.append(videoDeviceManager->IsCameraOn());
            break;
        case RTCEngineCommand::VideoDM_SwitchCamera:
            break;
        case RTCEngineCommand::VideoDM_SetCameraZoom:
            break;
        case RTCEngineCommand::VideoDM_SetCameraFlash:
            break;
        case RTCEngineCommand::VideoDM_IsCameraFocusPointSupported:
            break;
        case RTCEngineCommand::VideoDM_SetCameraFocusPoint:
            break;
        case RTCEngineCommand::VideoDM_IsCameraExposurePointSupported:
            break;
        case RTCEngineCommand::VideoDM_SetCameraExposurePoint:
            break;
        case RTCEngineCommand::VideoDM_SetDeviceOrientationMode:
            break;

        // RtcEngineAudioDeviceManager interface
        case RTCEngineCommand::AudioDM_GetRecordingDeviceList:
            response.results.append(sToJSONString(audioDeviceManager->GetRecordingDeviceList(),sRtcEngineDeviceInfoList));
            break;
        case RTCEngineCommand::AudioDM_GetCurrentRecordingDeviceId:
            break;
        case RTCEngineCommand::AudioDM_SetCurrentRecordingDeviceId:
            response.results.append(audioDeviceManager->SetCurrentRecordingDeviceId(RETURN_STRING(request).toStdString().c_str()));
            break;
        case RTCEngineCommand::AudioDM_GetPlayoutDeviceList:
            response.results.append(sToJSONString(audioDeviceManager->GetPlayoutDeviceList(),sRtcEngineDeviceInfoList));
            break;
        case RTCEngineCommand::AudioDM_GetCurrentPlayoutDeviceId:
            break;
        case RTCEngineCommand::AudioDM_SetCurrentPlayoutDeviceId:
            response.results.append(audioDeviceManager->SetCurrentPlayoutDeviceId(RETURN_STRING(request).toStdString().c_str()));
            break;
        case RTCEngineCommand::AudioDM_SetRecordingDeviceVolume:
            response.results.append(audioDeviceManager->SetRecordingDeviceVolume(RETURN_INT(request)));
            break;
        case RTCEngineCommand::AudioDM_GetRecordingDeviceVolume:
            response.results.append(audioDeviceManager->GetRecordingDeviceVolume());
            break;
        case RTCEngineCommand::AudioDM_SetPlayoutDeviceVolume:
            response.results.append(audioDeviceManager->SetPlayoutDeviceVolume(RETURN_INT(request)));
            break;
        case RTCEngineCommand::AudioDM_GetPlayoutDeviceVolume:
            response.results.append(audioDeviceManager->GetPlayoutDeviceVolume());
            break;
        case RTCEngineCommand::AudioDM_SetRecordingDeviceMute:
            break;
        case RTCEngineCommand::AudioDM_SetRecordingDataMute:
            break;
        case RTCEngineCommand::AudioDM_GetRecordingDeviceMute:
            break;
        case RTCEngineCommand::AudioDM_SetPlayoutDeviceMute:
            break;
        case RTCEngineCommand::AudioDM_GetPlayoutDeviceMute:
            break;
        case RTCEngineCommand::AudioDM_StartRecordingDeviceTest:
            response.results.append(audioDeviceManager->StartRecordingDeviceTest());
            break;
        case RTCEngineCommand::AudioDM_StopRecordingDeviceTest:
            response.results.append(audioDeviceManager->StopRecordingDeviceTest());
            break;
        case RTCEngineCommand::AudioDM_StartPlayoutDeviceTest:
            response.results.append(audioDeviceManager->StartPlayoutDeviceTest(RETURN_STRING(request).toStdString().c_str()));
            break;
        case RTCEngineCommand::AudioDM_StopPlayoutDeviceTest:
             response.results.append(audioDeviceManager->StopPlayoutDeviceTest());
            break;
        case RTCEngineCommand::AudioDM_EnableAudioLevelObserver:
            break;

        case RTCEngineCommand::AudioDM_EnableSystemAudio:
            response.results.append(audioDeviceManager->EnableSystemAudio(true));
            break;

        case RTCEngineCommand::AudioDM_DisableSystemAudio:
            response.results.append(audioDeviceManager->EnableSystemAudio(false));
            break;

        case RTCEngineCommand::AudioDM_StartAudioPlayer:
            response.results.append(audioDeviceManager->StartAudioPlayer());
            break;

        case RTCEngineCommand::AudioDM_StartAudioCapture:
            response.results.append(audioDeviceManager->StartAudioCapture());
            break;

        case RTCEngineCommand::Enable_AI_Denoise:
            {
                // Hard-coded to the "enhance" denoise mode; the request
                // carries no parameters for this command.
                ding::rtc::RtcEngineAudioDenoiseOptions options;
                options.mode =
                  ding::rtc::RtcEngineAudioDenoiseMode::RtcEngineAudioDenoiseEnhance;

                response.results.append(engine->SetAudioDenoise(options));
                break;
            }

        default:
            break;
    }
}

// Frames a callback for the wire: [qint8 flag][qint32 payloadSize][payload].
// Same framing as serializeResponse(): the payload is serialized first so
// its size is known before the header is written, with byte-identical
// output to the previous seek-and-patch implementation.
QByteArray RTCClient::serializeCallback(const RTCEngineCallback &callback, int flag) const {
    QByteArray payload;
    {
        QDataStream body(&payload, QIODevice::WriteOnly);
        body.setVersion(QDataStream::Qt_5_0);
        body << callback;
    }

    QByteArray packet;
    QDataStream header(&packet, QIODevice::WriteOnly);
    header.setVersion(QDataStream::Qt_5_0);
    header << (qint8)flag << (qint32)payload.size();

    packet.append(payload);
    return packet;
}

// Serializes an engine callback and pushes it to the Teacher over the
// callback channel. Drops the callback (with a log) when the channel is
// not connected.
void RTCClient::processCallback(RTCEngineCallback callback) {
    if(!callbackSocket || callbackSocket->state() != QLocalSocket::ConnectedState){
        qDebug() << "[RTC] callbackSocket invalid!!!";
        return;
    }

    const QByteArray packet = serializeCallback(callback);

    // BUGFIX: write()'s return value was stored in an unused local and
    // never checked; detect short writes the same way processRequestBuffer()
    // does instead of silently ignoring them.
    if (callbackSocket->write(packet) != packet.size()) {
        qDebug() << "[RTC] Could not send all callback data to Teacher!";
    }

    callbackSocket->flush();
    qDebug() << "[RTC] Sent callback to Teacher:" << callback.event;
}

void RTCClient::registerAudioFrameObserver()
{
    // 如果音频帧观察者不存在，创建它
    if (!m_audioFrameObserver) {
        qInfo() << "[RTC] Creating audio frame observer...";
        m_audioFrameObserver = new ding::rtc::RTCAudioFrameObserver(this);
        connect(m_audioFrameObserver, &ding::rtc::RTCAudioFrameObserver::audioFrameReceived,
                this, [this](const QString &source, const ding::rtc::RtcEngineAudioFrame &frame) {
            static QSet<QString> loggedSources;
            
            // 每个音频源只在第一次接收时输出日志
            if (!loggedSources.contains(source)) {
                qDebug() << "[RTC] First audio frame received from" << source << "with" << frame.samples << "samples";
                loggedSources.insert(source);
            }
            
            // 转发音频数据到录制服务
            if (m_mmService) {
                // 由于 RTC3 是 ding::rtc 的别名，所以不需要转换，直接传递
                QMetaObject::invokeMethod(m_mmService, "onAudioFrameReceived", 
                                        Qt::QueuedConnection,
                                        Q_ARG(QString, source),
                                        Q_ARG(ding::rtc::RtcEngineAudioFrame, frame));
            }
        });
        qInfo() << "[RTC] Audio frame observer created and signal connected";
    } else {
        qInfo() << "[RTC] Audio frame observer already exists, skipping creation";
    }
    
    // 注册音频帧观察者回调（但不启用）
    if (engine) {
        engine->RegisterAudioFrameObserver(m_audioFrameObserver);
        qInfo() << "[RTC] Audio frame observer callback registered with RTC engine successfully";
    } else {
        qWarning() << "[RTC] RTC engine not available for audio frame observer registration";
    }
}

// Toggles audio frame delivery, creating/registering the observer on demand
// when enabling. The engine observes the post-3A processed capture position.
void RTCClient::enableAudioFrameObserver(bool enabled)
{
    if (!m_audioFrameObserver) {
        if (!enabled) {
            qWarning() << "Audio frame observer not available";
            return;
        }
        // Enabling without an observer: create and register one first.
        registerAudioFrameObserver();
    }

    m_audioFrameObserver->setEnabled(enabled);

    if (!engine) {
        qWarning() << "RTC engine not available for audio frame observer enable/disable";
        return;
    }

    // Both branches previously called EnableAudioFrameObserver with the
    // literal true/false; pass `enabled` directly and keep the per-state log.
    engine->EnableAudioFrameObserver(enabled, RTC3::RtcEngineAudioPositionProcessCaptured);
    if (enabled) {
        qInfo() << "Audio frame observer enabled in RTC engine for processed audio";
    } else {
        qInfo() << "Audio frame observer disabled in RTC engine";
    }
}

// Applies a new observer configuration to both the local observer object and
// the RTC engine (at the processed-capture position). No-op with a warning
// when the observer has not been created yet.
void RTCClient::setAudioFrameObserverConfig(const ding::rtc::RtcEngineAudioFrameObserverConfig &config)
{
    if (m_audioFrameObserver == nullptr) {
        qWarning() << "Audio frame observer not available";
        return;
    }

    m_audioFrameObserver->setConfig(config);

    if (engine == nullptr) {
        qWarning() << "RTC engine not available for audio frame observer configuration";
        return;
    }

    engine->EnableAudioFrameObserver(true, RTC3::RtcEngineAudioPositionProcessCaptured, config);
    qInfo() << "Audio frame observer configuration updated for processed audio";
}
