﻿#include "../../includes/multimedia/cremotevoice.h"
#include "../../includes/QsLog/QsLog.h"
#include "../../includes/common/common.h"

#include <QDebug>
#include <QCoreApplication>
#include <QFileInfo>
#include <QBuffer>
#include <QtConcurrent>

#ifdef ENABLEREMOTEVOICE

static int s_RemoteVoiceKCP_ID = 0x112233;

int KCP_RemoteVoice_Callback(const char * buf, int len, ikcpcb * kcp, void * user)
{
    auto pRemoteClient = (CRemoteClient*)user;
    return pRemoteClient->onProcessSendData(buf, len);
}

/**
 * @brief Construct a client bound to a shared UDP socket; creates and tunes
 *        the KCP control block and hooks its output to this object.
 */
CRemoteClient::CRemoteClient(QObject *parent,QUdpSocket *pUdpSocket)
    : QObject(parent),m_UdpSocket(pUdpSocket),
      m_address(QHostAddress::Broadcast),
      m_port(-1),
      m_isdatareiveHearder(false)
{
    // Create the KCP instance; `this` is handed back to the output callback.
    m_ikcp = ikcp_create(s_RemoteVoiceKCP_ID, this);

    // Tune KCP according to the globally configured mode:
    // 0 = default, 1 = normal no-delay, 2 = fastest ("turbo") mode.
    if (s_RT_KCP_MODE == 0)
    {
        ikcp_nodelay(m_ikcp, 0, 10, 0, 0);
    }
    else if (s_RT_KCP_MODE == 1)
    {
        ikcp_nodelay(m_ikcp, 0, 10, 0, 1);
    }
    else if (s_RT_KCP_MODE == 2)
    {
        ikcp_nodelay(m_ikcp, 1, 10, 2, 1);
        m_ikcp->rx_minrto = 10;  // lower minimum RTO for faster retransmission
        m_ikcp->fastresend = 1;  // resend after a single duplicate ACK
    }

    ikcp_setmtu(m_ikcp, s_RT_MTU);
    m_ikcp->output = KCP_RemoteVoice_Callback;
}

// Flush any pending KCP segments to the socket, then free the control block.
CRemoteClient::~CRemoteClient(void)
{
    ikcp_flush(m_ikcp);
    ikcp_release(m_ikcp);
}

/**
 * @brief CRemoteClient::SendKcpData Queue a payload for KCP transmission.
 * @param Datagramdata payload to enqueue
 * @return 0 on success; -1 when the payload is empty, KCP is not initialised,
 *         or no destination port has been configured yet.
 */
int CRemoteClient::SendKcpData(QByteArray Datagramdata)
{
    const bool unusable = Datagramdata.isEmpty() || m_ikcp == NULL || m_port <= 0;
    if (unusable)
        return -1;

    // Only buffered here; processdatasend() drains it on each kcp_update().
    m_datasendBuffer.push_back(Datagramdata);
    return 0;
}

/**
 * @brief CRemoteClient::processdatasend 处理数据发送
 */
void CRemoteClient::processdatasend(void)
{
    if(m_datasendBuffer.isEmpty())
        return;

    if (m_ikcp->nsnd_que>s_RT_max_send_que)
    {
        return;
    }

    int preadbufferlength = m_datasendBuffer.size() < s_RT_MTU ? m_datasendBuffer.size() : s_RT_MTU;
    QByteArray ptempBuffer(m_datasendBuffer.constData(),preadbufferlength);
    m_datasendBuffer.remove(0,preadbufferlength);

    ikcp_send(m_ikcp,ptempBuffer.data(),ptempBuffer.size());
}

/**
 * @brief CRemoteClient::SetClientInfo Set the peer address and port.
 * @param address peer IP address
 * @param port peer UDP port
 */
void CRemoteClient::SetClientInfo(QHostAddress address,int port)
{
    m_address = address;
    m_port = port;

    // Unique client key, e.g. "192.168.1.5_6000" — must match the key format
    // used when datagrams are routed in CRemoteVoice::reciverPendingDatagram().
    m_Name = QString("%1_%2").arg(m_address.toString()).arg(m_port);
}

/**
 * @brief CRemoteClient::onProcessSendData Write raw KCP output bytes to the
 *        shared UDP socket, addressed to this client's peer.
 * @param buf bytes to send
 * @param len number of bytes
 * @return bytes written, or -1 when the socket/arguments are unusable
 */
qint64 CRemoteClient::onProcessSendData(const char * buf, int len)
{
    const bool invalid = (m_UdpSocket == NULL) || (buf == NULL) || (len <= 0);
    if (invalid)
        return -1;

    return m_UdpSocket->writeDatagram(buf, len, m_address, m_port);
}

/**
 * @brief CRemoteClient::kcp_update kcp更新
 */
void CRemoteClient::kcp_update(void)
{
    if(m_ikcp == NULL)
        return;

    auto clock = iclock();
    ikcp_update(m_ikcp, clock);

    // 处理数据发送
    processdatasend();

    // 处理数据接收
    processdatarecive();
}

/**
 * @brief CRemoteClient::processdatarecive Assemble framed messages
 *        (tagMultiDataHearder header followed by a payload of
 *        `compresssize` bytes) from the KCP receive buffer and emit each
 *        complete message via processPendingDatagram.
 */
void CRemoteClient::processdatarecive(void)
{
    if(m_datareciveBuffer.isEmpty())
        return;

    // Phase 1: once enough bytes are buffered, peel off the fixed-size header.
    if(!m_isdatareiveHearder && m_datareciveBuffer.size() >= sizeof(tagMultiDataHearder))
    {
        m_isdatareiveHearder = true;
        m_reciverFileBuffer.clear();
        memcpy(&m_datareiveHearder,m_datareciveBuffer.constData(),sizeof(m_datareiveHearder));
        // The emitted message keeps the raw header bytes in front of the payload.
        m_reciverFileBuffer.push_back(QByteArray(m_datareciveBuffer.constData(),sizeof(m_datareiveHearder)));
        m_datareciveBuffer.remove(0,sizeof(m_datareiveHearder));
    }

    // Phase 2: once the full payload has arrived, emit header+payload as one
    // message and reset for the next frame.
    // NOTE(review): if a header ever carries compresssize <= 0 this state
    // machine never resets — presumably senders always produce a non-empty
    // payload; verify against the sending side.
    if(m_isdatareiveHearder &&
            m_datareiveHearder.compresssize > 0 &&
            m_datareciveBuffer.size() >= m_datareiveHearder.compresssize)
    {
        m_isdatareiveHearder = false;
        m_reciverFileBuffer.push_back(QByteArray(m_datareciveBuffer.constData(),m_datareiveHearder.compresssize));
        m_datareciveBuffer.remove(0,m_datareiveHearder.compresssize);

        emit processPendingDatagram(this,m_reciverFileBuffer);
    }
}

/**
 * @brief CRemoteClient::onProcessReciverKcpDatagramdata Feed a raw UDP
 *        datagram into KCP and move every fully reassembled message into the
 *        receive buffer.
 * @param Datagramdata raw datagram bytes received from the socket
 */
void CRemoteClient::onProcessReciverKcpDatagramdata(QByteArray& Datagramdata)
{
    if(Datagramdata.isEmpty() || m_ikcp == NULL)
        return;

    ikcp_input(m_ikcp, Datagramdata.data(), Datagramdata.size());

    while (true)
    {
        // BUG FIX: size the buffer to the next complete message instead of a
        // fixed s_RT_MTU array. ikcp_recv() returns -3 (without consuming the
        // message) when the buffer is too small, so any reassembled message
        // larger than the MTU would stall the stream forever.
        int peeksize = ikcp_peeksize(m_ikcp);
        if (peeksize <= 0)
            break;

        QByteArray buf;
        buf.resize(peeksize);

        int hr = ikcp_recv(m_ikcp, buf.data(), buf.size());
        if (hr < 0)
            break;

        buf.truncate(hr);
        m_datareciveBuffer.push_back(buf);
    }
}

////////////////////////////////////////////////////////////////////////////////////////

// Construct the voice/video hub: initialises all device/FFmpeg state to
// "unset", enlarges the UDP socket buffers, optionally binds the receive
// port, and starts the 10 ms KCP tick timer.
CRemoteVoice::CRemoteVoice(QObject *parent,int receivePort)
    : QObject(parent),
      m_audio_in(NULL),
      m_audio_out(NULL),
      m_audio_streamIn(NULL),
      m_audio_out_streamIn(NULL),
      m_issendvoicedata(true),
      m_isrecordvoicedata(false),
      m_isrecordvideodata(false),
      m_camera(NULL),
      m_cameraImageCapture(NULL),
      m_receivePort(receivePort)
{
    // Large socket buffers (10 MiB each way) to absorb audio/video bursts.
    m_UdpSocket.setSocketOption(QAbstractSocket::SendBufferSizeSocketOption,10*1024*1024);
    m_UdpSocket.setSocketOption(QAbstractSocket::ReceiveBufferSizeSocketOption,10*1024*1024);

    // FFmpeg screen-capture state stays unset until InitScreenInput().
    m_formatContext=NULL;
    m_codecParameters=NULL;
    m_videoIndex=0;
    m_codecContext=NULL;
    m_packet=NULL;
    m_frame=NULL;
    m_imgConvertContext=NULL;
    memset(m_lineSize,0,sizeof(m_lineSize));
    m_screenWidth=m_screenHeight=0;
    m_refreshframe=0;
    m_out_video_fd=NULL;

    // Route incoming datagrams to the matching CRemoteClient.
    connect(&m_UdpSocket,&QUdpSocket::readyRead,this,&CRemoteVoice::reciverPendingDatagram);

    // NOTE(review): binds to LocalHost only — remote peers cannot reach this
    // socket unless that is intended; confirm against deployment.
    if(m_receivePort != -1)
        m_UdpSocket.bind(QHostAddress::LocalHost,m_receivePort);

    // 10 ms tick driving KCP updates and record-file rollover.
    QObject::connect(&m_updateTimer, &QTimer::timeout,
        this, &CRemoteVoice::KCPUpdate);
    m_updateTimer.start(10);
}

// Tear down devices, clients and FFmpeg state in the reverse order of setup.
CRemoteVoice::~CRemoteVoice()
{
    // Stop and release the audio capture device.
    if(m_audio_in)
    {
        m_audio_in->stop();
        delete m_audio_in;
        m_audio_in = NULL;
    }

    // Stop and release the audio playback device.
    if(m_audio_out)
    {
        m_audio_out->stop();
        delete m_audio_out;
        m_audio_out = NULL;
    }

    m_UdpSocket.close();
    deleteAllClients();

    if(m_recordvoicefile.isOpen())
        m_recordvoicefile.close();

    // delete on a null pointer is a no-op, so these are safe even when
    // InitVideoInput() was never called.
    delete m_camera;
    delete m_cameraImageCapture;
    //delete m_cameraviewfinder;

    // FFmpeg free/close helpers tolerate null members, so this is safe even
    // when InitScreenInput() was never called.
    av_frame_free(&m_frame);
    av_packet_free(&m_packet);
    avcodec_free_context(&m_codecContext);
    avformat_close_input(&m_formatContext);
    sws_freeContext(m_imgConvertContext);

    // Close the AVI record file if one is open.
    if(m_out_video_fd)
        AVI_close(m_out_video_fd);
    m_out_video_fd=NULL;
}

/**
 * @brief CRemoteVoice::setRecivePort 设置数据接收端口
 * @param port 端口号
 */
void CRemoteVoice::setRecivePort(int port)
{
    m_receivePort = port;

    if(m_receivePort != -1)
        m_UdpSocket.bind(QHostAddress::LocalHost,m_receivePort);
}

/**
 * @brief CRemoteVoice::InitScreenInput 初始化屏幕输入设备
 * @param framerate 屏幕录制帧率
 * @param deviceType 屏幕录制类型：gdigrab，dshow
 *
 * gdigrab
 *   gdigrab是FFmpeg专门用于抓取Windows桌面的设备。非常适合用于屏幕录制。它通过不同的输入URL支持两种方式的抓取：
 *   （1）“desktop”：抓取整张桌面。或者抓取桌面中的一个特定的区域。
 *   （2）“title={窗口名称}”：抓取屏幕中特定的一个窗口（目前中文窗口还有乱码问题）。
 *   gdigrab另外还支持一些参数，用于设定抓屏的位置：
 *   offset_x：抓屏起始点横坐标。
 *   offset_y：抓屏起始点纵坐标。
 *   video_size：抓屏的大小。
 *   framerate：抓屏的帧率。

 * dshow
 *   使用dshow抓屏需要安装抓屏软件：screen-capture-recorder
 *   软件地址： http://sourceforge.net/projects/screencapturer/
 *   下载软件安装完成后，可以指定dshow的输入设备为“screen-capture-recorder”即可。有关dshow设备的使用方法在上一篇文章中已经有详细叙述，这里不再重复。参考的代码如下：
 *   AVInputFormat *ifmt=av_find_input_format("dshow");
 *    if(avformat_open_input(&pFormatCtx,"video=screen-capture-recorder",ifmt,NULL)!=0){
 *     printf("Couldn't open input stream.（无法打开输入流）\n");
 *     return -1;
 *    }
 *
 * @return
 */
bool CRemoteVoice::InitScreenInput(int framerate,QString deviceType,int screenWidth,int screenHeight,bool isRecord)
{
    if(framerate <= 0 || deviceType.isEmpty())
        return false;

    m_refreshframe = framerate;
    m_screenWidth = screenWidth;
    m_screenHeight = screenHeight;

    this->setIsRecordVideoData(isRecord);

    av_register_all();
    avdevice_register_all();                                    //初始化所有设备
    m_formatContext=avformat_alloc_context();                     //分配format上下文
    AVInputFormat *inputFormat=av_find_input_format(deviceType.toStdString().c_str()); //寻找输入设备【gdigrab】
    AVDictionary* options = NULL;
    av_dict_set(&options,"framerate",QString::asprintf("%d",framerate).toStdString().c_str(),0);                   //设置帧数为60
    if(avformat_open_input(&m_formatContext,"desktop",inputFormat,&options)){ //开启输入设备
        QLOG_INFO()<<"cant`t open input stream.";
        return false;
    }
    if(avformat_find_stream_info(m_formatContext,nullptr)){       //加载流中存储的信息
        QLOG_INFO()<<"can`t find stream information.";
        return false;
    }

    m_videoIndex=-1;                                              //寻找视频流
    for(uint i=0;i<m_formatContext->nb_streams;i++){
        if(m_formatContext->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){
            m_videoIndex=i;
            break;
        }
    }
    if(m_videoIndex==-1){
        QLOG_INFO()<<"can`t find video stream.";
        return false;
    }

    m_codecParameters=m_formatContext->streams[m_videoIndex]->codecpar;
    m_codecContext=avcodec_alloc_context3(nullptr);
    avcodec_parameters_to_context(m_codecContext,m_codecParameters);
    AVCodec* codec=avcodec_find_decoder(m_codecParameters->codec_id);

    if(codec==nullptr){
        QLOG_INFO()<<"can`t find codec";
        return false;
    }
    if(avcodec_open2(m_codecContext,codec,nullptr)){
        QLOG_INFO()<<"can`t open codec";
        return false;
    }

    m_packet=av_packet_alloc();
    m_frame=av_frame_alloc();

    m_imgConvertContext=sws_getContext(m_codecParameters->width,m_codecParameters->height,m_codecContext->pix_fmt,m_codecParameters->width,m_codecParameters->height,AV_PIX_FMT_RGB32,SWS_BICUBIC,nullptr,nullptr,nullptr);
    m_screenImage=QImage(m_codecParameters->width,m_codecParameters->height,QImage::Format_RGB32);
    av_image_fill_linesizes(m_lineSize,AV_PIX_FMT_RGB32,m_codecParameters->width);

    connect(this,SIGNAL(signal_convertImageToBytes(QByteArray)),this,SLOT(processconvertImageToBytes(QByteArray)),Qt::BlockingQueuedConnection);

    QObject::connect(&m_creentCamera, &QTimer::timeout,
        this, &CRemoteVoice::screenCamera_Update);
    //m_creentCamera.start(framerate);

    return true;
}

/**
 * @brief CRemoteVoice::InitVideoInput 初始化视频输入设备
 * @param refreshfrequency 刷新定时器，单位毫秒
 * @param screenWidth 视频宽度
 * @param screenHeight 视频高度
 * @return
 */
bool CRemoteVoice::InitVideoInput(int refreshfrequency,int screenWidth,int screenHeight,bool isRecord)
{
    m_camera = new QCamera();
    //m_cameraviewfinder = new QCameraViewfinder();
    m_cameraImageCapture = new QCameraImageCapture(m_camera);
    m_screenWidth = screenWidth;
    m_screenHeight = screenHeight;
    m_refreshframe = refreshfrequency;

    this->setIsRecordVideoData(isRecord);

    m_camera->setCaptureMode(QCamera::CaptureStillImage);

    if (m_cameraImageCapture->isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer))
    {
        m_cameraImageCapture->setCaptureDestination(QCameraImageCapture::CaptureToBuffer);
        //qDebug() << m_cameraImageCapture->supportedBufferFormats();
        m_cameraImageCapture->setBufferFormat(QVideoFrame::PixelFormat::Format_Jpeg);
        //qDebug() << m_cameraImageCapture->supportedResolutions(m_cameraImageCapture->encodingSettings());
        QImageEncoderSettings iamge_setting;
        iamge_setting.setResolution(screenWidth,screenHeight);
        m_cameraImageCapture->setEncodingSettings(iamge_setting);

        connect(m_cameraImageCapture,SIGNAL(imageCaptured(int,QImage)),this,SLOT(processcameraImageCaptured(int,QImage)));
    }

    connect(this,SIGNAL(signal_convertImageToBytes(QByteArray)),this,SLOT(processconvertImageToBytes(QByteArray)),Qt::BlockingQueuedConnection);

    QObject::connect(&m_timerCamera, &QTimer::timeout,
        this, &CRemoteVoice::timerCamera_Update);
    m_timerCamera.start(refreshfrequency);

    return true;
}

/**
 * @brief CRemoteVoice::openScreenCapture Start the screen-grab timer.
 */
void CRemoteVoice::openScreenCapture(void)
{
    // Convert frames-per-second into a millisecond interval; fall back to
    // 100 ms when no frame rate has been configured.
    const int intervalMs = (m_refreshframe > 0) ? (1000 / m_refreshframe) : 100;
    m_creentCamera.start(intervalMs);
}

/**
 * @brief CRemoteVoice::closeScreenCapture Stop the screen-grab timer.
 */
void CRemoteVoice::closeScreenCapture(void)
{
    m_creentCamera.stop();
}

/**
 * @brief CRemoteVoice::openCamera Start the camera, if one was initialised.
 */
void CRemoteVoice::openCamera(void)
{
    if(m_camera != NULL)
        m_camera->start();
}

/**
 * @brief CRemoteVoice::closeCamera Stop the camera, if one was initialised.
 */
void CRemoteVoice::closeCamera(void)
{
    if(m_camera != NULL)
        m_camera->stop();
}

/**
 * @brief CRemoteVoice::InitAudioInputAndOutput 初始化音频输入输出设备
 *
 * @return 如果音频设备初始成功返回真，否则返回假
 */
bool CRemoteVoice::InitAudioInputAndOutput(bool isRecord)
{
    QAudioFormat auido_input_format;

    //设置录音的格式
    auido_input_format.setSampleRate(44100); //设置采样率以对赫兹采样。 以秒为单位，每秒采集多少声音数据的频率.
    auido_input_format.setChannelCount(1);   //将通道数设置为通道。
    auido_input_format.setSampleSize(16);     /*将样本大小设置为指定的sampleSize（以位为单位）通常为8或16，但是某些系统可能支持更大的样本量。*/
    auido_input_format.setCodec("audio/pcm"); //设置编码格式
    auido_input_format.setByteOrder(QAudioFormat::LittleEndian); //样本是小端字节顺序
    auido_input_format.setSampleType(QAudioFormat::SignedInt); //样本类型

    //选择设备作为输入源
    QAudioDeviceInfo info =QAudioDeviceInfo::defaultInputDevice();

    QLOG_INFO()<<(QString::fromLocal8Bit("当前的录音设备的名字:%1").arg(info.deviceName()));

    //判断输入的格式是否支持，如果不支持就使用系统支持的默认格式
    if(!info.isFormatSupported(auido_input_format))
    {
      QLOG_INFO()<<QString::fromLocal8Bit("返回与系统支持的提供的设置最接近的QAudioFormat");

      auido_input_format=info.nearestFormat(auido_input_format);
      /*
       * 返回与系统支持的提供的设置最接近的QAudioFormat。
         这些设置由所使用的平台/音频插件提供。
         它们还取决于所使用的QAudio :: Mode。
      */
    }

    //当前设备支持的编码
    QLOG_INFO()<<QString::fromLocal8Bit("当前设备支持的编码格式:");

    QStringList list=info.supportedCodecs();
    for(int i=0;i<list.size();i++)
    {
        QLOG_INFO()<<list.at(i);
    }

    QLOG_INFO()<<(QString::fromLocal8Bit("当前录音的采样率=%1").arg(auido_input_format.sampleRate()));
    QLOG_INFO()<<(QString::fromLocal8Bit("当前录音的通道数=%1").arg(auido_input_format.channelCount()));
    QLOG_INFO()<<(QString::fromLocal8Bit("当前录音的样本大小=%1").arg(auido_input_format.sampleSize()));
    QLOG_INFO()<<(QString::fromLocal8Bit("当前录音的编码格式=%1").arg(auido_input_format.codec()));

    if(m_audio_in)
    {
        delete m_audio_in;
        m_audio_in=nullptr;
    }

    m_audio_in = new QAudioInput(auido_input_format);
    m_audio_streamIn=m_audio_in->start(); //开始音频采集

    if(m_audio_streamIn == NULL)
    {
        delete m_audio_in;
        m_audio_in = NULL;
        return false;
    }

    connect(m_audio_in,SIGNAL(stateChanged(QAudio::State)), this, SLOT(handleStateChanged_input(QAudio::State)),Qt::QueuedConnection);

    //关联音频读数据信号
    connect(m_audio_streamIn,SIGNAL(readyRead()),this,SLOT(audio_ReadyRead()),Qt::QueuedConnection);

    QAudioFormat auido_out_format;

    //设置录音的格式
    auido_out_format.setSampleRate(44100); //设置采样率以对赫兹采样。 以秒为单位，每秒采集多少声音数据的频率.
    auido_out_format.setChannelCount(1);   //将通道数设置为通道。
    auido_out_format.setSampleSize(16);     /*将样本大小设置为指定的sampleSize（以位为单位）通常为8或16，但是某些系统可能支持更大的样本量。*/
    auido_out_format.setCodec("audio/pcm"); //设置编码格式
    auido_out_format.setByteOrder(QAudioFormat::LittleEndian); //样本是小端字节顺序
    auido_out_format.setSampleType(QAudioFormat::SignedInt); //样本类型

    //QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice());

    if(m_audio_out)
    {
        delete m_audio_out;
        m_audio_out=nullptr;
    }

    m_audio_out = new QAudioOutput(auido_out_format);
    m_audio_out_streamIn=m_audio_out->start();

    if(m_audio_out_streamIn == NULL)
    {
        delete m_audio_out;
        m_audio_out = NULL;
        return false;
    }

    this->setIsRecordVoiceData(isRecord);

    return true;
}

void CRemoteVoice::screenCamera_Update()
{
    if(av_read_frame(m_formatContext,m_packet))
    {
        return ;
    }

    if(m_packet->stream_index==m_videoIndex)
    {
        if(avcodec_send_packet(m_codecContext,m_packet))
            return;
        if(avcodec_receive_frame(m_codecContext,m_frame))
            return;

        uint8_t* dst[]={m_screenImage.bits()};
        sws_scale(m_imgConvertContext,(const unsigned char* const*)m_frame->data,m_frame->linesize,0,m_codecParameters->height,dst,m_lineSize);
        av_free_packet(m_packet);                //清空数据包

        QImage scaleImage = m_screenImage.scaled(m_screenWidth,m_screenHeight,Qt::IgnoreAspectRatio,Qt::SmoothTransformation);

        emit signal_screenImageCaptured(scaleImage);

        QtConcurrent::run(this,&CRemoteVoice::ConvertImageToBytes,scaleImage);
    }
}

/**
 * @brief CRemoteVoice::ConvertImageToBytes 转换摄像头图像为byte数据
 * @param image 要转换的图像
 */
void CRemoteVoice::ConvertImageToBytes(const QImage& image)
{
    if(image.isNull())
        return;

    QByteArray imagebytes;
    QBuffer buffer(&imagebytes);
    buffer.open(QIODevice::WriteOnly);
    image.save(&buffer, "JPG"); // writes image into ba in PNG format

    emit signal_convertImageToBytes(imagebytes);
}

/**
 * @brief CRemoteVoice::processconvertImageToBytes Frame a JPEG-encoded image
 *        (tagMultiDataHearder + payload) and send it to every connected
 *        client; optionally append the raw frame to the AVI record file.
 * @param imgData JPEG image bytes produced by ConvertImageToBytes()
 */
void CRemoteVoice::processconvertImageToBytes(QByteArray imgData)
{
    if(imgData.isEmpty())
        return;

    // Optional local recording: append the frame to the open AVI file.
    if(m_isrecordvideodata && m_out_video_fd)
    {
        if(AVI_write_frame(m_out_video_fd,imgData.data(),imgData.size(),1)<0)
        {
           QLOG_INFO()<<"write erro";
        }
    }

    if(m_RemoteClients.isEmpty())
        return;

    tagMultiDataHearder ptagMultiDataHearder;
    ptagMultiDataHearder.datatype = MULTITYPE_VIDEO;
    ptagMultiDataHearder.sourcesize = imgData.size();

    QByteArray compressdata = qCompress(imgData);
    ptagMultiDataHearder.compresssize = compressdata.size();

    bool isUseCompress = true;

    // Fall back to the raw payload when compression does not help.
    // BUG FIX: must also fall back when the sizes are EQUAL (>=, not >): the
    // receiver treats compresssize == sourcesize as "uncompressed", so sending
    // compressed bytes of identical size would be mis-decoded.
    if(ptagMultiDataHearder.compresssize >= ptagMultiDataHearder.sourcesize)
    {
        ptagMultiDataHearder.compresssize = imgData.size();
        isUseCompress = false;
    }

    QByteArray tempBytes;
    tempBytes.append((const char*)&ptagMultiDataHearder,sizeof(ptagMultiDataHearder));

    if(!isUseCompress)
        tempBytes.append(imgData);
    else
        tempBytes.append(compressdata);

    // Broadcast the framed message to every registered client.
    QHash<QString,CRemoteClient*>::iterator iter = m_RemoteClients.begin();
    for(;iter != m_RemoteClients.end();++iter)
    {
        (*iter)->SendKcpData(tempBytes);
    }
}

/**
 * @brief CRemoteVoice::processcameraImageCaptured Forward a captured camera
 *        frame to listeners and JPEG-encode it on a worker thread.
 * @param id capture request id
 * @param image captured frame
 */
void CRemoteVoice::processcameraImageCaptured(int id,QImage image)
{
    emit signal_cameraImageCaptured(id,image);

    // Heavy JPEG encoding happens off the GUI thread.
    QtConcurrent::run(this,&CRemoteVoice::ConvertImageToBytes,image);
}

/**
 * @brief CRemoteVoice::audio_ReadyRead Drain captured PCM data, broadcast it
 *        to the connected clients and optionally append it to the record file.
 */
void CRemoteVoice::audio_ReadyRead()
{
    if(m_audio_streamIn == NULL) return;

    QByteArray bytedata = m_audio_streamIn->readAll();
    if(bytedata.isEmpty())
        return;

    if(m_issendvoicedata && !m_RemoteClients.isEmpty())
    {
        tagMultiDataHearder ptagMultiDataHearder;
        ptagMultiDataHearder.datatype = MULTITYPE_AUDIO;
        ptagMultiDataHearder.sourcesize = bytedata.size();

        QByteArray compressdata = qCompress(bytedata);
        ptagMultiDataHearder.compresssize = compressdata.size();

        // BUG FIX: like the video path, fall back to the raw payload when
        // compression does not shrink the data. The receiver interprets
        // compresssize == sourcesize as "uncompressed", so unconditionally
        // sending compressed bytes would be mis-decoded whenever the
        // compressed size happens to equal the source size.
        bool isUseCompress = true;
        if(ptagMultiDataHearder.compresssize >= ptagMultiDataHearder.sourcesize)
        {
            ptagMultiDataHearder.compresssize = bytedata.size();
            isUseCompress = false;
        }

        QByteArray tempBytes;
        tempBytes.append((const char*)&ptagMultiDataHearder,sizeof(ptagMultiDataHearder));
        tempBytes.append(isUseCompress ? compressdata : bytedata);

        QHash<QString,CRemoteClient*>::iterator iter = m_RemoteClients.begin();
        for(;iter != m_RemoteClients.end();++iter)
        {
            (*iter)->SendKcpData(tempBytes);
        }
    }

    // Local backup of the captured audio: each chunk is stored compressed,
    // framed by a tagVoiceSection header carrying both lengths and a timestamp.
    if(m_isrecordvoicedata && m_recordvoicefile.isOpen())
    {
        QByteArray compressData = qCompress(bytedata);

        tagVoiceSection pVoiceSection;
        pVoiceSection.Originallength = bytedata.size();
        pVoiceSection.Compressionlegth = compressData.size();
        pVoiceSection.recordtime = QDateTime::currentDateTime().toSecsSinceEpoch();

        m_recordvoicefile.write((const char*)&pVoiceSection,sizeof(pVoiceSection));
        m_recordvoicefile.write(compressData);
    }
}

/**
 * @brief CRemoteVoice::handleStateChanged_input Log audio-input state changes.
 * @param newState new state reported by QAudioInput
 */
void CRemoteVoice::handleStateChanged_input(QAudio::State newState)
{
    if (newState == QAudio::StoppedState)
    {
        // Stopped either because of an error or because recording finished.
        if (m_audio_in->error() != QAudio::NoError)
            QLOG_INFO()<<QString::fromLocal8Bit("录音出现错误:")<<m_audio_in->error();
        else
            QLOG_INFO()<<QString::fromLocal8Bit("完成录音");
    }
    else if (newState == QAudio::ActiveState)
    {
        // Recording started — PCM data is now being read from the IO device.
        QLOG_INFO()<<QString::fromLocal8Bit("开始从IO设备读取PCM声音数据.");
    }
    // Other states need no handling.
}

void CRemoteVoice::timerCamera_Update()
{
    // 处理摄像头捕获
    if(m_camera->state() == QCamera::ActiveState &&
            m_cameraImageCapture)
        m_cameraImageCapture->capture();
}

void CRemoteVoice::KCPUpdate()
{
    if(!m_RemoteClients.isEmpty())
    {
        // kcp更新
        QHash<QString,CRemoteClient*>::iterator iter = m_RemoteClients.begin();
        for(;iter != m_RemoteClients.end();++iter)
        {
            (*iter)->kcp_update();
        }
    }

    // 处理音频记录
    processvoicerecord();

    // 处理视频记录
    processvideorecord();
}

/**
 * @brief CRemoteVoice::processvideorecord 处理视频记录
 */
void CRemoteVoice::processvideorecord(void)
{
    if(!m_isrecordvideodata)
        return;

    bool isCreateNewFile = false;

    if(!m_currentVideoRecordTime.isValid())
    {
        m_currentVideoRecordTime = QDateTime::currentDateTime();
        isCreateNewFile = true;
    }
    else
    {
        QDateTime pcurrentRecordTime = QDateTime::currentDateTime();

        if(m_currentVideoRecordTime.date().day() != pcurrentRecordTime.date().day())
        {
            m_currentVideoRecordTime = QDateTime::currentDateTime();
            isCreateNewFile = true;
        }
    }

    if(isCreateNewFile)
    {
        if(m_out_video_fd)
            AVI_close(m_out_video_fd);
        m_out_video_fd = NULL;

        m_out_video_fd = AVI_open_output_file((char*)getVoiceRecordFilePath(MULTITYPE_VIDEO).toStdString().c_str()); //把文件描述符绑定到此文件上
        if(m_out_video_fd)
        {
            AVI_set_video(m_out_video_fd,m_screenWidth,m_screenHeight,m_refreshframe,(char*)"MJPG");//设置视频文件的格式
        }
    }
}

/**
 * @brief CRemoteVoice::processvoicerecord Open a new voice record file when
 *        audio recording starts or when the calendar day changes.
 */
void CRemoteVoice::processvoicerecord(void)
{
    if(!m_isrecordvoicedata)
        return;

    bool isCreateNewFile = false;

    if(!m_currentRecordTime.isValid())
    {
        m_currentRecordTime = QDateTime::currentDateTime();
        isCreateNewFile = true;
    }
    else
    {
        QDateTime pcurrentRecordTime = QDateTime::currentDateTime();

        // Roll over to a fresh file when the day-of-month changes.
        if(m_currentRecordTime.date().day() != pcurrentRecordTime.date().day())
        {
            m_currentRecordTime = QDateTime::currentDateTime();
            isCreateNewFile = true;
        }
    }

    if(isCreateNewFile)
    {
        if(m_recordvoicefile.isOpen())
            m_recordvoicefile.close();

        QFileInfo pfileInfo(getVoiceRecordFilePath(MULTITYPE_AUDIO));

        // BUG FIX: the file name is time-stamped to the millisecond, so it can
        // never already exist; the old `if(pfileInfo.exists())` guard meant a
        // record file was never opened. Open unconditionally — ReadWrite
        // creates the file when it is missing (matching the video path, which
        // also opens its file unconditionally).
        m_recordvoicefile.setFileName(pfileInfo.absoluteFilePath());

        if(!m_recordvoicefile.open(QIODevice::ReadWrite))
        {
            QLOG_INFO()<<"CRemoteVoice::processvoicerecord create file fail:"<<pfileInfo.absoluteFilePath();
        }
    }
}

void CRemoteVoice::reciverPendingDatagram()
{
    while(m_UdpSocket.hasPendingDatagrams())
    {
        QByteArray preciverDatagramData;
        QHostAddress sender;
        quint16 senderPort = 0;

        preciverDatagramData.resize(m_UdpSocket.pendingDatagramSize());

        if(m_UdpSocket.readDatagram(preciverDatagramData.data(),preciverDatagramData.size(),
                                    &sender, &senderPort) > 0)
        {
            QString kcpName = sender.toString() + "_" + QString::asprintf("%d",senderPort);
            //qDebug()<<kcpName;

            if(!m_RemoteClients.isEmpty())
            {
                QHash<QString,CRemoteClient*>::iterator iter = m_RemoteClients.find(kcpName);
                if(iter != m_RemoteClients.end())
                {
                    (*iter)->onProcessReciverKcpDatagramdata(preciverDatagramData);
                }
            }
        }
    }
}

/**
 * @brief CRemoteVoice::addClient Create and register a client for a peer.
 * @param address peer IP
 * @param port peer port
 * @return true when a new client was registered, false when one with the
 *         same address/port already exists
 */
bool CRemoteVoice::addClient(QHostAddress address,int port)
{
    // Same key format as CRemoteClient::SetClientInfo().
    const QString pName = address.toString() + "_" + QString::number(port);
    if(m_RemoteClients.contains(pName))
        return false;

    CRemoteClient *pRemoteClient = new CRemoteClient(this,&m_UdpSocket);
    pRemoteClient->SetClientInfo(address,port);

    return addClient2(pRemoteClient);
}

/**
 * @brief CRemoteVoice::addClient2 Register an already constructed client.
 * @param pClient client to register
 * @return true when registered, false when NULL or already present
 */
bool CRemoteVoice::addClient2(CRemoteClient *pClient)
{
    if(pClient == NULL || m_RemoteClients.contains(pClient->getName()))
        return false;

    // Route messages decoded by the client back into this object.
    connect(pClient,SIGNAL(processPendingDatagram(CRemoteClient*,QByteArray)),this,SLOT(on_process_processPendingDatagram(CRemoteClient*,QByteArray)));

    m_RemoteClients.insert(pClient->getName(), pClient);

    return true;
}

/**
 * @brief CRemoteVoice::delClient Remove and destroy one client by name.
 * @param pName client key ("ip_port") to remove; unknown names are ignored
 */
void CRemoteVoice::delClient(QString pName)
{
    if(pName.isEmpty())
        return;

    // take() removes the entry and returns NULL when the key is absent.
    CRemoteClient *pClient = m_RemoteClients.take(pName);
    if(pClient == NULL)
        return;

    disconnect(pClient,SIGNAL(processPendingDatagram(CRemoteClient*,QByteArray)),this,SLOT(on_process_processPendingDatagram(CRemoteClient*,QByteArray)));

    delete pClient;
}

/**
 * @brief CRemoteVoice::getClient Look up one client by name.
 * @param pName client key ("ip_port")
 * @return the client, or NULL when the name is empty or unknown
 */
CRemoteClient* CRemoteVoice::getClient(QString pName)
{
    if(pName.isEmpty())
        return NULL;

    // value() returns the mapped pointer, or the supplied default when absent.
    return m_RemoteClients.value(pName, NULL);
}

/**
 * @brief CRemoteVoice::deleteAllClients 删除所有的客户端
 */
void CRemoteVoice::deleteAllClients(void)
{
    QHash<QString,CRemoteClient*>::iterator iter = m_RemoteClients.begin();
    for(;iter != m_RemoteClients.end();++iter)
    {
        disconnect((*iter),SIGNAL(processPendingDatagram(CRemoteClient*,QByteArray)),this,SLOT(on_process_processPendingDatagram(CRemoteClient*,QByteArray)));

        delete (*iter);
    }

    m_RemoteClients.clear();
}

/**
 * @brief CRemoteVoice::getVoiceRecordFilePath Build a time-stamped record
 *        file path (.voi for audio, .avi for video).
 * @param multitype record type selecting the file extension
 * @return full path inside the record directory
 */
QString CRemoteVoice::getVoiceRecordFilePath(tagMultiType multitype)
{
    // Default the record directory to the application directory.
    if(m_recordvoicecatalogue.isEmpty())
        m_recordvoicecatalogue = QCoreApplication::applicationDirPath();

    const QString stamp = QDateTime::currentDateTime().toString("yyyy_MM_dd_hh_mm_ss_zzz");
    const QString extension = (multitype==MULTITYPE_AUDIO) ? ".voi" : ".avi";

    return m_recordvoicecatalogue + "/" + stamp + extension;
}

/**
 * @brief CRemoteVoice::on_process_processPendingDatagram Decode one complete
 *        framed message (tagMultiDataHearder + payload) from a client and
 *        dispatch it by media type.
 * @param rclient client the message was received from
 * @param datagramdata header followed by the (possibly compressed) payload
 */
void CRemoteVoice::on_process_processPendingDatagram(CRemoteClient* rclient,QByteArray datagramdata)
{
    // BUG FIX: this data arrives from the network — validate the length before
    // memcpy, otherwise a short or malformed message over-reads the buffer.
    if(datagramdata.size() < (int)sizeof(tagMultiDataHearder))
        return;

    tagMultiDataHearder ptagMultiDataHearder;
    memset(&ptagMultiDataHearder,0,sizeof(ptagMultiDataHearder));

    memcpy(&ptagMultiDataHearder,datagramdata.constData(),sizeof(ptagMultiDataHearder));
    datagramdata.remove(0,sizeof(ptagMultiDataHearder));

    // The remaining bytes must match the advertised payload size exactly.
    if(ptagMultiDataHearder.compresssize == datagramdata.size())
    {
        QByteArray uncompressdata;

        // compresssize == sourcesize marks an uncompressed payload.
        if(ptagMultiDataHearder.compresssize != ptagMultiDataHearder.sourcesize)
        {
            uncompressdata = qUncompress(datagramdata);
        }
        else
        {
            uncompressdata = datagramdata;
        }

        // Drop the message if decompression failed or the sizes disagree.
        if(ptagMultiDataHearder.sourcesize == uncompressdata.size())
        {
            switch(ptagMultiDataHearder.datatype)
            {
            case MULTITYPE_AUDIO:
            {
                // Play received audio immediately through the output device.
                if(m_audio_out_streamIn != NULL)
                    m_audio_out_streamIn->write(uncompressdata);
            }
                break;
            case MULTITYPE_VIDEO:
            {
                QLOG_INFO()<<"MULTITYPE_VIDEO";
            }
                break;
            default:
                break;
            }

            emit signal_ClientReceiveMsg(rclient,uncompressdata);
        }
    }
}

#endif
