#include "log.h"
#include "global.h"
#include "cdevicectrl.h"
#include "CRtspSession.h"

void *ThreadVideoCapture(void *pVoid)
{
    // Capture thread: pulls frames from the V4L2 device with select/DQBUF and
    // copies each one into the shared YUV FIFO until GetCapFlag() goes false.
    // pVoid: the owning CDeviceCtrl*. Returns NULL on any setup failure.
    CDeviceCtrl *pDev = (CDeviceCtrl *)pVoid;
    if(NULL == pDev)
    {
        return NULL;
    }
    PRealMedia pMedia = pDev->pMedia;
    if(NULL == pMedia)
    {
        return NULL;
    }
    // Guard: media pipeline must be fully initialised before capture starts.
    AVCodecContext *pOutputVideoCodecContext = pMedia->pOutputVideoStream->codec;
    if(NULL == pOutputVideoCodecContext)
    {
        return NULL;
    }
    struct v4l2_buffer              buf;
    enum v4l2_buf_type              type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(-1 == ioctl(pDev->GetDeviceHandle(), VIDIOC_STREAMON, &type))
    {
        CDebugLog::Instance()->DebugLog("VIDIOC_STREAMON failed.err:%s", strerror(errno));
        return NULL;
    }

    struct timeval                  tv;
    fd_set                          fds;
    int ret(-1);
    while(pDev->GetCapFlag())
    {
        do
        {
            FD_ZERO(&fds);
            FD_SET(pDev->GetDeviceHandle(), &fds);
            tv.tv_sec = 1;
            tv.tv_usec = 0;
            ret = select(pDev->GetDeviceHandle() + 1, &fds, NULL, NULL, &tv);
        } while (ret == -1 && errno == EINTR);  // BUGFIX: was `errno = EINTR` (assignment),
                                                // which clobbered errno and retried on EVERY error.
        if (ret == -1)
        {
            CDebugLog::Instance()->DebugLog("select failed continue.err:%s", strerror(errno));
            continue;
        }
        if (0 == ret)
        {
            // BUGFIX: select() timed out — no frame is ready. Re-check GetCapFlag()
            // instead of falling through to VIDIOC_DQBUF, which could block forever.
            continue;
        }

        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if(-1 == ioctl(pDev->GetDeviceHandle(), VIDIOC_DQBUF, &buf))
        {
            CDebugLog::Instance()->DebugLog("VIDIOC_DQBUF failed continue.err:%s", strerror(errno));
            continue;
        }

        // Copy the mmap'ed frame into the shared YUV FIFO under its semaphore.
        SemaphoreTake(pMedia->semaphore_yuv);
        FifoBufferWrite(pMedia->fifo_yuv, (unsigned char*)(pDev->GetVideoBuffer()[buf.index]).start, (pDev->GetVideoBuffer()[buf.index]).length);
        SemaphoreGive(pMedia->semaphore_yuv);

        // Hand the buffer back to the driver so it can be filled again.
        if(-1 == ioctl(pDev->GetDeviceHandle(), VIDIOC_QBUF, &buf))
        {
            CDebugLog::Instance()->DebugLog("VIDIOC_QBUF .err:%s", strerror(errno));
            continue;
        }
    }
    // NOTE(review): streaming is left on; consider VIDIOC_STREAMOFF here — confirm
    // whether device teardown elsewhere handles it.
    CDebugLog::Instance()->DebugLog("ThreadReadAndDisplay end .");
    pthread_detach(pthread_self());
    pthread_exit(0);
}

void *ThreadVideoEncode(void *pVoid)
{
    CDeviceCtrl *pDev = (CDeviceCtrl *)pVoid;
    if(NULL == pDev)
    {
        return NULL;
    }
    PRealMedia pMedia = pDev->pMedia;
    if(NULL == pMedia)
    {
        return NULL;
    }
    AVCodecContext *pOutputVideoCodecContext = pMedia->pOutputVideoStream->codec;
    if(NULL == pOutputVideoCodecContext)
    {
        return NULL;
    }
    int ret(-1), got_size(0), length(avpicture_get_size(AV_PIX_FMT_YUYV422, _CAPTURE_WIDTH, _CAPTURE_HEIGHT));
    uint8_t *dyn_buffer = NULL;
    uint8_t *buffer = (uint8_t *)malloc(length);

//    FILE *fp = fopen("yuv2.ts", "wb");
    while(pDev->GetCapFlag())
    {
        if(FifoBufferSize(pMedia->fifo_yuv) < length)
        {
            usleep(5000);
            continue;
        }

        SemaphoreTake(pMedia->semaphore_yuv);
        FifoBufferRead(pMedia->fifo_yuv, buffer, &length);
        SemaphoreGive(pMedia->semaphore_yuv);

        avpicture_fill((AVPicture *)pMedia->pVideoFrame, buffer, AV_PIX_FMT_YUYV422, _CAPTURE_WIDTH, _CAPTURE_HEIGHT);
        //ffmpeg swscale
        if(NULL != pMedia->ptSwsContextV)
        {
            uint8_t *pu8DestData[4] = {
                pMedia->yuv_sws_buffer,
                pMedia->yuv_sws_buffer + pDev->m_CaptureWidth * pDev->m_CaptureHeight,
                pMedia->yuv_sws_buffer + pDev->m_CaptureWidth * pDev->m_CaptureHeight*5/4,
                0};
            int s32DestSlice[4] = {pDev->m_CaptureWidth, pDev->m_CaptureWidth/2,
                                   pDev->m_CaptureWidth/2, 0};

            if(sws_scale(pMedia->ptSwsContextV,
                         pMedia->pVideoFrame->data, pMedia->pVideoFrame->linesize,
                         0, pDev->m_CaptureHeight, pu8DestData, s32DestSlice))
            {
                avcodec_get_frame_defaults(pMedia->pVideoFrame);
                avpicture_fill((AVPicture *)pMedia->pVideoFrame, pMedia->yuv_sws_buffer, AV_PIX_FMT_YUV420P, _CAPTURE_WIDTH, _CAPTURE_HEIGHT);
                pMedia->pVideoFrame->pts = av_rescale(pOutputVideoCodecContext->coded_frame->pts,
                                                            AV_TIME_BASE*(int64_t)pOutputVideoCodecContext->time_base.num,
                                                            pOutputVideoCodecContext->time_base.den);
                av_init_packet(&pMedia->outPacket);
                pMedia->outPacket.data = NULL;
                pMedia->outPacket.size = 0;
                ret = avcodec_encode_video2(pOutputVideoCodecContext, &pMedia->outPacket, pMedia->pVideoFrame, &got_size);
                if(got_size != 0)
                {
                    pMedia->outPacket.dts = pMedia->outPacket.pts =
                                                  /*(frameNum++) * (90000/25);*/av_rescale_q(pMedia->frameNum++,
                                                                                             pOutputVideoCodecContext->time_base,
                                                                                             pMedia->pOutputVideoStream->time_base);

                    if (pMedia->outPacket.duration > 0)
                        pMedia->outPacket.duration = av_rescale_q(pMedia->outPacket.duration,
                                                                        pOutputVideoCodecContext->time_base,
                                                                        pMedia->pOutputVideoStream->time_base);

                    if (NULL != pOutputVideoCodecContext->coded_frame && pOutputVideoCodecContext->coded_frame->key_frame)
                    {
                        pMedia->outPacket.flags |= AV_PKT_FLAG_KEY;
                    }
                    pMedia->outPacket.stream_index = 0;
                    //outPacket.flags = packet.flags;
                    avio_open_dyn_buf(&pMedia->pOutputFormatContext->pb);
                    if(0 != av_interleaved_write_frame(pMedia->pOutputFormatContext, &pMedia->outPacket))
                    {
                    }else
                    {
                        ret = avio_close_dyn_buf(pMedia->pOutputFormatContext->pb, (uint8_t **)&dyn_buffer);
                        if (ret > 0 /*&& NULL != fp*/)
                        {
//                            fwrite(dyn_buffer, 1, ret, fp);
//                            fflush(fp);
                            SemaphoreTake(pMedia->semaphore_coded);
                            FifoBufferWrite(pMedia->fifo_coded, dyn_buffer, ret);
                            SemaphoreGive(pMedia->semaphore_coded);
                        }
                        av_free(dyn_buffer);
                        dyn_buffer = NULL;
                    }
                }else
                {
                    ret = 6;
                }
                av_free_packet(&pMedia->outPacket);
            }else
            {
            }
        }
    }
    CDebugLog::Instance()->DebugLog("ThreadReadAndDisplay end .");
    pthread_detach(pthread_self());
    pthread_exit(0);
}

void CDeviceCtrl::MediaUnInit()
{

}

PRealMedia CDeviceCtrl::MediaInit()
{
    // (Re)build the capture/encode pipeline state: FIFOs and semaphores for the
    // two worker threads, a YUYV->YUV420P swscale context, and an H.264 encoder
    // muxed into an in-memory MPEG-TS stream.
    // Returns the initialised pMedia on success, NULL (with everything released)
    // on any failure.
    if(NULL != pMedia)
    {
        MediaUnInit();
    }
    do
    {
        pMedia = (PRealMedia)malloc(sizeof(TRealMedia));
        if(NULL == pMedia)  // BUGFIX: malloc result was passed to memset unchecked
        {
            break;
        }
        memset(pMedia, 0, sizeof(TRealMedia));
        pMedia->frameNum = 0;
        pMedia->semaphore_yuv = SemaphoreCreate();
        pMedia->semaphore_coded = SemaphoreCreate();
        pMedia->fifo_yuv = FifoBufferCreate(8 * 1024 * 1024);     // raw YUYV frames
        pMedia->fifo_coded = FifoBufferCreate(4 * 1024 * 1024);   // muxed TS output
        pMedia->pVideoFrame = avcodec_alloc_frame();
        if(NULL == pMedia->pVideoFrame)
        {
            break;
        }
        pMedia->yuv_buffer = (uint8_t *)malloc(avpicture_get_size(AV_PIX_FMT_YUYV422, _CAPTURE_WIDTH, _CAPTURE_HEIGHT));
        if(NULL == pMedia->yuv_buffer)
            break;
        pMedia->yuv_sws_buffer = (uint8_t *)malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, _CAPTURE_WIDTH, _CAPTURE_HEIGHT));
        if(NULL == pMedia->yuv_sws_buffer)
            break;

        // Same source and destination dimensions; only the pixel format changes.
        pMedia->ptSwsContextV = sws_getCachedContext(NULL, _CAPTURE_WIDTH, _CAPTURE_HEIGHT,
                                                     AV_PIX_FMT_YUYV422, _CAPTURE_WIDTH, _CAPTURE_HEIGHT, AV_PIX_FMT_YUV420P,
                                                     SWS_BICUBIC, NULL, NULL, NULL);
        if(NULL == pMedia->ptSwsContextV)  // BUGFIX: result was used unchecked
        {
            break;
        }

        pMedia->pOutputFormatContext = avformat_alloc_context();
        if(NULL == pMedia->pOutputFormatContext)  // BUGFIX: dereferenced unchecked below
        {
            break;
        }

        av_dict_set(&pMedia->pOutputFormatContext->metadata, "author"   , "chengaobao", 0);
        av_dict_set(&pMedia->pOutputFormatContext->metadata, "comment"  , "chengaobao_comment", 0);
        av_dict_set(&pMedia->pOutputFormatContext->metadata, "copyright", "chengaobao_copyright", 0);
        av_dict_set(&pMedia->pOutputFormatContext->metadata, "title",   "chengaobao_title", 0);

        AVOutputFormat *pOutputFormat = av_guess_format("mpegts", NULL, NULL);
        if (NULL == pOutputFormat)
        {
            break;
        }
        pMedia->pOutputFormatContext->oformat = pOutputFormat;
        AVCodec *vCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if(!vCodec)
        {
            break ;
        }
        pMedia->pOutputVideoStream = avformat_new_stream(pMedia->pOutputFormatContext, vCodec);
        if(NULL == pMedia->pOutputVideoStream)  // BUGFIX: ->codec dereferenced unchecked
        {
            break;
        }

        AVCodecContext *pOutputVideoCodecContext = pMedia->pOutputVideoStream->codec;

        pOutputVideoCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
        pOutputVideoCodecContext->codec_id = AV_CODEC_ID_H264;
        //pOutputVideoCodecContext->bits_per_raw_sample = inputVCodecCtx->bits_per_raw_sample;
        //pOutputVideoCodecContext->bits_per_coded_sample = inputVCodecCtx->bits_per_coded_sample;
        pOutputVideoCodecContext->chroma_sample_location = AVCHROMA_LOC_LEFT;
        // Low, tightly-bounded bitrate for constrained transport.
        pOutputVideoCodecContext->bit_rate = 50000;
        pOutputVideoCodecContext->rc_max_rate = 50000;
        pOutputVideoCodecContext->rc_min_rate = 5000;
        //pOutputVideoCodecContext->bit_rate_tolerance = inputVCodecCtx->bit_rate_tolerance;
        //pOutputVideoCodecContext->rc_buffer_size = inputVCodecCtx->rc_buffer_size;
        pOutputVideoCodecContext->rc_initial_buffer_occupancy = pOutputVideoCodecContext->rc_buffer_size * 3 / 4;
        pOutputVideoCodecContext->rc_buffer_aggressivity = (float)1.0;
        //pOutputVideoCodecContext->rc_initial_cplx = /*0.5*/inputVCodecCtx->rc_initial_cplx;
        //pOutputVideoCodecContext->extradata = inputVCodecCtx->extradata;
        //pOutputVideoCodecContext->extradata_size = inputVCodecCtx->extradata_size;
        //pOutputVideoCodecContext->keyint_min = /*5*/inputVCodecCtx->keyint_min;
        // No B-frames: keeps latency low for live streaming.
        pOutputVideoCodecContext->has_b_frames = 0;
        pOutputVideoCodecContext->max_b_frames = 0;
        pOutputVideoCodecContext->level = 30;
        //pOutputVideoCodecContext->field_order = inputVCodecCtx->field_order;
        pOutputVideoCodecContext->width = _CAPTURE_WIDTH;
        pOutputVideoCodecContext->height = _CAPTURE_HEIGHT;
        pOutputVideoCodecContext->time_base.den = 8;   // 8 fps encoder time base
        pOutputVideoCodecContext->time_base.num = 1;
        pOutputVideoCodecContext->gop_size = 15;
        pOutputVideoCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
        if (pMedia->pOutputFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
        {
            pOutputVideoCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }
        pOutputVideoCodecContext->qblur = 0.5f;
        pOutputVideoCodecContext->nsse_weight = 8;
        pOutputVideoCodecContext->i_quant_factor = (float)0.8;
        pOutputVideoCodecContext->b_quant_factor = 1.25;
        pOutputVideoCodecContext->b_quant_offset = 1.25;

        pOutputVideoCodecContext->me_range = 16;
        pOutputVideoCodecContext->max_qdiff = 20;
        // qmin == qmax pins the quantiser: effectively constant-quality encoding.
        pOutputVideoCodecContext->qmin = 30;
        pOutputVideoCodecContext->qmax = 30;
        pOutputVideoCodecContext->qcompress = 0.6f;

        // x264 private options tuned for minimal encode latency.
        av_opt_set(pOutputVideoCodecContext->priv_data, "preset", "superfast", 0);
        av_opt_set(pOutputVideoCodecContext->priv_data, "tune", "zerolatency", 0);

        if(avcodec_open2(pOutputVideoCodecContext, vCodec, NULL) < 0)
        {
            break;
        }
        // Write the container header into a throwaway dynamic buffer so the
        // muxer is primed; the encode thread streams the per-packet output.
        avio_open_dyn_buf(&pMedia->pOutputFormatContext->pb);
        avformat_write_header(pMedia->pOutputFormatContext, NULL);
        unsigned char *buffer = NULL;
        avio_close_dyn_buf(pMedia->pOutputFormatContext->pb, (uint8_t **)&buffer);
        av_free(buffer);
        buffer = NULL;
        return pMedia;
    }while(false);
    // Failure path: tear down whatever was allocated before the break.
    if(NULL != pMedia)
    {
        if(NULL != pMedia->fifo_yuv)
        {
            FifoBufferDestroy(pMedia->fifo_yuv);
            pMedia->fifo_yuv = NULL;
        }
        if(NULL != pMedia->fifo_coded)
        {
            FifoBufferDestroy(pMedia->fifo_coded);
            pMedia->fifo_coded = NULL;
        }
        if(NULL != pMedia->semaphore_yuv)
        {
            SemaphoreDestroy(pMedia->semaphore_yuv);
            pMedia->semaphore_yuv = NULL;
        }
        if(NULL != pMedia->semaphore_coded)
        {
            SemaphoreDestroy(pMedia->semaphore_coded);
            pMedia->semaphore_coded = NULL;
        }
        if(NULL != pMedia->pOutputVideoStream)
        {
            avcodec_close(pMedia->pOutputVideoStream->codec);
        }
        if(NULL != pMedia->pOutputFormatContext)
        {
            avformat_free_context(pMedia->pOutputFormatContext);
            pMedia->pOutputFormatContext = NULL;
        }
        if(NULL != pMedia->pVideoFrame)
        {
            avcodec_free_frame(&pMedia->pVideoFrame);
            pMedia->pVideoFrame = NULL;
        }
        if(NULL != pMedia->yuv_buffer)
        {
            free(pMedia->yuv_buffer);
            pMedia->yuv_buffer = NULL;
        }
        if(NULL != pMedia->yuv_sws_buffer)
        {
            free(pMedia->yuv_sws_buffer);
            pMedia->yuv_sws_buffer = NULL;
        }
        if(NULL != pMedia->ptSwsContextV)
        {
            sws_freeContext(pMedia->ptSwsContextV);
            pMedia->ptSwsContextV = NULL;
        }
        free(pMedia);
        pMedia = NULL;
    }
    return NULL;
}

#ifdef _WIN32_WINNT
DWORD WINAPI SessionThreadHandler(LPVOID lpParam)
#else
void *SessionThreadHandler(void *lpParam)
#endif
{
    // Per-client RTSP session thread: parses RTSP commands on the client socket
    // and, once PLAY is received, pumps stream data on each poll timeout.
    // lpParam points at the accepting loop's SOCKET variable.
    SOCKET Client = *(SOCKET*)lpParam;

    char         RecvBuf[1000];                    // receiver buffer
    int          res;
    CStreamer    Streamer(Client);                  // our streamer for UDP/TCP based RTP transport
    CRtspSession RtspSession(Client,&Streamer);     // our threads RTSP session and state
    bool StreamingStarted = false;
    bool Stop = false;

#ifdef _WIN32_WINNT
        HANDLE       WaitEvents[2];                     // the waitable kernel objects of our session
        HANDLE HTimer = ::CreateWaitableTimerA(NULL, false, NULL);

        WSAEVENT RtspReadEvent = WSACreateEvent();      // create READ wait event for our RTSP client socket
        WSAEventSelect(Client, RtspReadEvent, FD_READ);   // select socket read event
        WaitEvents[0] = RtspReadEvent;
        WaitEvents[1] = HTimer;

        // set frame rate timer
        double T = 40.0;                                       // frame rate
        int iT = T;
        const __int64 DueTime = -static_cast<const __int64>(iT)* 10 * 1000;
        ::SetWaitableTimer(HTimer, reinterpret_cast<const LARGE_INTEGER*>(&DueTime), iT, NULL, NULL, false);

        while (!Stop)
        {
                switch (WaitForMultipleObjects(2, WaitEvents, false, INFINITE))
                {
                case WAIT_OBJECT_0 + 0:
                {   // read client socket
                        WSAResetEvent(WaitEvents[0]);

                        memset(RecvBuf, 0x00, sizeof(RecvBuf));
                        res = recv(Client, RecvBuf, sizeof(RecvBuf), 0);

                        // we filter away everything which seems not to be an RTSP command: O-ption, D-escribe, S-etup, P-lay, T-eardown
                        if ((RecvBuf[0] == 'O') || (RecvBuf[0] == 'D') || (RecvBuf[0] == 'S') || (RecvBuf[0] == 'P') || (RecvBuf[0] == 'T'))
                        {
                                RTSP_CMD_TYPES C = RtspSession.Handle_RtspRequest(RecvBuf, res);
                                if (C == RTSP_PLAY)     StreamingStarted = true; else if (C == RTSP_TEARDOWN) Stop = true;
                        };
                        break;
                };
                case WAIT_OBJECT_0 + 1:
                {
                        if (StreamingStarted) Streamer.StreamImage(RtspSession.GetStreamID());
                        break;
                };
                };
        };
        closesocket(Client);
        return 0;
#else
        // BUGFIX: nobody joins these session threads, so detach up front to
        // avoid accumulating zombie pthreads.
        pthread_detach(pthread_self());

        uint32_t s32Value = 8 * 1024 * 1024;
        setsockopt(Client, SOL_SOCKET, SO_SNDBUF, (char *)&s32Value, sizeof(s32Value));
        s32Value = 8 * 1024 * 1024;
        setsockopt(Client, SOL_SOCKET, SO_RCVBUF, (char *)&s32Value, sizeof(s32Value));
        s32Value = 1;
        setsockopt(Client, SOL_SOCKET, SO_REUSEADDR, (char *)&s32Value, sizeof(s32Value));

        int s32Read = 0;
        struct timeval tTimeVal;
        fd_set  tRdSet;
        while(!Stop)
        {
            res = 64* 1024;
            // 40 ms poll period doubles as the streaming pacing interval.
            tTimeVal.tv_sec = 0;
            tTimeVal.tv_usec = 40000;
            FD_ZERO(&tRdSet);
            FD_SET(Client, &tRdSet);
            // BUGFIX: nfds must be highest-fd + 1, not FD_SETSIZE.
            s32Read  = select(Client + 1, &tRdSet, NULL, NULL, &tTimeVal);
            if(s32Read > 0)
            {
                memset(RecvBuf, 0x00, sizeof(RecvBuf));
                res = recv(Client, RecvBuf, sizeof(RecvBuf), 0);
                if(0 == res)
                {
                    printf("peer shutdown.\n");
                    fflush(stdout);
                    Stop = true;
                    continue;
                }
                if(res < 0)
                {
                    // BUGFIX: recv errors were silently ignored; retry on EINTR,
                    // otherwise end the session.
                    if(EINTR == errno)
                        continue;
                    printf("recv failed: %s\n", strerror(errno));
                    fflush(stdout);
                    Stop = true;
                    continue;
                }
                // Filter away everything which seems not to be an RTSP command:
                // O-ption, D-escribe, S-etup, P-lay, T-eardown.
                if ((RecvBuf[0] == 'O') || (RecvBuf[0] == 'D') || (RecvBuf[0] == 'S') || (RecvBuf[0] == 'P') || (RecvBuf[0] == 'T'))
                {
                        RTSP_CMD_TYPES C = RtspSession.Handle_RtspRequest(RecvBuf, res);
                        if (C == RTSP_PLAY)
                            StreamingStarted = true;
                        else if (C == RTSP_TEARDOWN)
                            Stop = true;
                }
                printf("recved data: %s \n", RecvBuf);
                fflush(stdout);
            }else
            {
                // select timed out: no RTSP traffic — push stream data if playing.
                if (StreamingStarted)
                    Streamer.StreamImage(RtspSession.GetStreamID());
            }
        }
        close(Client);  // BUGFIX: the POSIX branch leaked the client fd on every session
        return NULL;
#endif
}

int RtspServerInit()
{
    // RTSP server entry point: binds port 8554, then loops forever accepting
    // clients and spawning one SessionThreadHandler per connection.
    // Always returns 0 (callers treat the return as informational only).
    SOCKET      MasterSocket;                                 // our masterSocket(socket that listens for RTSP client connections)
    SOCKET      ClientSocket;                                 // RTSP socket to handle an client
    sockaddr_in ServerAddr;                                   // server address parameters
    sockaddr_in ClientAddr;                                   // address parameters of a new RTSP client
#ifdef _WIN32_WINNT
    WSADATA     WsaData;
    DWORD       TID;
    int         ClientAddrLen = sizeof(ClientAddr);


    int ret = WSAStartup(0x101,&WsaData);
    if (ret != 0)
        return 0;
#else
    socklen_t   ClientAddrLen = sizeof(ClientAddr);
    pthread_t   hHandle;
#endif


    ServerAddr.sin_family      = AF_INET;
    ServerAddr.sin_addr.s_addr = INADDR_ANY;
    ServerAddr.sin_port        = htons(8554);                 // listen on RTSP port 8554
    MasterSocket               = socket(AF_INET,SOCK_STREAM,0);
    // BUGFIX: socket() result was used unchecked.
#ifdef _WIN32_WINNT
    if (MasterSocket == INVALID_SOCKET)
        return 0;
#else
    if (MasterSocket < 0)
    {
        perror("socket");
        return 0;
    }
#endif

    uint32_t s32Value = 1;
    setsockopt(MasterSocket, SOL_SOCKET, SO_REUSEADDR, (char *)&s32Value, sizeof(s32Value));
    // bind our master socket to the RTSP port and listen for a client connection
    if (bind(MasterSocket,(sockaddr*)&ServerAddr,sizeof(ServerAddr)) != 0)
    {
        perror("bind");
        return 0;
    }
    if (listen(MasterSocket,5) != 0)
    {
        perror("listen");
        return 0;
    }

    while (true)
    {   // loop forever to accept client connections
        ClientAddrLen = sizeof(ClientAddr);
        ClientSocket = accept(MasterSocket,(struct sockaddr*)&ClientAddr,&ClientAddrLen);
        // BUGFIX: accept() failure previously spawned a thread on an invalid socket.
#ifdef _WIN32_WINNT
        if (ClientSocket == INVALID_SOCKET)
            continue;
#else
        if (ClientSocket < 0)
        {
            perror("accept");
            continue;
        }
#endif
        // NOTE(review): &ClientSocket is a stack address shared with the new
        // thread; a fast subsequent accept() can overwrite it before the thread
        // copies the value. Consider passing a per-client heap copy.
#ifdef _WIN32_WINNT
        CreateThread(NULL,0,SessionThreadHandler,&ClientSocket,0,&TID);
#else
        pthread_create(&hHandle, NULL, SessionThreadHandler, &ClientSocket);
#endif
        printf("Client connected. Client address: %s\r\n",inet_ntoa(ClientAddr.sin_addr));
    }

#ifdef _WIN32_WINNT
    closesocket(MasterSocket);
    WSACleanup();
#else
    close(MasterSocket);
#endif
    return 0;
}


