//
// Created by kenny on 25-11-10.
//
#include "../mcp_logger.h"
#include "../my_media/mcp_media.h"
#include "mcp_msg_queue.h"
#include "mcp_tx.h"

unsigned short st_timeout_live_v;  // consecutive 10 ms empty-queue waits in streamVideoThread; the thread exits past 400 (~4 s)
msg_queue *live_que_v;             // live video packet queue (created lazily at streamVideoThread start)
msg_queue *live_que_a;             // live audio packet queue (destroyed together with the video queue on tx error)
static uint32 increment_p =0;      // accumulated audio timestamp increment used by streamGetTimestamp3's audio branch
/**
 * Advance and return the PS-stream RTP timestamp.
 *
 * @param frequency  >0 selects the video branch, otherwise the audio branch
 * @param samplerate nominal video frame rate (frames/s); implausible values
 *                   (outside 15..60) are clamped to 20 fps
 * @param ts         in/out: current RTP timestamp in 90 kHz units
 * @return the (possibly advanced) timestamp
 */
static uint32 streamGetTimestamp3(int frequency, double samplerate, uint32& ts)
{
    if (frequency <= 0) {
        // Audio: only the file-scope accumulator advances; `ts` itself is
        // returned unchanged. NOTE(review): this looks suspicious — the
        // returned audio timestamp never moves. Confirm the audio caller
        // really expects that before changing it.
        increment_p += 3600;
        LOG_DEBUG("vvvvvvvvvvvvvvvvvvvvvvvv  [[[audio]]]  tm:%u samplerate:%f\n", ts, samplerate);
        return ts;
    }

    LOG_DEBUG("vvvvvvvvvvvv  streamGetTimestamp3[[[video]]] tm:%u samplerate:%f \n", ts, samplerate);
    // Derive the 90 kHz step from the (clamped) frame rate.
    double fps = samplerate;
    if (fps > 60.0 || fps < 15.0) {
        fps = 20.0;
    }
    ts += (uint32)(90000.0 / fps);
    return ts;
}

/**
 * Compute a wall-clock based RTP timestamp for a TS stream.
 *
 * Converts the current time of day into RTP timestamp units by multiplying
 * seconds (plus the rounded fractional microseconds) by the clock frequency.
 * The product intentionally wraps modulo 2^32, as RTP timestamps do.
 *
 * @param frequency RTP clock rate in Hz (e.g. 90000 for video)
 * @return current time expressed in RTP timestamp units (mod 2^32)
 */
static uint32 streamGetTimestamp(int frequency)
{
    struct timeval tv;
    gettimeofday(&tv, 0);
    // Begin by converting from "struct timeval" units to RTP timestamp units.
    // Multiply in unsigned arithmetic: the mod-2^32 wrap is intended, and a
    // signed int overflow here (90000 * seconds-since-epoch) would be UB on
    // platforms with a 32-bit time_t.
    uint32 increment = (uint32)frequency * (uint32)tv.tv_sec;
    increment += (uint32)(frequency*(tv.tv_usec/1000000.0) + 0.5); // note: rounding
    return increment;
}

/**
 * Stop the stream-push thread and release the media resources tied to it.
 *
 * Clears the tx flag, busy-waits (10 ms steps) until the RTP thread has
 * observed it and exited, destroys the tx mutex, stops the media session
 * and closes the audio RTP socket.
 *
 * @param p_sua session whose live push is being stopped
 */
void liveaudiovideo_stop_stream(SUA * p_sua)
{
    p_sua->uaf_rtp_tx = 0;
    // Wait for the RTP thread to notice the cleared flag and exit.
    while (p_sua->rtp_thread)
    {
        usleep(10*1000);
    }
    if (p_sua->pstxi_mutex)
    {
        sys_os_destroy_sig_mutex(p_sua->pstxi_mutex);
        p_sua->pstxi_mutex = NULL;
    }
    LOG_DEBUG("[[liveaudiovideo_stopStream]]>>>>=======[sua_stop_media]======\n");
    // added by liam
    sua_stop_media(p_sua);
    // Only close a real descriptor: this field uses 0 as its "closed"
    // sentinel (set just below), so an unconditional close() here could
    // close fd 0 (stdin) when the socket is already gone.
    if (p_sua->audio_rtp_media.ua_m_fd > 0)
    {
        close(p_sua->audio_rtp_media.ua_m_fd);
    }
    p_sua->audio_rtp_media.ua_m_fd = 0;
    p_sua->uaf_a_rx_flag = 0;
    gLiveaudiovideo.Fd = NULL;
}

/**
 * Handle a single-step (frame step) playback request.
 *
 * Forward step: drop queued video packets until the next SPS/PPS/IDR NAL so
 * playback can resume on a decodable frame; the key frame itself is left in
 * the queue for the tx thread. Backward step is not supported and is
 * reported as finished immediately.
 *
 * @param p_sua session holding the play context and the video queue
 * @return non-zero once the step has completed
 */
static BOOL streamStep(SUA * p_sua)
{
    UA_PACKET packet;
    if (p_sua->play_ctx.step > 0)
    {
        while (!hqBufIsEmpty(p_sua->media_info.v_queue)) // advance to next frame
        {
            if (hqBufPeek(p_sua->media_info.v_queue, (char *)&packet))
            {
                // NAL type lives right after the 4-byte Annex-B start code.
                uint8 nalu_t = packet.data[4] & 0x1F;
                if (nalu_t == H264_NAL_SPS || nalu_t == H264_NAL_PPS || nalu_t == H264_NAL_IDR)
                {
                    p_sua->play_ctx.b_step_finish = 1;
                    // Bug fix: the peek does not dequeue, so without this
                    // break the loop would re-peek the same key frame forever.
                    break;
                }
                else if (hqBufGet(p_sua->media_info.v_queue, (char *)&packet))
                {
                    free(packet.buff);
                }
            }
        }
    }

    if (p_sua->play_ctx.step < 0)
    {  // previous frame: not supported yet, report finished
        p_sua->play_ctx.b_step_finish = 1;
    }

    return p_sua->play_ctx.b_step_finish;
}

/**
 * Live video push thread.
 *
 * Pulls encoded frames from the global live video queue and sends them via
 * RTP using the encoder negotiated in the SDP (PS / H264 / H265 / MPEG4,
 * unknown encoders fall back to H264). Honors pause and single-step
 * playback requests, and exits after ~4 s (400 * 10 ms) without data or on
 * a transmit error.
 *
 * @param argv SUA* session pointer
 * @return NULL
 */
void * streamVideoThread(void * argv)
{
    if (!live_que_v) {
        live_que_v = mq_create();
    }
    // pthread_t is opaque: print it through unsigned long so the format
    // specifier matches the argument (passing it to %d is UB).
    LOG_DEBUG("vvvvvvvvvvvvvvvvvvvvvvvv streamVideoThread start=[%lu] vvvvvvvvvvvvvvvvvvvvvvvv\n",(unsigned long)pthread_self());
    SUA * p_sua = (SUA *)argv;
    double framerate = p_sua->media_info.framerate;
    int nSize=0;
    st_timeout_live_v=0;

    while (p_sua->uaf_rtp_tx) {
        // Paused (and no pending step, or the step already finished):
        // idle-wait for the next command.
        if (p_sua->play_ctx.b_pausing == 1 &&
            (p_sua->play_ctx.b_step_req == 0 ||
             (p_sua->play_ctx.b_step_req == 1 && p_sua->play_ctx.b_step_finish == 1))) {
            LOG_DEBUG( "streamVideoThread2  b_pausing 10 000.。。。\n");
            usleep(10 * 1000);
            continue;
        }
        // A single-step request is pending: skip ahead to the next key frame.
        if (p_sua->play_ctx.b_step_req == 1 && p_sua->play_ctx.b_step_finish == 0) {
            if (!streamStep(p_sua)) {
                LOG_DEBUG( "streamVideoThread2  streamStep 10 000.。。。\n");
                usleep(10 * 1000);
                continue;
            }
        }
        nSize = mq_get_msg_size_que_used(live_que_v);
        if (nSize > 0) {
            st_timeout_live_v=0;
            memory_block *p_memory_block = mq_get_msg_que_unuse(live_que_v);
            if (p_memory_block != NULL) {
                int retps = 0;
                // Payload starts 256 bytes into the block (header area
                // reserved by the producer).
                uint8_t* p_data = (uint8_t*)mb_get_buff_head(p_memory_block) + 256;
                int len = (int)mb_get_real_buff_size(p_memory_block) - 256;

                if (strcasecmp(p_sua->l_v_sdp[0].encoder, "PS") == 0) {
                    uint32 ts = streamGetTimestamp3(1, framerate,p_sua->v_rtp_info.rtp_ts);
                    retps = rtp_ps_video_tx(p_sua, p_data,len,ts);
                } else if (strcasecmp(p_sua->l_v_sdp[0].encoder, "H264") == 0) {
                    retps = rtp_h264_video_tx(p_sua, p_data,len,streamGetTimestamp(90000));
                } else if (strcasecmp(p_sua->l_v_sdp[0].encoder, "H265") == 0) {
                    retps = rtp_h265_video_tx(p_sua, p_data,len,streamGetTimestamp(90000));
                } else if (strcasecmp(p_sua->l_v_sdp[0].encoder, "MPEG4") == 0) {
                    retps = rtp_video_tx(p_sua, p_data,len,streamGetTimestamp(90000));
                } else {
                    // Unknown encoder: fall back to H264 packetization.
                    retps = rtp_h264_video_tx(p_sua, p_data,len,streamGetTimestamp(90000));
                }

                // NOTE(review): on the success path the block is never handed
                // back via mq_submit_msg_que_unuse (the call was commented out
                // in the original) — confirm the queue reclaims it elsewhere.
                if (retps < 0) {
                    LOG_DEBUG( "streamVideoThread live [%d] 退出流线程....\n", retps);
                    mq_submit_msg_que_unuse(live_que_v,p_memory_block);
                    mq_destroy(live_que_v);
                    mq_destroy(live_que_a);
                    // Bug fix: reset the globals so the next thread start
                    // re-creates the queues instead of reusing dangling
                    // pointers through the `if (!live_que_v)` guard above.
                    live_que_v = NULL;
                    live_que_a = NULL;
                    p_sua->video_rtp_media.ua_m_fd = -1;
                    p_sua->media_info.v_index = 0;
                    p_sua->media_info.a_index = 0;
                    // Stop the push and tear the session down.
                    liveaudiovideo_stop_stream(p_sua);
                    break;
                }
            }
        } else {
            // Queue empty: wait 10 ms and give up after 400 misses (~4 s).
            usleep(10 * 1000);
            st_timeout_live_v++;
            LOG_DEBUG( "streamVideoThread  GetMsgSizeQueUsed==0 buffer thread timeout:%d end...\n",st_timeout_live_v);
            if(st_timeout_live_v>400)
                break;
        }
    }
    p_sua->media_info.v_thread = 0;
    p_sua->uaf_rtp_tx=0;
    LOG_DEBUG("vvvvvvvvvvvvvvvvvvvvvvvv streamVideoThread video_thread end=[%lu] vvvvvvvvvvvvvvvvvvvvvvvv\n",(unsigned long)pthread_self());
    return NULL;
}