﻿#include "IRtspReadManager.h"

#if defined(WIN32)
#include <windows.h>
#include <time.h>
#include <sys/timeb.h>
#endif

#include <assert.h>
#include "base/TraceLog.h"

// Delete a single heap object and reset the pointer so a second
// SafeFree on the same variable is a harmless no-op.
// do/while(0) keeps the macro safe inside unbraced if/else.
// NOTE: only for scalar `new`; never use on `new[]` allocations.
#define SafeFree(ptr) \
    do { if (ptr) { delete ptr; ptr = nullptr; } } while (0)


//char aac_adts_header[7] = { 0 };
//int chanCfg = 2;            //MPEG-4 Audio Channel Configuration. 1 Channel front-center

//static int init_aac_header() {
//    int profile = 2;   //AAC LC
//    int freqIdx = 11;   //8000HZ

//    aac_adts_header[0] = (char)0xFF;      // 11111111     = syncword
//    aac_adts_header[1] = (char)0xF1;      // 1111 1 00 1  = syncword MPEG-2 Layer CRC
//    aac_adts_header[2] = (char)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
//    aac_adts_header[6] = (char)0xFC;

//    return 0;
//}

//static int write_aac_header(FILE* fp, AVPacket* pkt) {
//    aac_adts_header[3] = (char)(((chanCfg & 3) << 6) + ((7 + pkt->size) >> 11));
//    aac_adts_header[4] = (char)(((7 + pkt->size) & 0x7FF) >> 3);
//    aac_adts_header[5] = (char)((((7 + pkt->size) & 7) << 5) + 0x1F);

//    fwrite(aac_adts_header, 7, 1, fp);

//    return 0;
//}

//static unsigned int GetSamplingFrequencyIndex(unsigned int sampling_frequency)
//{
//    switch (sampling_frequency) {
//    case 96000: return 0;
//    case 88200: return 1;
//    case 64000: return 2;
//    case 48000: return 3;
//    case 44100: return 4;
//    case 32000: return 5;
//    case 24000: return 6;
//    case 22050: return 7;
//    case 16000: return 8;
//    case 12000: return 9;
//    case 11025: return 10;
//    case 8000:  return 11;
//    case 7350:  return 12;
//    default:    return 0;
//    }
//}

static char g_curr_time[50];   // shared scratch buffer for SRE_GetCurrTime()

/* Return the current local time formatted as
 * "YYYY-MM-DD-HH.MM.SS-mmm" (milliseconds appended).
 * The result points at a shared static buffer: it is overwritten by
 * the next call and must not be freed by the caller.
 * NOTE(review): not thread-safe -- both the static buffer and
 * localtime() are shared state; confirm all callers are single-threaded.
 */
char *SRE_GetCurrTime()
{
    memset(g_curr_time, 0, sizeof(g_curr_time));
#if defined(OS_LINUX) || defined(ANDROID) || defined(IOS)
    struct timeval val;
    struct tm *ptm = NULL;

    gettimeofday(&val, NULL);
    ptm = localtime(&val.tv_sec);
    if (ptm != NULL)
    {
        /* snprintf (not sprintf): guarantees no buffer overflow */
        snprintf(g_curr_time, sizeof(g_curr_time),
                 "%04d-%02d-%02d-%02d.%02d.%02d-%03ld",
                 ptm->tm_year + 1900,
                 ptm->tm_mon + 1,
                 ptm->tm_mday,
                 ptm->tm_hour,
                 ptm->tm_min,
                 ptm->tm_sec,
                 val.tv_usec / 1000);
    }
#endif

#if defined(WIN32)
    struct timeb tp;
    struct tm *tm = NULL;
    ftime(&tp);
    tm = localtime(&(tp.time));
    if (tm != NULL)
    {
        snprintf(g_curr_time, sizeof(g_curr_time),
                 "%d-%02d-%02d-%02d.%02d.%02d-%03d",
                 tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
                 tm->tm_hour, tm->tm_min, tm->tm_sec, tp.millitm);
    }
#endif
    return g_curr_time;
}

// Construct the manager in an idle state; no FFmpeg resources are
// acquired until StartRtspInputUrl() is called.
// _play_back: callback notified on read EOF (may be NULL).
IRtspReadManager::IRtspReadManager(PlayCallback* _play_back)
    : pFormatCtx(NULL)
    , audio_fp(NULL)
    , outputContext(NULL)
    , last_mux_dts(0)
    , pull_str_url_("")
    , write_mp4_file_name("")
    , play_back(_play_back)
{
    // Fix: video_stream_index was never initialized; give it a sentinel
    // so use before StartRtspInputUrl() succeeds is detectable instead
    // of reading indeterminate memory.
    video_stream_index = -1;

    //init_aac_header();

    //audio_fp = fopen("1.aac", "wb");
}
// Close the debug audio dump file if one was ever opened.
// FFmpeg contexts are released in StopRtspInput()/CloseOutput(), not here.
IRtspReadManager::~IRtspReadManager()
{
    if (audio_fp != NULL)
        fclose(audio_fp);
}

// Translate an FFmpeg error code into a readable message.
// Falls back to the C library's strerror() when av_strerror()
// does not recognize the code.
static std::string error_string(int err)
{
    char buf[128];

    if (av_strerror(err, buf, sizeof(buf)) >= 0) {
        return std::string(buf);
    }
    return std::string(strerror(AVUNERROR(err)));
}

int IRtspReadManager::StartRtspInputUrl(std::string url)
{
    LogINFO("Call %s Start.", __FUNCTION__);
    if (url.empty())
    {
        LogERR("Your Input url is null, Please check it");
        return -1;
    }

    AVDictionary *dict = NULL;
    av_dict_set(&dict, "stimeout", "2000000", 0);	// stimeout 为 tcp io 超时，微秒
    av_dict_set(&dict, "rtsp_transport", "tcp", 0);	// using tcp transport ...

    int ret = avformat_open_input(&pFormatCtx, url.c_str(), nullptr, &dict);

    if (ret < 0)
    {
        LogERR("avformat_open_input %s error,reason:%s", url.c_str(), error_string(ret).c_str());
        av_dict_free(&dict);
        return -1;
    }
    av_dict_free(&dict);

    ret = avformat_find_stream_info(pFormatCtx, nullptr);
    if (ret < 0)
    {
        avformat_close_input(&pFormatCtx);
        LogERR("avformat_find_stream_info error,reason:%s", error_string(ret).c_str());
        return -1;
    }

    // Output the stream info to standard
    av_dump_format(pFormatCtx, 0, url.c_str(), 0);

    //---------------------------------------------------视频不需要解码---直接copy就行

    // Fill video state
    AVCodec* pVCodec = nullptr;
    /* select the video stream and find the decoder*/
    video_stream_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pVCodec, 0);

    if (video_stream_index < 0)
    {
        avformat_close_input(&pFormatCtx);
        LogERR("  video_stream_index < 0 error");
        return -1;
    }

    if (!pVCodec)
    {
        avformat_close_input(&pFormatCtx);
        LogERR("pVCodec  is null");
        return -1;
    }

    //文件名自动生成
    std::string file_name;

    srand((unsigned)time(NULL));

    const char * p = strrchr(url.c_str(),'/');
    if(p != NULL && p+1 != NULL)
    {
        file_name =+ (p+1);
    }


    file_name = file_name + std::to_string(rand()) + "-" + SRE_GetCurrTime() + ".mp4";

    if (OpenOutput(file_name.c_str()) < 0)
    {
        LogERR("OpenOutput  is  error file:%s",file_name.c_str());
        avformat_close_input(&pFormatCtx);
        return -1;
    }

    write_mp4_file_name = file_name;

    base::SimpleThread::Start();

    pull_str_url_ = url;

    LogINFO("Call %s End.", __FUNCTION__);
    return 0;
}

int  IRtspReadManager::OpenOutput(std::string outUrl)
{
    int ret = avformat_alloc_output_context2(&outputContext, nullptr, "mp4", outUrl.c_str());
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "open output context failed\n");
        return -1;
    }

    for (int i = 0; i < pFormatCtx->nb_streams; i++)
    {
        AVStream *out_stream;
        AVStream *in_stream = pFormatCtx->streams[i];
        AVCodecParameters *in_codecpar = in_stream->codecpar;

        //if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        //{
        //	//aac codec copy to
        //	out_stream = avformat_new_stream(outputContext, NULL);
        //	if (!out_stream) {
        //		fprintf(stderr, "Failed allocating output stream\n");
        //		ret = AVERROR_UNKNOWN;
        //		return -1;
        //	}

        //	ret = avcodec_parameters_from_context(out_stream->codecpar, aac_encodeCtx);
        //	if (ret < 0) {
        //		fprintf(stderr, "Could not initialize stream parameters\n");
        //		return -1;
        //	}
        //	//out_stream->codec = aac_encodeCtx;
        //}
        //else
        if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO && i == video_stream_index)
        {
            out_stream = avformat_new_stream(outputContext, NULL);
            if (!out_stream) {
                fprintf(stderr, "Failed allocating output stream\n");
                ret = AVERROR_UNKNOWN;
                return -1;
            }

            ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
            if (ret < 0) {
                fprintf(stderr, "Failed to copy codec parameters\n");
                return -1;
            }
            out_stream->codecpar->codec_tag = 0;


            out_video_mux_timebase = pFormatCtx->streams[video_stream_index]->time_base;
        }

    }

    //打印输出的格式
    //av_dump_format(outputContext, 0, outUrl.c_str(), 1);


    //open file
    if (!(outputContext->flags & AVFMT_NOFILE)) {
        ret = avio_open(&outputContext->pb, outUrl.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0) {
            fprintf(stderr, "Could not open output file '%s'", outUrl.c_str());
            return -1;
        }
    }

    ret = avformat_write_header(outputContext, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "format write header failed");
        return -1;
    }

    av_log(NULL, AV_LOG_FATAL, " Open output file success %s\n", outUrl.c_str());

    return 0;
}

/* Finalize and release the MP4 output: write the trailer, close the
 * file and free the muxer context.
 * Fix: the old code called av_write_trailer(outputContext) BEFORE the
 * NULL check the rest of the function performed, crashing when the
 * output was never opened. Now safe to call in any state. */
int IRtspReadManager::CloseOutput()
{
    if (!outputContext)
    {
        return 0;
    }

    // write trailer (finalizes the moov atom for MP4)
    av_write_trailer(outputContext);

    /* close the output file; AVFMT_NOFILE lives in oformat->flags */
    if (!(outputContext->oformat->flags & AVFMT_NOFILE))
    {
        avio_closep(&outputContext->pb);
    }

    avformat_free_context(outputContext);
    outputContext = NULL;

    return 0;
}

/* Stop the reader thread, finalize the MP4 output and close the RTSP
 * input. Counterpart of StartRtspInputUrl(). Always returns 0. */
int IRtspReadManager::StopRtspInput()
{
    LogINFO("Call %s Start.", __FUNCTION__);
    LogINFO("stop rtst url %s.", pull_str_url_.c_str());

    // Signal the reader thread and wait until it has exited.
    base::SimpleThread::Stop();
    base::SimpleThread::Join();

    CloseOutput();
    LogDEBUG("write mp4 %s success", write_mp4_file_name.c_str());

    // Close the demuxer last; the thread no longer touches it.
    if (pFormatCtx != NULL)
    {
        avformat_close_input(&pFormatCtx);
        pFormatCtx = NULL;
    }

    LogINFO("Call %s End.", __FUNCTION__);
    return 0;
}


/* Reader-thread body: pull packets from the RTSP demuxer until the
 * thread is stopped, EOF is reached or a hard I/O error occurs, and
 * mux video packets into the MP4 file. Non-video packets are dropped.
 * Fixes: every successfully read packet is now unreferenced (audio /
 * data packets previously leaked); the never-used AVFrame allocation
 * was removed; pb is NULL-checked before dereferencing its error. */
void IRtspReadManager::Run()
{
    AVPacket *packet = av_packet_alloc();

    while (!base::SimpleThread::IsStop())
    {
        int ret = av_read_frame(pFormatCtx, packet);

        if (ret < 0)
        {
            if (ret == AVERROR_EOF)
            {
                LogDEBUG("read eof....");

                if (play_back)
                {
                    play_back->OnReadErrorEvent(this, pull_str_url_);
                }

                break;
            }

            // Transient condition (pb exists and reports no error): retry.
            if (pFormatCtx->pb && pFormatCtx->pb->error == 0)
            {
                //webrtc::SleepMs(100);
                continue;
            }

            LogERR("read error....");
            break;
        }

        if (packet->stream_index == video_stream_index)   // video stream
        {
            write_video_frame(packet);
        }

        // Fix: unref EVERY packet returned by av_read_frame, not only
        // video ones -- audio/data packets used to leak their buffers.
        av_packet_unref(packet);
    }

    av_packet_free(&packet);
}




/* Rescale @packet timestamps from the input stream timebase to the
 * output stream timebase, enforce (strictly) increasing dts for the
 * muxer, and write the packet into the MP4 container.
 * Always returns 0; a mux failure is only logged.
 * Fixes:
 *  - dts == AV_NOPTS_VALUE is left untouched: the old code "rescaled"
 *    the NOPTS sentinel, producing a garbage timestamp;
 *  - the output context holds exactly ONE stream (index 0, created in
 *    OpenOutput), so indexing outputContext->streams with the INPUT
 *    video_stream_index read out of bounds whenever the video was not
 *    input stream 0; the output packet index is now taken from the
 *    output stream itself;
 *  - packet->duration is rescaled too (it was left in the input
 *    timebase);
 *  - dead start_time/offset locals removed. */
int IRtspReadManager::write_video_frame(AVPacket *packet)
{
    auto in_stream = pFormatCtx->streams[video_stream_index];
    // The muxer was built with a single (video) stream at index 0.
    auto out_stream = outputContext->streams[0];

    if (packet->pts != AV_NOPTS_VALUE)
        packet->pts = av_rescale_q(packet->pts, in_stream->time_base, out_stream->time_base);

    if (packet->dts != AV_NOPTS_VALUE)
        packet->dts = av_rescale_q(packet->dts, in_stream->time_base, out_stream->time_base);

    if (packet->duration > 0)
        packet->duration = av_rescale_q(packet->duration, in_stream->time_base, out_stream->time_base);

    // MP4 needs monotonically increasing dts (strictly, unless the
    // muxer advertises AVFMT_TS_NONSTRICT): clamp against the last
    // dts we handed to the muxer, pushing pts along if needed.
    if (!(outputContext->oformat->flags & AVFMT_NOTIMESTAMPS))
    {
        if (packet->dts != AV_NOPTS_VALUE && last_mux_dts != AV_NOPTS_VALUE)
        {
            int64_t max = last_mux_dts + !(outputContext->oformat->flags & AVFMT_TS_NONSTRICT);
            if (packet->dts < max)
            {
                if (packet->pts >= packet->dts)
                {
                    packet->pts = FFMAX(packet->pts, max);
                }

                packet->dts = max;
            }
        }
    }
    last_mux_dts = packet->dts;

    // Route the packet to the (only) output stream.
    packet->stream_index = out_stream->index;

    int error = av_interleaved_write_frame(outputContext, packet);
    if (error < 0)
    {
        LogERR("Could not av_interleaved_write_frame (error '%s')\n", error_string(error).c_str());
    }

    return 0;
}
