#include "pushwork.h"
#include"dlog.h"
#include<functional>
#include"avpublishtime.h"

// Members are presumably initialized by in-class initializers in pushwork.h
// (the destructor's cleanup relies on pointers starting as null) — nothing to
// do here. NOTE(review): confirm the header zero-initializes all pointers.
PushWork::PushWork() = default;

PushWork::~PushWork()
{
    // Tear down from the data source towards the sinks: release the audio and
    // video capturers first, then the encoders and their scratch buffers.
    // If the buffers were freed first, a capture callback still in flight
    // could touch freed memory; this order avoids that.
    delete _audio_capturer;         // delete on a null pointer is a no-op
    delete _video_capturer;
    delete _audio_encoder;
    av_free(_fltp_buf);             // av_free(NULL) is a no-op
    av_frame_free(&_audio_frame);   // handles a null frame safely
    delete _video_encoder;
    delete _rtsp_pusher;
    LogInfo("~PushWork()");
}

///
/// \brief Wire up the whole push pipeline — audio/video capture -> AAC/H264
///        encoders -> RTSP pusher — then start both capture threads.
/// \param properties  configuration key/values; every key has a default
/// \return RET_OK on success; RET_FAIL or RET_ERR_OUTOFMEMORY on error.
///         Objects created before a failure are released by ~PushWork().
///
RET_CODE PushWork::init(const Properties properties)
{
    int ret = 0;
    // Record the publish start time; audio/video pts are derived from it.
    AVPublishTime::GetInstance()->Rest();

    // Audio test mode: read pcm from a file instead of a real microphone.
    _audio_test = properties.GetProperty("audio_test", 0);
    _input_pcm_name = properties.GetProperty("input_pcm_name", "D:\\data\\video\\demo\\48000_2_s16le.pcm");

    // Microphone capture properties.
    _mic_sample_rate = properties.GetProperty("mic_sample_rate", 48000);
    _mic_sample_format = properties.GetProperty("mic_sample_fmt", AV_SAMPLE_FMT_S16);
    _mic_channels = properties.GetProperty("mic_channels", 2);

    // Audio encoder parameters (default to the capture parameters).
    _audio_sample_rate = properties.GetProperty("audio_sample_rate", _mic_sample_rate);
    _audio_bit_rate = properties.GetProperty("audio_bit_rate", 128*1024);
    _audio_channels = properties.GetProperty("audio_channels", _mic_channels);
    _audio_channel_layout = av_get_default_channel_layout(_audio_channels);    // derived from _audio_channels

    // Create and initialize the AAC audio encoder.
    _audio_encoder = new AACEncoder();
    if(!_audio_encoder)
    {
        LogError("new AACEncoder failed");
        return RET_FAIL;
    }

    Properties  audioCodecProperties;
    audioCodecProperties.SetProperty("sample_rate", _audio_sample_rate);
    audioCodecProperties.SetProperty("channels", _audio_channels);
    audioCodecProperties.SetProperty("bit_rate", _audio_bit_rate);        // sample format is left to the encoder's default
    if(_audio_encoder->init(audioCodecProperties) != RET_OK)
    {
        LogError("AACEncoder init failed");
        return RET_FAIL;
    }
    // The test input is interleaved s16 while the AAC encoder consumes fltp,
    // so allocate a scratch buffer to hold one resampled (planar float) frame.
    _fltp_buf_size = av_samples_get_buffer_size(NULL, _audio_encoder->getChannels(), _audio_encoder->getFrameSamples()
                                                ,(enum AVSampleFormat)_audio_encoder->getFormat(), 1);
    _fltp_buf = (uint8_t*)av_malloc(_fltp_buf_size);
    if(!_fltp_buf)
    {
        LogError("malloc _fltp_buf failed");
        return RET_ERR_OUTOFMEMORY;
    }
    // Sanity check: the buffer we sized must equal the encoder's own idea of
    // one frame's byte size.
    int frameByte2 = _audio_encoder->getFrameBytes();
    if(_fltp_buf_size != frameByte2)
    {
        LogError("fltp_buf_size:%d != frameByte2:%d", _fltp_buf_size, frameByte2);
        return RET_FAIL;
    }
    // Create the fltp frame handed to the encoder.
    _audio_frame = av_frame_alloc();
    if(!_audio_frame)   // fixed: the allocation result was dereferenced unchecked
    {
        LogError("av_frame_alloc failed");
        return RET_ERR_OUTOFMEMORY;
    }
    _audio_frame->format = _audio_encoder->getFormat(); // effectively AV_SAMPLE_FMT_FLTP
    _audio_frame->nb_samples = _audio_encoder->getFrameSamples();
    _audio_frame->channels = _audio_encoder->getChannels();
    _audio_frame->channel_layout = _audio_encoder->getChannelLayout();
    // Allocate the frame's sample buffer.
    ret = av_frame_get_buffer(_audio_frame, 0);
    if(ret < 0)
    {
        LogError("audio_frame av_frame_get_buffer failed");
        return RET_FAIL;
    }

    // Audio capture.
    _audio_capturer = new AudioCapturer();
    Properties audioCapProperties;
    audioCapProperties.SetProperty("audio_test", 1);    // TODO: pass _audio_test once real mic capture is supported
    audioCapProperties.SetProperty("input_pcm_name", _input_pcm_name);
    audioCapProperties.SetProperty("channels", _mic_channels);
    // fixed: the capture frame size must match the encoder frame size instead
    // of a hard-coded 1024 — pcmCallback converts exactly one encoder frame
    // per callback and assumes the two agree.
    audioCapProperties.SetProperty("nb_samples", _audio_encoder->getFrameSamples());
    audioCapProperties.SetProperty("format", _mic_sample_format);
    // fixed: derive bytes-per-sample from the capture format (2 for the
    // default AV_SAMPLE_FMT_S16) instead of hard-coding 2.
    audioCapProperties.SetProperty("byte_per_sample", av_get_bytes_per_sample((enum AVSampleFormat)_mic_sample_format));
    if(_audio_capturer->init(audioCapProperties) != RET_OK)
    {
        LogError("AudioCapturer Init failed");
        return RET_FAIL;
    }
    // std::bind: one std::placeholders entry per parameter of the callback.
    _audio_capturer->addCallback(std::bind(&PushWork::pcmCallback, this, std::placeholders::_1, std::placeholders::_2));

    // Video test mode: read yuv from a file instead of grabbing the desktop.
    _video_test = properties.GetProperty("video_test", 0);
    _input_yuv_name = properties.GetProperty("input_yuv_name", "input_1280_720_420p.yuv");

    // Desktop capture properties.
    _desktop_x = properties.GetProperty("desktop_x", 0);
    _desktop_y = properties.GetProperty("desktop_y", 0);
    _desktop_width  = properties.GetProperty("desktop_width", 1920);
    _desktop_height = properties.GetProperty("desktop_height", 1080);
    _desktop_format = properties.GetProperty("desktop_pixel_format", AV_PIX_FMT_YUV420P);
    _desktop_fps = properties.GetProperty("desktop_fps", 25);

    // Video encoder parameters (default to the capture parameters).
    _video_width  = properties.GetProperty("video_width", _desktop_width);     // width
    _video_height = properties.GetProperty("video_height", _desktop_height);   // height
    _video_fps = properties.GetProperty("video_fps", _desktop_fps);            // frame rate
    _video_gop = properties.GetProperty("video_gop", _video_fps);
    _video_bit_rate = properties.GetProperty("video_bit_rate", 1024*1024);     // default 1 Mbps; TODO make a sane default per resolution
    _video_b_frames = properties.GetProperty("video_b_frames", 0);             // number of B frames

    // Create and initialize the H264 video encoder.
    _video_encoder = new H264Encoder;
    Properties video_codec_properties;
    video_codec_properties.SetProperty("width", _video_width);
    video_codec_properties.SetProperty("height", _video_height);
    video_codec_properties.SetProperty("fps", _video_fps);
    video_codec_properties.SetProperty("b_frames", _video_b_frames);
    video_codec_properties.SetProperty("bit_rate", _video_bit_rate);
    video_codec_properties.SetProperty("gop", _video_gop);
    if(_video_encoder->init(video_codec_properties) != RET_OK)
    {
        LogError("H264Encoder Init failed");
        return RET_FAIL;
    }


    // Video capture.
    _video_capturer = new VideoCapturer();
    Properties  video_cap_properties;
    video_cap_properties.SetProperty("video_test", 1);  // TODO: pass _video_test once real desktop capture is supported
    video_cap_properties.SetProperty("input_yuv_name", _input_yuv_name);
    video_cap_properties.SetProperty("width", _desktop_width);
    video_cap_properties.SetProperty("height", _desktop_height);
    if(_video_capturer->init(video_cap_properties) != RET_OK)
    {
        LogError("VideoCapturer init failed");
        return RET_FAIL;
    }
    _video_capturer->addCallback(std::bind(&PushWork::yuvCallback, this, std::placeholders::_1, std::placeholders::_2));

    // The pusher must be connected before capture starts, otherwise the first
    // captured frames would be delayed.
    _rtsp_url = properties.GetProperty("rtsp_url", "");
    _rtsp_transport = properties.GetProperty("rtsp_transport", "");
    _rtsp_timeout = properties.GetProperty("rtsp_timeout", 5000);
    _rtsp_pusher = new RtspPusher();
    if(!_rtsp_pusher)
    {
        LogError("new RtspPusher failed");
        return RET_FAIL;
    }
    Properties rtsp_properties;
    rtsp_properties.SetProperty("url", _rtsp_url);
    rtsp_properties.SetProperty("rtsp_transport", _rtsp_transport);
    rtsp_properties.SetProperty("timeout", _rtsp_timeout);
    if(_rtsp_pusher->init(rtsp_properties) != RET_OK)
    {
        LogError("rtsp_pusher init failed");
        return RET_FAIL;
    }
    // Register both elementary streams with the pusher before connecting.
    if(_video_encoder)
    {
        if(_rtsp_pusher->configVideoStream(_video_encoder->getCodecContext()) != RET_OK)
        {
            LogError("rtsp_pusher configVideoStream failed");
            return RET_FAIL;
        }
    }
    if(_audio_encoder)
    {
        if(_rtsp_pusher->configAudioStream(_audio_encoder->getCodecContext()) != RET_OK)
        {
            LogError("rtsp_pusher configAudioStream failed");
            return RET_FAIL;
        }
    }
    if(_rtsp_pusher->connect() != RET_OK)
    {
        LogError("rtsp_pusher connect failed");
        return RET_FAIL;
    }

    // Start audio capture.
    if(_audio_capturer->start() != RET_OK)
    {
        LogError("AudioCapturer start failed");
        return RET_FAIL;
    }
    // Start video capture.
    if(_video_capturer->start() != RET_OK)
    {
        LogError("VideoCapturer start failed");
        return RET_FAIL;
    }
    return RET_OK;
}

///
/// \brief Stop and release the capture threads. Remaining resources
///        (encoders, buffers, pusher) are reclaimed by the destructor.
/// \return always RET_OK
///
RET_CODE PushWork::deInit()
{
    if (_audio_capturer != nullptr) {
        _audio_capturer->stop();
        delete _audio_capturer;
        _audio_capturer = NULL;     // destructor guards on this
    }
    if (_video_capturer != nullptr) {
        _video_capturer->stop();
        delete _video_capturer;
        _video_capturer = NULL;
    }
    return RET_OK;
}

///
/// Converts two-channel interleaved s16 pcm to planar (fltp) float pcm.
/// Each 16-bit sample is scaled by 1/32768 so the output lies in [-1.0, 1.0).
/// Only stereo is supported: the left channel goes to fltp[0..nbSamples-1],
/// the right channel to fltp[nbSamples..2*nbSamples-1].
/// \brief s16le_convert_to_fltp
/// \param s16le      interleaved input, 2 * nbSamples shorts (L R L R ...)
/// \param fltp       planar output, 2 * nbSamples floats (all L, then all R)
/// \param nbSamples  samples per channel
///
void s16le_convert_to_fltp(short *s16le, float *fltp, int nbSamples)
{
    float *fltp_l = fltp;
    float *fltp_r = fltp + nbSamples;
    for(int i = 0; i < nbSamples; i++)
    {
        fltp_l[i] = s16le[i * 2] / 32768.0;
        fltp_r[i] = s16le[i * 2 + 1] / 32768.0;  // fixed: divisor was mistyped as 32678.0
    }
}

///
/// \brief Audio capture callback: dumps the raw pcm, converts interleaved
///        s16le to planar fltp, encodes to AAC, dumps the ADTS stream and
///        pushes the packet over RTSP.
/// \param pcm   interleaved s16le samples delivered by the capturer
/// \param size  number of bytes in pcm
///
void PushWork::pcmCallback(uint8_t *pcm, int32_t size)
{
    int ret = 0;
    // Lazily open the pcm dump file on the first callback.
    if(!_pcm_s16le_fp)
    {
        // ffplay -ar 48000 -channels 2 -f s16le  -i push_dump_s16le.pcm
        _pcm_s16le_fp = fopen("push_dump_s16le.pcm", "wb");
        if(!_pcm_s16le_fp)
        {
            LogError("fopen push_dump_s16le.pcm failed");
            return ;
        }
    }
    if(_pcm_s16le_fp)
    {
        // Dump the raw pcm and flush so the file is usable while running.
        fwrite(pcm, 1, size, _pcm_s16le_fp);
        fflush(_pcm_s16le_fp);
    }
    // The capturer is configured to deliver exactly one encoder frame per
    // callback, so one conversion fills _fltp_buf completely.
    s16le_convert_to_fltp((short*)pcm, (float*)_fltp_buf, _audio_frame->nb_samples);
    // Ensure the frame buffer is writable (the encoder may still hold a ref).
    ret = av_frame_make_writable(_audio_frame);
    if(ret < 0)
    {
        LogError("av_frame_make_writable failed");  // fixed: message misspelled the API name
        return ;
    }
    // Point the frame's data/linesize at _fltp_buf (fills pointers, no copy).
    ret = av_samples_fill_arrays(_audio_frame->data,
                           _audio_frame->linesize,
                           _fltp_buf,
                           _audio_frame->channels,
                           _audio_frame->nb_samples,
                           (enum AVSampleFormat)_audio_frame->format,
                           0);
    if(ret < 0)
    {
        LogError("av_samples_fill_arrays failed");
        return ;
    }
    int64_t pts = (int64_t)AVPublishTime::GetInstance()->get_audio_pts();
    int pkt_frame = 0;
    RET_CODE encode_ret = RET_OK;
    AVPacket *packet = _audio_encoder->encode(_audio_frame, pts, 0, &pkt_frame, &encode_ret);
    // On success, dump the encoded frame as ADTS so ffplay can play the file.
    if(encode_ret == RET_OK && packet)
    {
        // Lazily open the aac dump file on the first encoded packet.
        if(!_aac_fp)
        {
            // ffplay push_dump.aac
            _aac_fp = fopen("push_dump.aac", "wb");
            if(!_aac_fp)
            {
                LogError("fopen push_dump.aac failed");
                return ;
            }
        }
        if(_aac_fp)
        {
            // Each raw AAC frame needs a 7-byte ADTS header in front of it.
            uint8_t adts_header[7];
            if(_audio_encoder->getAdtsHeader(adts_header, packet->size) != RET_OK)
            {
                LogError("getAdtsHeader failed");  // fixed: message misspelled the API name
                return ;
            }
            fwrite(adts_header, 1, 7, _aac_fp);
            fwrite(packet->data, 1, packet->size, _aac_fp);
            fflush(_aac_fp);
        }
    }
    if(packet)
    {
        // fixed: %ld is wrong for int64_t on LLP64 targets; cast + %lld.
        LogInfo("PcmCallback packet->pts:%lld", (long long)packet->pts);
        // NOTE(review): assumes push() takes ownership of packet — confirm,
        // otherwise this leaks one AVPacket per frame.
        _rtsp_pusher->push(packet, E_AUDIO_TYPE);
    }
    else
    {
        LogInfo("PcmCallback packet is null");
    }
}

///
/// \brief Video capture callback: dumps the raw yuv, encodes to H264,
///        dumps the Annex-B stream and pushes the packet over RTSP.
/// \param yuv   one yuv420p frame delivered by the capturer
/// \param size  number of bytes in yuv
///
void PushWork::yuvCallback(uint8_t *yuv, int32_t size)
{
    // Lazily open the yuv dump file on the first callback.
    if(!_yuv_fp)
    {
        // ffplay -f rawvideo -video_size 1280x720 -pix_fmt yuv420p -i push_dump.yuv
        _yuv_fp = fopen("push_dump.yuv", "wb");
        if(!_yuv_fp)
        {
            LogError("fopen push_dump.yuv failed");
            return ;
        }
    }
    if(_yuv_fp)
    {
        // Dump the raw yuv and flush so the file is usable while running.
        fwrite(yuv, 1, size, _yuv_fp);
        fflush(_yuv_fp);
    }
    int64_t pts = (int64_t)AVPublishTime::GetInstance()->get_video_pts();
    int pkt_frame = 0;
    RET_CODE encode_ret = RET_OK;
    AVPacket *packet = _video_encoder->encode(yuv, size, pts, &pkt_frame, &encode_ret);
    if(packet)
    {
        if(!_h264_fp)
        {
            // ffplay push_dump.h264
            _h264_fp = fopen("push_dump.h264", "wb");
            if(!_h264_fp)
            {
                LogError("fopen push_dump.h264 failed");
                return ;
            }
            // Prepend SPS and PPS (each with an Annex-B start code) once, so
            // the dumped stream is decodable from the beginning.
            uint8_t start_code[] = {0,0,0,1};
            fwrite(start_code, 1, 4, _h264_fp);
            fwrite(_video_encoder->get_sps_data(), 1, _video_encoder->get_sps_size(), _h264_fp);
            fwrite(start_code, 1, 4, _h264_fp);
            fwrite(_video_encoder->get_pps_data(), 1, _video_encoder->get_pps_size(), _h264_fp);
        }
        if(_h264_fp)
        {
            fwrite(packet->data, 1, packet->size, _h264_fp);
            fflush(_h264_fp);
        }
    }
    if(packet)
    {
        // fixed: %d with an int64_t argument is undefined behavior; cast + %lld.
        LogInfo("YuvCallback packet->pts:%lld", (long long)packet->pts);
        // NOTE(review): assumes push() takes ownership of packet — confirm,
        // otherwise this leaks one AVPacket per frame.
        _rtsp_pusher->push(packet, E_VIDEO_TYPE);
    }
    else
    {
        LogInfo("video packet is null");
    }
}
