/*
 * CMediaSource.cpp
 *
 *  Created on: 2016年3月18日
 *      Author: terry
 */

#include "CMediaSource.h"
#include "CLog.h"
#include "DateTime.h"
#include "TStringCast.h"
#include "FfmpegUtil.h"
#include "Path.h"
#include "AacHelper.h"


namespace av
{

static int check_interrupt(void* opaque)
{
    CMediaSource* obj = (CMediaSource*)opaque;
    return obj->checkInterrupt();
}

// Constructs a stopped, unopened source: no dictionary, no format context,
// both stream indices invalid.
// NOTE(review): the previous body contained leftover AAC-config experiments
// (AacHelper::makeConfig / parseConfig on hard-coded bytes plus an assert)
// that had no effect on object state; removed as dead code.
CMediaSource::CMediaSource():
		m_dict(),
		m_fmtContext(),
		m_videoIdx(-1),
		m_audioIdx(-1),
		m_state(STATE_STOPPED),
        m_interrupted(),
        m_paused(),
		m_isFile(),
		m_curTime()
{
}

CMediaSource::~CMediaSource()
{
	// Release the demuxer context and option dictionary; close() is safe
	// to call even when nothing was opened.
	close();
}

// Opens the given URL with demuxer options parsed from `params`
// ("key=value;..." pairs) and captures the stream format into m_format.
// Returns 0 on success, a non-zero error code otherwise.
int CMediaSource::open(const std::string& url, const std::string& params)
{
    CLog::info("CMediaSource(%p) open begin. %s\n", this, url.c_str());

	// Re-opening an already-open source tears the old input down first.
	if (isOpen())
	{
		close();
	}

	m_url = url;
	m_params = params;
	m_isFile = isFile();

	int rc = openInput(url, params);
	if (rc != 0)
	{
		CLog::warning("failed to open url. rc:%d\n", rc);
		return rc;
	}

	fetchMediaFormat();

	CLog::info("CMediaSource(%p) open end. video codec:%d, profile: %d, width:%d, height:%d\n",
		this,
		m_format.m_codec, m_format.m_profile,
		m_format.m_width, m_format.m_height);

    // BUGFIX: the format string has four conversions, but `this` was passed
    // as an extra leading argument, shifting every logged value by one.
    CLog::info("audio codec:%d, profile: %d, channels:%d, rate:%d\n",
		m_format.m_audioCodec, m_format.m_audioProfile,
		m_format.m_channels, m_format.m_sampleRate);

	return 0;
}

// Tears down the demuxer, then clears any pending interrupt request so a
// subsequent open() is not aborted immediately by the interrupt callback.
void CMediaSource::close()
{
	closeInput();
	m_interrupted = 0;
}

bool CMediaSource::isOpen()
{
	// The source counts as open while an input context is allocated.
	return isInputOpen();
}

bool CMediaSource::getFormat(MediaFormat& fmt)
{
	// Copies out the format captured by fetchMediaFormat(). Always returns
	// true, even if open() has not run yet (fmt then holds defaults).
	fmt = m_format;
	return true;
}

int CMediaSource::getDuration()
{
	// Duration in milliseconds (see fetchMediaFormat); unchanged default
	// when the demuxer reported no duration.
	return m_format.m_duration;
}

// Resumes a paused network stream via av_read_play(). Returns 0 when the
// source is already playing or on successful resume; ENOENT when nothing
// is open; otherwise the av_read_play() error.
int CMediaSource::play()
{
	if (!m_fmtContext)
	{
		return ENOENT;
	}

    if (!m_paused)
    {
        return 0;
    }

    // BUGFIX: the paused flag was never cleared, so after a successful
    // resume every later play() would call av_read_play() again.
    int rc = av_read_play(m_fmtContext);
    if (rc == 0)
    {
        m_paused = false;
    }
    return rc;
}

int CMediaSource::pause()
{
	if (!m_fmtContext)
	{
		return ENOENT;
	}

    m_paused = true;
	return av_read_pause(m_fmtContext);
}

void CMediaSource::stop()
{
	// Only flags the source as paused; the input is not closed here.
	// NOTE(review): m_state is not moved to STATE_STOPPED — confirm whether
	// callers expect getState() to change after stop().
    m_paused = true;
}

int CMediaSource::getState()
{
	// Thread-safe read of the stream state, guarded by m_cs.
	comn::AutoCritSec lock(m_cs);
	return m_state;
}

// A source is seekable only when the demuxer reported a positive duration
// (i.e. it is a bounded file/VOD, not a live stream).
bool CMediaSource::seekable()
{
	return m_format.m_duration > 0;
}

// Seeks to `offset` milliseconds from the start, letting FFmpeg pick the
// default stream (-1). ENOENT when nothing is open.
int CMediaSource::seek(int64_t offset)
{
	if (m_fmtContext == NULL)
	{
		return ENOENT;
	}

	// Convert milliseconds to AV_TIME_BASE units.
	int64_t ts = offset * (AV_TIME_BASE / 1000);
	return av_seek_frame(m_fmtContext, -1, ts, 0);
}

int64_t CMediaSource::getTime()
{
	// Timestamp of the last packet delivered by doRead(), in AV_TIME_BASE
	// (microsecond) units after rescaling.
	return m_curTime;
}

int CMediaSource::setScale(float scale)
{
	// Playback-rate changes are not supported by this source.
	return ENOSYS;
}

// Always reports normal playback speed; setScale() is unimplemented.
float CMediaSource::getScale()
{
	return 1.0f;
}

// Reads one packet into `pkt`. Returns 0 on success, ENOENT when nothing
// is open, or an FFmpeg error (including AVERROR(EAGAIN) for packets from
// streams other than the selected audio/video ones — caller should retry).
int CMediaSource::read(AVPacket& pkt)
{
	if (m_fmtContext == NULL)
	{
		return ENOENT;
	}

	return doRead(pkt);
}


// Allocates the format context, wires the interrupt callback, opens the
// input and probes stream info, then selects the best video/audio streams.
// Returns 0 on success, ENOSYS if the URL cannot be opened, ENOSPC if
// stream probing fails.
int CMediaSource::openInput(const std::string& url, const std::string& params)
{
    comn::DateTime timeStart = comn::DateTime::now();

	parseParams(&m_dict, params);

	AVInputFormat* fmt = NULL;
    m_fmtContext = avformat_alloc_context();

    // Allow interrupt() to abort blocking network I/O.
    m_fmtContext->interrupt_callback.callback = check_interrupt;
    m_fmtContext->interrupt_callback.opaque = this;

    ffmpeg::av_dict_get_int(m_dict, "max_delay", m_fmtContext->max_delay);

	// avformat_open_input() returns 0 on success and frees the context
	// itself on failure.
	int rc = avformat_open_input(&m_fmtContext, url.c_str(), fmt, &m_dict);
	if (rc < 0)
	{
		return ENOSYS;
	}

	ffmpeg::av_dict_get_int(m_dict, "max_analyze_duration", m_fmtContext->max_analyze_duration);

	ffmpeg::av_dict_get_int(m_dict, "probesize", m_fmtContext->probesize);

	// BUGFIX: avformat_find_stream_info() returns >= 0 on success (positive
	// values are informational), so only negative values are errors. The
	// previous `rc != 0` check rejected successful opens.
	rc = avformat_find_stream_info(m_fmtContext, NULL);
	if (rc < 0)
	{
		avformat_close_input(&m_fmtContext);
		return ENOSPC;
	}

	comn::DateTime timeNow = comn::DateTime::now();
	time_t ms = timeNow.subtract(timeStart);
	// Cast: %d with a 64-bit time_t is undefined on LP64 platforms.
	CLog::info("elapse of open url. %d\n", (int)ms);

	AVCodec* vCodec = NULL;
	AVCodec* aCodec = NULL;
	m_videoIdx = av_find_best_stream(m_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, &vCodec, 0);
	m_audioIdx = av_find_best_stream(m_fmtContext, AVMEDIA_TYPE_AUDIO, -1, m_videoIdx, &aCodec, 0);

	return 0;
}

void CMediaSource::closeInput()
{
	if (m_fmtContext)
	{
		avformat_close_input(&m_fmtContext);
	}

	m_videoIdx = -1;
	m_audioIdx = -1;

	if (m_dict)
	{
		av_dict_free(&m_dict);
	}
}

bool CMediaSource::isInputOpen()
{
	// Open while a format context is allocated (cleared by closeInput()).
	return (m_fmtContext != NULL);
}

void CMediaSource::fetchMediaFormat()
{
	if (m_videoIdx >= 0)
	{
		AVStream* stream = m_fmtContext->streams[m_videoIdx];
        AVCodecContext* codec = stream->codec;
		m_format.m_codec = stream->codec->codec_id;
        m_format.m_profile = codec->profile;
		m_format.m_width = stream->codec->width;
		m_format.m_height = stream->codec->height;
		m_format.m_bitrate = stream->codec->bit_rate;

        if (codec->extradata_size > 0)
        {
            m_format.m_videoProp.assign((char*)codec->extradata, codec->extradata_size);
        }
	}

	if (m_audioIdx >= 0)
	{
		AVStream* stream = m_fmtContext->streams[m_audioIdx];
        AVCodecContext* codec = stream->codec;
		m_format.m_audioCodec = stream->codec->codec_id;
        m_format.m_audioProfile = codec->profile;

		m_format.m_channels = stream->codec->channels;
		m_format.m_sampleRate = stream->codec->sample_rate;
		m_format.m_sampleBits = stream->codec->bits_per_coded_sample;
		m_format.m_frameSize = stream->codec->frame_size;

        //m_format.m_clockrate = stream->time_base.den;

        if (codec->extradata_size > 0)
        {
            m_format.m_audioConfig.assign((char*)codec->extradata, codec->extradata_size);
        }
	}

    if (m_fmtContext->duration != AV_NOPTS_VALUE)
    {
	    m_format.m_duration = m_fmtContext->duration * 1000 / AV_TIME_BASE;
    }

}

void CMediaSource::setState(StreamState state)
{
	// Thread-safe write of the stream state, guarded by m_cs.
	comn::AutoCritSec lock(m_cs);
	m_state = state;
}

// Parses "key=value;key=value" option pairs into *pm and forces the
// "connect" option on. Returns the av_dict_parse_string() result.
int CMediaSource::parseParams(AVDictionary **pm, const std::string& params)
{
	int result = av_dict_parse_string(pm, params.c_str(), "=", ";", 0);

	av_dict_set(pm, "connect", "1", 0);

	return result;
}

void CMediaSource::interrupt()
{
    // Request abort of any blocking FFmpeg I/O; checkInterrupt() reports
    // this flag through the format context's interrupt callback.
    comn::AutoCritSec lock(m_cs);
    m_interrupted = true;
}

// A local file is never live; any other source is reported live.
// NOTE(review): a duration-based check (m_format.m_duration <= 0) had been
// disabled by an unconditional `return true;` and sat unreachable below
// it — removed as dead code without changing behavior.
bool CMediaSource::isLive()
{
	if (isFile())
	{
		return false;
	}

	return true;
}


int CMediaSource::checkInterrupt()
{
    // Non-zero makes FFmpeg abort the current blocking operation; called
    // from the interrupt callback installed in openInput().
    comn::AutoCritSec lock(m_cs);
    return m_interrupted;
}

// Reads one packet, normalizes its timestamps to AV_TIME_BASE units, and
// tags it with MEDIA_TYPE_VIDEO / MEDIA_TYPE_AUDIO in stream_index.
// Packets from other streams are dropped and reported as AVERROR(EAGAIN).
int CMediaSource::doRead(AVPacket& pkt)
{
	comn::DateTime timeStart = comn::DateTime::now();

	av_init_packet(&pkt);
	int rc = av_read_frame(m_fmtContext, &pkt);
	if (rc == 0)
	{
		// Rescale pts/dts/duration from the stream time base to
		// 1/AV_TIME_BASE so every packet shares one clock.
		AVStream* stream = m_fmtContext->streams[pkt.stream_index];
		AVRational tb_dst = av_make_q(1, AV_TIME_BASE);
		av_packet_rescale_ts(&pkt, stream->time_base, tb_dst);

		if (pkt.stream_index == m_videoIdx)
		{
			// Replace the demuxer index with the app-level media type tag.
			pkt.stream_index = MEDIA_TYPE_VIDEO;

			// Files may omit pts; synthesize one from the previous time.
			if ((pkt.pts == AV_NOPTS_VALUE) && isFile())
			{
				pkt.pts = m_curTime + pkt.duration;
			}

			m_curTime = pkt.pts;
		}
		else if (pkt.stream_index == m_audioIdx)
		{
			pkt.stream_index = MEDIA_TYPE_AUDIO;

			m_curTime = pkt.pts;
		}
		else
		{
			// Unselected stream (e.g. data/subtitles): drop the packet and
			// ask the caller to retry.
			av_free_packet(&pkt);
			rc = AVERROR(EAGAIN);
		}

        time_t ms = timeStart.getElapse();
		if (ms > 200)
		{
			CLog::info("-- elapse of doRead. %d\n", ms);
		}
	}

	return rc;
}

bool CMediaSource::isFile()
{
	// Treat the URL as a file when it names an existing local path.
	return comn::Path::exist(m_url);
}



} /* namespace av */
