/*
* AudioFramedSource.cpp
*
*  Created on: 2015年11月26日
*      Author: terry
*/

#include "AudioFramedSource.h"

#include <algorithm>
#include <cstring>

#include "GroupsockHelper.hh"
#include "CLog.h"

static const size_t MAX_QUEUE_SIZE = 30;

namespace av
{


	// Construct an audio FramedSource that pulls packets from 'source'.
	// 'clockRate' is used in onTask() to convert packet pts values to
	// microseconds (pts * 1000000 / clockRate).
	AudioFramedSource::AudioFramedSource(UsageEnvironment& env, MediaSourcePtr source, int clockRate) :
		FramedSource(env),
		m_source(source),
		m_taskID(),        // scheduler event trigger; created lazily in doGetNextFrame()
		m_streamEvent(),   // last event from onMediaEvent(); checked for STREAM_EVENT_END
		m_clockRate(clockRate),
		m_lastPts(),       // previous packet pts in usec; 0 means "no frame seen yet"
		m_idle()
	{
		// Bridge the MediaSource's sink callbacks back into this object
		// (onMediaFormat / onMediaPacket / onMediaEvent).
		m_linkSink.reset(new LinkMediaSink(this));

		m_source->addSink(m_linkSink);

		gettimeofday(&m_startTime, 0);

		CLog::info("AudioFramedSource(%p)\n", this);
	}

	// Detach from the media source and release scheduler resources.
	AudioFramedSource::~AudioFramedSource()
	{
		CLog::info("~AudioFramedSource(%p)\n", this);

		// Release the event trigger in case doStopGettingFrames() was never
		// called; otherwise the trigger id leaks in the TaskScheduler.
		if (m_taskID)
		{
			envir().taskScheduler().deleteEventTrigger(m_taskID);
			m_taskID = EventTriggerId();
		}

		if (m_source)
		{
			m_source->removeSink(m_linkSink);
		}

		// Break the back-pointer so late callbacks cannot reach this object.
		if (m_linkSink)
		{
			m_linkSink->resetSink(NULL);
		}

		m_pktQueue.clear();
	}

	// live555 callback: the downstream sink wants the next audio frame.
	void AudioFramedSource::doGetNextFrame()
	{
		// Upstream signalled end-of-stream: report closure to live555.
		if (m_streamEvent == STREAM_EVENT_END)
		{
			handleClosure();
			return;
		}

		// Lazily create the event trigger that onMediaPacket() uses to
		// re-enter this method from the scheduler thread.
		if (!m_taskID)
		{
			CLog::info("audio doGetNextFrame. create task.\n");

			m_taskID = envir().taskScheduler().createEventTrigger(taskFunc);
		}

		// Deliver immediately when data is queued; otherwise stay idle until
		// the next incoming packet fires the trigger.
		if (m_pktQueue.size() > 0)
		{
			m_idle = !onTask();
		}
	}

	// live555 callback: the sink no longer wants frames. Drop pending data
	// and release the scheduler trigger (it is recreated on demand).
	void AudioFramedSource::doStopGettingFrames()
	{
		m_pktQueue.clear();

		if (m_taskID)
		{
			envir().taskScheduler().deleteEventTrigger(m_taskID);
			m_taskID = EventTriggerId();
		}
	}

	// Sink callback for format changes. Intentionally a no-op: this source
	// only forwards packet payloads and does not react to format updates.
	void AudioFramedSource::onMediaFormat(const MediaFormat& fmt)
	{

	}

	// Sink callback: queue an incoming audio packet and wake the consumer.
	// Video packets and null packets are ignored.
	void AudioFramedSource::onMediaPacket(MediaPacketPtr& pkt)
	{
		if (!pkt || pkt->isVideo())
		{
			return;
		}

		if (!m_taskID)
		{
			CLog::info("audio taskID is empty.\n");
		}

		size_t queSize = m_pktQueue.push(pkt);

		// Backlog overflow: the consumer is too slow; drop everything queued.
		// (Original log said "#video" — copy-paste bug; this is the audio path.
		//  %zu matches size_t; the old %d was undefined behavior on LP64.)
		if (queSize > MAX_QUEUE_SIZE)
		{
			CLog::info("#audio too many pkt. queue:%zu\n", queSize);
			m_pktQueue.clear();
		}

		// Wake the scheduler thread when the first packet arrives or when a
		// backlog builds up (same trigger conditions as before, merged: the
		// overflow case above satisfies queSize > MAX_QUEUE_SIZE / 3 too).
		if (m_taskID && (queSize == 1 || queSize > MAX_QUEUE_SIZE / 3))
		{
			envir().taskScheduler().triggerEvent(m_taskID, this);
		}
	}

	// Sink callback: record the latest stream event. doGetNextFrame()
	// checks this for STREAM_EVENT_END to close the source.
	void AudioFramedSource::onMediaEvent(int event)
	{
		m_streamEvent = event;
	}

	void AudioFramedSource::taskFunc(void* clientData)
	{
		AudioFramedSource* pSource = (AudioFramedSource*)clientData;
		pSource->doGetNextFrame();
	}

	bool AudioFramedSource::onTask()
	{
		if (!isCurrentlyAwaitingData())
		{
			return false;
		}

		MediaPacketPtr mediaPacket;
		if (m_pktQueue.pop(mediaPacket))
		{
			MediaPacket& pkt = *mediaPacket.get();

			uint8_t* data = pkt.data;
			int size = pkt.size;

			int len = std::min(size, (int)fMaxSize);
			memcpy(fTo, data, len);
			fFrameSize = len;

			int64_t pts = pkt.pts * 1000000 / m_clockRate;

			if (m_lastPts == 0)
			{
				m_lastPts = pts;
			}

			int64_t duration = pts - m_lastPts;

			if ((fPresentationTime.tv_sec == 0) && (fPresentationTime.tv_usec == 0))
			{
				gettimeofday(&fPresentationTime, NULL);

				m_lastPts = pts;
			}
			else
			{
				long usec = (long)(duration + fPresentationTime.tv_usec);
				fPresentationTime.tv_sec += usec / 1000000;
				fPresentationTime.tv_usec = usec % 1000000;
			}

			m_lastPts = pts;

			// struct timeval tvNow;
			// gettimeofday(&tvNow, NULL);
			// long delta = (fPresentationTime.tv_sec - tvNow.tv_sec) * 1000 + (fPresentationTime.tv_usec - tvNow.tv_usec)/1000;
			// if (delta >= 100 || delta <= -100)
			// {
			// CLog::info("audio pts:%lld. fpts:%ld.%ld, clock:%ld.%ld, delta:%ld, queue:%d\n",
			// pts, fPresentationTime.tv_sec, fPresentationTime.tv_usec / 1000,
			// tvNow.tv_sec, tvNow.tv_usec / 1000, delta,
			// m_pktQueue.size());
			// }

			afterGetting(this);

			return true;
		}
		else
		{
			//CLog::info("------- audio no pkt\n");
			return false;
		}
	}


} /* namespace av */
