extern "C"{

#ifdef __cplusplus
 #define __STDC_CONSTANT_MACROS
 #ifdef _STDINT_H
  #undef _STDINT_H
 #endif
#endif

}

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <assert.h>

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>

#include <sys/types.h>
#include <sys/syscall.h>


extern "C"
{
	#include <time.h>
	#include "encode.h"
}
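
/*
 * Interface assumed from encode.h, inferred from how it is used below (the
 * real header may differ):
 *   video_encode_init()  - set up capture and the OMX H.264 encoder
 *   video_encode_test()  - capture + encode one frame into
 *                          encoder_output_buffer; returns < 0 on failure
 *   closeEncode()        - tear down the encoder
 */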

static UsageEnvironment *_env = 0;

#define SINK_PORT 3030

#define FRAME_PER_SEC 30.0

extern OMX_BUFFERHEADERTYPE *encoder_output_buffer;

// glibc (before 2.30) provides no gettid() wrapper, so call the syscall directly.
pid_t gettid()
{
	return syscall(SYS_gettid);
}


// Frame source: webcam capture fed through the hardware H.264 encoder
class WebcamFrameSource : public FramedSource
{
	void *mp_capture, *mp_compress;	// V4L2 capture / encoder handles (unused in this file)
	int m_started;
	void *mp_token;

public:
	WebcamFrameSource (UsageEnvironment &env)
		: FramedSource(env)
	{
		m_started = 0;
		mp_token = 0;
		video_encode_init();
	}

	~WebcamFrameSource ()
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		closeEncode();
		envir().taskScheduler().unscheduleDelayedTask(mp_token);
	}

protected:
	virtual void doGetNextFrame ()
	{
		if (m_started)
		{
			fprintf(stderr, "%s returned!!\n", __func__);
			return;
		}
		m_started = 1;

		// Pace delivery to the target frame rate: schedule the actual
		// capture/encode 1/FRAME_PER_SEC seconds from now.
		double delay = 1000.0 / FRAME_PER_SEC;	// ms
		int to_delay = delay * 1000;		// us
		mp_token = envir().taskScheduler().scheduleDelayedTask(to_delay,
				getNextFrame, this);
	}

	// Important: without this override, the fMaxSize handed to
	// doGetNextFrame() can be smaller than an encoded frame, so frames get
	// truncated and the picture breaks up.
	virtual unsigned maxFrameSize () const
	{
		return 100*1024;
	}
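
	// Note (assumption, not exercised in this file): if frames still come
	// out truncated on the sending side, the usual companion fix in live555
	// is to raise the static OutPacketBuffer::maxSize (declared in
	// MediaSink.hh) before the RTPSink is created, e.g.:
	//   OutPacketBuffer::maxSize = 100*1024;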

private:
	static void getNextFrame (void *ptr)
	{
		((WebcamFrameSource*)ptr)->getNextFrame1();
	}

	void getNextFrame1 ()
	{
		m_started = 0;

		int ret = video_encode_test();
		if (ret < 0)
		{
			fprintf(stderr, "video_encode_test() failed: %d\n", ret);
			closeEncode();
			m_started = 1;
			return;
		}

		fFrameSize = encoder_output_buffer->nFilledLen;
		if (encoder_output_buffer->nOffset > 0)
		{
			// nOffset is an OMX_U32, not a string
			printf("encoder_output_buffer->nOffset=%u\n",
					(unsigned)encoder_output_buffer->nOffset);
		}
		printf("fMaxSize=%u\n", fMaxSize);
		if (fFrameSize > 0)
		{
			if (fFrameSize > fMaxSize)
			{
				fNumTruncatedBytes = fFrameSize - fMaxSize;
				fFrameSize = fMaxSize;
				fprintf(stderr, "fNumTruncatedBytes=%u\n", fNumTruncatedBytes);
			}
			else
			{
				fNumTruncatedBytes = 0;
			}

			// Copy from the start of the valid payload (honor nOffset)
			memmove(fTo, encoder_output_buffer->pBuffer + encoder_output_buffer->nOffset,
					fFrameSize);
			gettimeofday(&fPresentationTime, 0);
			fDurationInMicroseconds = (unsigned)(1000000.0 / FRAME_PER_SEC);
			afterGetting(this);
		}
		else
		{
			fprintf(stderr, "fFrameSize=%u\n", fFrameSize);
		}
	}
};

class WebcamOndemandMediaSubsession : public OnDemandServerMediaSubsession
{
public:
	static WebcamOndemandMediaSubsession *createNew (UsageEnvironment &env, FramedSource *source)
	{
		return new WebcamOndemandMediaSubsession(env, source);
	}

protected:
	WebcamOndemandMediaSubsession (UsageEnvironment &env, FramedSource *source)
		: OnDemandServerMediaSubsession(env, True)	// reuseFirstSource: all clients share one source/encoder
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		mp_source = source;
		mp_sdp_line = 0;
	}

	~WebcamOndemandMediaSubsession ()
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		if (mp_sdp_line) free(mp_sdp_line);
	}

private:
	static void afterPlayingDummy (void *ptr)
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession*)ptr;
		This->m_done = 0xff;	// signal the SDP event loop to exit
	}

	static void chkForAuxSDPLine (void *ptr)
	{
		WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession *)ptr;
		This->chkForAuxSDPLine1();
	}

	void chkForAuxSDPLine1 ()
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		if (mp_dummy_rtpsink->auxSDPLine())
			m_done = 0xff;
		else {
			int delay = 100*1000;	// 100ms
			nextTask() = envir().taskScheduler().scheduleDelayedTask(delay,
					chkForAuxSDPLine, this);
		}
	}

protected:
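	// An H.264 SDP description needs sprop-parameter-sets (SPS/PPS), which
	// the RTPSink only learns after it has consumed some encoded frames.
	// So getAuxSDPLine() starts the sink playing into the void, runs a
	// nested event loop until auxSDPLine() turns non-NULL (polled by
	// chkForAuxSDPLine above), then stops playing and caches the line.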
	virtual const char *getAuxSDPLine (RTPSink *sink, FramedSource *source)
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		if (mp_sdp_line) return mp_sdp_line;

		mp_dummy_rtpsink = sink;
		mp_dummy_rtpsink->startPlaying(*source, 0, 0);
		chkForAuxSDPLine(this);
		m_done = 0;
		envir().taskScheduler().doEventLoop(&m_done);
		mp_sdp_line = strdup(mp_dummy_rtpsink->auxSDPLine());
		mp_dummy_rtpsink->stopPlaying();

		return mp_sdp_line;
	}

	virtual RTPSink *createNewRTPSink(Groupsock *rtpsock, unsigned char type, FramedSource *source)
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		return H264VideoRTPSink::createNew(envir(), rtpsock, type);
	}

	virtual FramedSource *createNewStreamSource (unsigned sid, unsigned &bitrate)
	{
		fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
		bitrate = 500;	// estimated bitrate, in kbps
		return H264VideoStreamFramer::createNew(envir(), new WebcamFrameSource(envir()));
	}

private:
	FramedSource *mp_source;	// the WebcamFrameSource (stored but unused; createNewStreamSource builds its own)
	char *mp_sdp_line;
	RTPSink *mp_dummy_rtpsink;
	char m_done;
};


int main (int argc, char **argv)
{
	// env
	TaskScheduler *scheduler = BasicTaskScheduler::createNew();
	_env = BasicUsageEnvironment::createNew(*scheduler);

	// test
	//test(*_env);

	// rtsp server
	RTSPServer *rtspServer = RTSPServer::createNew(*_env, 9554);
	if (!rtspServer) {
		fprintf(stderr, "ERR: create RTSPServer err\n");
		::exit(-1);
	}

	// add live stream
	do {
		WebcamFrameSource *webcam_source = 0;	// unused; the subsession creates its own source

		// register the camera stream
		ServerMediaSession *sms = ServerMediaSession::createNew(*_env, "webcam", 0, "Session from RaspberryPi Camera");
		sms->addSubsession(WebcamOndemandMediaSubsession::createNew(*_env, webcam_source));
		rtspServer->addServerMediaSession(sms);

		char *url = rtspServer->rtspURL(sms);
		*_env << "using url \"" << url << "\"\n";
		delete [] url;
	} while (0);

	// run the live555 event loop (never returns)
	_env->taskScheduler().doEventLoop();

	return 0;	// not reached
}
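
/*
 * Build/run sketch. File names, include paths and the OMX userland libraries
 * are assumptions; adjust for your live555 install and Pi firmware layout:
 *
 *   g++ -o rtsp_webcam rtsp_webcam.cpp encode.c \
 *       -I/usr/local/include/liveMedia -I/usr/local/include/groupsock \
 *       -I/usr/local/include/BasicUsageEnvironment -I/usr/local/include/UsageEnvironment \
 *       -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment \
 *       -L/opt/vc/lib -lopenmaxil -lbcm_host -lvcos -lpthread
 *
 * Then play the stream (port 9554 and stream name "webcam" come from the
 * code above):
 *
 *   ffplay rtsp://<pi-address>:9554/webcam
 */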

