#include <netinet/in.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <sys/socket.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>

#include <list>
#include <string>

#include <libxml/parser.h>
#include <libxml/xpath.h>

#include "rtpsession.h"
#include "rtpudpv4transmitter.h"
#include "rtpipv4address.h"
#include "rtpsessionparams.h"
#include "rtperrors.h"
#include "rtppacket.h"


#include "com_sct_mycamapp_CamJni.h"
#include "screen.h"

#include "sct/core/common.h"
#include "sct/core/mutex.h"
#include "sct/core/thread.h"
#include "sct/framework/module.h"
#include "sct/framework/object.h"

#include "sct/encode/encodeinterface.h"
#include "sct/decode/decodeinterface.h"

#include "sct/mux/tsmux.h"
#include "sct/demux/tsdemux.h"

#include "sct/rtspserver/rtspserverinterface.h"
#include "sct/rtspclient/rtspclientinterface.h"

#include "sct/httpclient/httpclientinterface.h"

// Winsock-style aliases over the BSD socket API (portability shims;
// currently unused in this file).
#define SOCKET int
#define SOCKADDR struct sockaddr

// MPEG-TS PIDs for the muxed elementary streams.
#define SCT_TS_AUDIO_AAC_PID	0X200
#define SCT_TS_VIDEO_AVC_PID	0x400

using namespace std;
using namespace jrtplib;
using namespace sct;
using namespace sct::framework;

// Log (but do not abort on) a negative jrtplib error code.
#define checkerror(rtperr) if(rtperr < 0) SCT_LOGE("%s", RTPGetErrorString(rtperr).c_str())

// XML/HTTP protocol constants; several are currently unused in this file.
static const char* gUrlRootPath = "/";
static const char* gLabelConf = "conf";
static const char* gLabelRtsp = "rtsp";
static const char* gLabelUrls = "urls";
static const char* gXMLVersion = "1.0";
static const char* gMethodGet = "GET";
static const char* gMethodPost = "POST";


xmlXPathObjectPtr
getnodeset (xmlDocPtr doc, xmlChar *xpath){
	///Evaluate the XPath expression `xpath` against `doc` and return the
	///matching node-set.  Returns NULL on context/evaluation failure or
	///when no node matched; otherwise the caller must release the result
	///with xmlXPathFreeObject().
	xmlXPathContextPtr ctx = xmlXPathNewContext(doc);
	if (NULL == ctx) {
		printf("Error in xmlXPathNewContext\n");
		return NULL;
	}

	xmlXPathObjectPtr obj = xmlXPathEvalExpression(xpath, ctx);
	xmlXPathFreeContext(ctx);

	if (NULL == obj) {
		printf("Error in xmlXPathEvalExpression\n");
		return NULL;
	}

	if (xmlXPathNodeSetIsEmpty(obj->nodesetval)) {
		xmlXPathFreeObject(obj);
		printf("No result\n");
		return NULL;
	}

	return obj;
}


///Lifecycle states for the camera/JNI singleton (declared for state
///tracking; the values are not referenced elsewhere in this file).
enum CamState
{
    CS_Terminated = SCT_ENUM_OFFSET,
    CS_Inited,
    CS_Running,
};

///States of the playback pipeline, stored in CCamJni::mPlayerState and
///guarded by CCamJni::mPlayerMutex.  The decode threads run while the
///state is PS_Decoding.
enum PlayerState
{
	PS_Terminated = SCT_ENUM_OFFSET,	// player torn down / never started
	PS_Inited,							// decoders created, threads not yet polling
	PS_Decoding,						// decode threads actively running
	PS_Stopped,							// stop requested; threads are exiting
};


///Decoded-PCM staging buffer shared between the audio decoder callback
///(producer, CCamJni::onGenData) and the Java-side consumer
///(Java_com_sct_mycamapp_CamJni_getpcmdata).  All fields are guarded
///by `mutex`.
class CPCMData
{
public:
	uint8_t* data;			// backing buffer, allocated by the creator
	int len;				// bytes currently buffered
	int max_len;			// capacity of `data` in bytes
	sct::CMutex * mutex;	// guards data/len
};

///Audio/video stream parameter holder.  Only used as the (always-NULL)
///argument type of CMediaSource::hasVideoStream/hasAudioStream in this
///file; the fields are never read here.
class CAVParam
{
public:
	int profile;
	int frequency;
	int channelConfig;
};

class CTsMod;
class CRTSPMod;
class CMediaSource;
///Central JNI-side singleton (see gCamJni).  Owns the capture pipeline
///(encoders -> TS mux -> RTSP/RTP out) and, while playing, the player
///pipeline (RTSP client -> decoders -> screen / PCM buffer).  Receives
///encoder, decoder, thread and timer callbacks.
class CCamJni: public CEncodeCallBack, public CDecodeCallback, public CThreadCallBack,
	public CObject
{
public:
	RTPSession * mSess;		// RTP session pushing muxed TS to the media center
	uint32_t mStreamId;		// stream id assigned by the media center

    CEncodeInterface * mVideoEncoder;
    CEncodeInterface * mAudioEncoder;
    CEncodeParam mVParam;	// H.264 encode settings
    CEncodeParam mAParam;	// AAC encode settings

    int64_t mBaseTime;		// capture epoch (us); encoder pts are relative to it

	CTsMod * mTsMod;		// TS muxer module
	CRTSPMod * mRTSPMod;	// RTSP "/mod" server fed by mTsMod's pin

    CRTSPServerInterface * mRtsp;	// second RTSP server serving "/ts" and "/raw"
    CServerMediaSession * mTsSession;
    CServerMediaSubsession *mTsSubsession;

    CServerMediaSession * mRawSession;
    CServerMediaSubsession * mRawH264Subsession;
    CServerMediaSubsession * mRawAACSubsession;

    ///player
    CMediaSource * mMediaSource;

	sct::CMutex * mPlayerMutex;	// guards mPlayerState and player setup/teardown
	int mPlayerState;			// a PlayerState value

	CThread * mThdVideoDecode;	// runs decode_thread()
	CThread * mThdAudioDecode;	// runs decode_thread_audio()

	int64_t mStreamTime;		// pts (us) of the last played video frame
	double mNxtPlayTime;		// wall-clock time to present the next frame
	///

	CDecodeInterface * mVideoDecoder;
	CDecodeInterface * mAudioDecoder;

	CPCMData * mPcmData;		// decoded PCM handed to Java via getpcmdata()
	int mPcmHungry;				// set while the Java side is waiting for PCM

	CHttpClientInterface * mHttpClient;
	uint8_t* mHttpBuff;			// shared HTTP response buffer
	int mHttpBuffMaxSz;

	int mTimerBootup;			// one-shot timer id (arms the keepalive)
	int mTimerPeriod;			// periodic keepalive timer id
	CThread * mObjectLoop;		// drives CObject::tick()
protected:
	///Encoder output: H.264/AAC frames -> TS mux + raw RTSP subsessions.
	virtual void onGenData(CEncodeInterface * caller,
				const void* buf, const int len,
				const CEncodeInterface::CBlockInfo & info);

	///Decoder output: video -> screen, audio PCM -> mPcmData.
	virtual void onGenData(CDecodeInterface* caller,
				const void* buf, const int len,
				const CDecodeInterface::CBlockInfo & info);

	///CThread entry point; dispatches on `caller` (see implementation).
	virtual int main(CThread * caller);

	///CObject timer callback (bootup one-shot + periodic keepalive).
	virtual void onTimerTimeout(const int id);
};

///Process-wide singleton, created once in Java_com_sct_mycamapp_CamJni_create
///and intentionally never destroyed (see ..._destroy).
static CCamJni * gCamJni = NULL;
///Muxes encoded H.264/AAC frames into MPEG-TS.  A worker thread paces
///PAT/PMT/NULL (every ~0.4s) and PCR (every ~30ms) packets; encoded
///frames enter via putVideoData()/putAudioData().  Each finished
///188-byte TS packet leaves through the output pin in onTsPacketBuilded().
class CTsMod:public CTsMuxCallback, public framework::CModule, public CThreadCallBack
{
public:
	CTsMod():
		mOut(new framework::CPin(this)),
		mTsMux(new CTSMux(1)),
		mState(0),
		mProgram(NULL),
		mH264Stream(NULL),
		mAACStream(NULL),
		mThd(NULL)
	{
		// One program carrying one H.264 and one AAC elementary stream.
		mTsMux->addCallback(this);
		mProgram = mTsMux->createProgram(1);
		mH264Stream = mTsMux->createStream(mProgram, CTSMux::ST_H264);
		mAACStream = mTsMux->createStream(mProgram, CTSMux::ST_AAC);
		mTsMux->init();
	}

	virtual ~CTsMod()
	{
		stop();
		SCT_DELETE(mOut);
		SCT_DELETE(mTsMux);
	}
	virtual framework::CPin* getPin(const char* name)
	{
		// Single output pin; `name` is ignored.
		return mOut;
	}

	///Start the table/PCR pacing thread.  No-op if already running.
	void start()
	{
		if(0 != mState)
		{
			return;
		}

		mState = 1;
//		pthread_create(&mThd, NULL, tsMuxThd, this);
		mThd = new CThread("ts mux");
		mThd->addCallback(this);
		mThd->start();
	}

	///Stop and release the pacing thread.  No-op if not running.
	void stop()
	{
		if(mState != 1)
		{
			return;
		}

		// Clearing mState makes tsMuxThd() fall out of its loop.
		mState = 0;
//		pthread_join(mThd, NULL);
		SCT_DELETE(mThd);
//		mThd = -1;
	}

	///Queue one encoded video frame.  pts0 is in microseconds and is
	///converted to 90kHz units (x9/100) for the PES header.
	void putVideoData(const void * p, const int len, const uint64_t pts0)
	{
		int64_t pts = pts0;
		pts *= 9;
		pts /= 100;
		mTsMux->buildPESPacket(mH264Stream, p, len, pts, pts);
	}

	///Queue one encoded audio frame; same unit conversion as video.
	void putAudioData(const void * p, const int len, const uint64_t pts0)
	{
		int64_t pts = pts0;
		pts *= 9;
		pts /= 100;
		mTsMux->buildPESPacket(mAACStream, p, len, pts, pts);
	}
protected:
	///Mux callback: forward each finished 188-byte TS packet downstream.
	virtual void onTsPacketBuilded(CTSMux*caller, const void * p)
	{
		// NOTE(review): this pts is absolute wall-clock us, while PES/PCR
		// timestamps are rebased to gCamJni->mBaseTime — confirm the pin
		// consumers only treat it as an arrival time.
		int64_t pts = currentTime()*1000000;

		mOut->send(p, 188, &pts);
	}

	virtual int main(CThread * caller)
	{
		if(caller == mThd)
		{
			tsMuxThd(this);
		}
		return 0;
	}
private:
	///Pacing loop: PAT/PMT/NULL every 0.4s, PCR every 30ms (rebased to
	///mBaseTime, converted to 90kHz units).  Runs until mState is cleared.
	static void* tsMuxThd(void*p0)
	{
		CTsMod*self = (CTsMod*)p0;
		double lastPatPmtTime = 0;
		double lastPcrTime = 0;


		while(self->mState)
		{
			if(currentTime() - lastPatPmtTime > 0.4)
			{
				lastPatPmtTime = currentTime();
				self->mTsMux->buildPATPacket();
				self->mTsMux->buildPMTPacket();
				self->mTsMux->buildNULLPacket();
			}

			if(currentTime() - lastPcrTime > 0.03)
			{
				lastPcrTime = currentTime();

				int64_t pts = currentTime()*1000000-gCamJni->mBaseTime;
				pts *= 9;
				pts /= 100;

				self->mTsMux->buildPCRPacket(self->mProgram, pts);
			}
			else
			{
				// Nothing due: sleep 10ms before checking again.
				usleep(10000);
			}
		}

		return NULL;
	}

private:
	framework::CPin *mOut;				// TS packet output pin
	CTSMux * mTsMux;
	int mState;							// 1 while the pacing thread runs
	CTSMux::CProgramInfo * mProgram;
	CTSMux::CStreamInfo * mH264Stream;
	CTSMux::CStreamInfo * mAACStream;

	CThread * mThd;						// pacing thread (NULL when stopped)
};

///RTSP "module" wrapper fed by CTsMod's output pin.  Every TS packet
///received on the pin is (1) served on this module's own "/mod" RTSP
///session, (2) mirrored onto the global CCamJni TS subsession, and
///(3) batched 7-at-a-time into an RTP packet sent to the media center,
///with the stream id carried in an RTP header extension.
class CRTSPMod:public framework::CModule
{
public:
	CRTSPMod():mTsMux(new framework::CPin(this)),
		mRtsp(CRTSPServerInterface::create("rtspserver"))
	{
		mSession = mRtsp->createServerMediaSession("/mod");
		CRTSPServerInterface::CStreamInfo info;
		info.type = CRTSPServerInterface::TS;
		mSubsession = mRtsp->createServerMediaSubsession(mSession, info);
		mRtsp->init();
		SCT_LOGD("stream by mod:%s", mRtsp->getUrl(mSession));

		// Accumulator for 7 TS packets = one RTP payload.
		mBuff = new uint8_t[188*7];
		mIndex = 0;
		mMutex = new CMutex;
	}

	virtual ~CRTSPMod(){SCT_DELETE(mRtsp); SCT_DELETE(mTsMux);
		SCT_DELETE_ARRAY(mBuff); SCT_DELETE(mMutex);}

	virtual framework::CPin* getPin(const char* name)
	{
		// Single input pin; `name` is ignored.
		return mTsMux;
	}

	///URL of this module's RTSP TS session.
	std::string getUrl()
	{
		return mRtsp->getUrl(mSession);
	}

protected:
	///Pin receiver; `arg` carries a pointer to the packet's pts (us).
	// NOTE(review): assumes every delivery is exactly one 188-byte TS
	// packet (len == 188); a larger len would overflow mBuff.  This holds
	// for CTsMod::onTsPacketBuilded, the only sender wired up here.
	virtual void onRecv(framework::CPin*pin, const void*data, const int len, const void* arg)
	{
		const int64_t*pts = (const int64_t*)(arg);
		mRtsp->putMediaData(mSubsession, (void*)data, len, *pts);

		///hack
		gCamJni->mRtsp->putMediaData(
				gCamJni->mTsSubsession, (void*)data, len, *pts);

		///send rtppacket, clean recv data
		CAutoLock locker(mMutex);
		memcpy(mBuff + 188*mIndex++, data, len);
		if(7 == mIndex)
		{
			mIndex = 0;

			// Header extension carries the stream id (network byte order);
			// payload type 33, marker set, timestamp increment 4500.
			uint32_t buf = htonl(gCamJni->mStreamId);
			int status = gCamJni->mSess->SendPacketEx(mBuff, 188*7, 33, 1, 4500, 1, &buf, 1);
			checkerror(status);
		}

		// Drain and discard any incoming RTP packets so the session's
		// receive queue never grows.
		gCamJni->mSess->BeginDataAccess();
		if (gCamJni->mSess->GotoFirstSourceWithData())
		{
			do
			{
				RTPPacket *pack;
				while ((pack = gCamJni->mSess->GetNextPacket()) != NULL)
				{
					gCamJni->mSess->DeletePacket(pack);
				}
			} while (gCamJni->mSess->GotoNextSourceWithData());
		}
		gCamJni->mSess->EndDataAccess();
	}

private:
	framework::CPin *mTsMux;			// input pin (name is historical)

	CRTSPServerInterface* mRtsp;
	CServerMediaSession * mSession;
	CServerMediaSubsession * mSubsession;

	CMutex * mMutex;					// guards mBuff/mIndex
	uint8_t * mBuff;					// 7-packet RTP payload accumulator
	int mIndex;							// packets currently in mBuff
};

///Player-side media source.  Receives a stream over RTSP (either raw
///H.264/AAC frames or TS, which is demuxed locally), buffers frames in
///two pts-ordered lists, and serves them to the decode threads through
///the video_*/audio_* accessors.  All list access is guarded by mMutex.
class CMediaSource:public CRTSPClientCallback, public CTsDemuxCallback, public CThreadCallBack
{
private:
	///One buffered frame: an owned copy of the payload plus its pts (us)
	///and a flag telling whether frames were dropped before it arrived.
	class CDataItem
	{
	public:
		int64_t pts;
		uint8_t * data;
		uint32_t len;
		uint32_t drop_frame_pre;

		static void freeDataItem(CDataItem * p)
		{
			SCT_DELETE_ARRAY(p->data);
			SCT_DELETE(p);
		}
	};

public:

	///RTSP client callback: route TS payloads through the demuxer, and
	///raw H.264/AAC payloads straight into the frame lists.
	virtual void onRecvData(CRTSPClientInterface* caller, const void* d,
				const int len,
				const CRTSPClientInterface::CBlockInfo & info)
	{
		if(info.type == CRTSPServerInterface::TS)
		{
			mTsDemux->demux((void*)d);
		}
		else if(info.type == CRTSPServerInterface::H264)
		{
			putdata(&mVideoData, info.pts, (uint8_t*)d, len, info.drop_frame_pre);
		}
		else if(info.type == CRTSPServerInterface::AAC)
		{
			putdata(&mAudioData, info.pts, (uint8_t*)d, len, info.drop_frame_pre);
		}
		else
		{
			// unknown payload type: ignore
		}
	}

	///Append one frame to `li`, discarding the oldest buffered frame
	///first when the buffered span exceeds 3 seconds.
	void putdata(std::list<CDataItem*>*li, const int64_t pts, const uint8_t*d, const uint32_t len, const uint32_t dropFramePre)
	{
		CAutoLock locker(mMutex);

		std::list<CDataItem*>*plist = li;
		int64_t maxDelay = 3000000;	// max buffered span in us

		if(!plist->empty())
		{
			int64_t firstpts = plist->front()->pts;

			///fixme: pts value never wrap to 0
			firstpts = pts - firstpts;

			if(firstpts < 0) firstpts = -firstpts;

			if(firstpts > maxDelay)
			{
				SCT_LOGD("%lld %lld", firstpts, maxDelay);
				CDataItem::freeDataItem(plist->front());
				plist->pop_front();
			}
		}

		CDataItem * item = new CDataItem;
		item->data = new uint8_t[len];
		memcpy(item->data, d, len);
		item->len = len;
		item->pts = pts;
		// BUGFIX: the caller's drop flag was discarded here (always 0),
		// so the video decode thread could never detect network frame
		// loss on the raw-stream path.  Propagate it.
		item->drop_frame_pre = dropFramePre;

		plist->push_back(item);
	}

	///TS demux callback: PES payloads go to the matching frame list,
	///with pts converted from 90kHz units back to microseconds.
	virtual void onRecvPesPacket(CTsDemux* caller, const CTsDemux::CPes * pes)
	{
		std::list<CDataItem*>*plist=NULL;
		if(pes->type == CTSMux::ST_H264)
		{
			plist = &mVideoData;
		}
		else
		{
			plist = &mAudioData;
		}

		int64_t pts = pes->pts*100/9;
		putdata(plist, pts, pes->buf, pes->len, 0);
	}

	///Stream-presence queries; this source always reports both streams.
	bool hasVideoStream(CAVParam * param) {return true;}
	bool hasAudioStream(CAVParam * param) {return true;}

	///Buffered video span (newest pts - oldest pts, us); -1 when empty.
	int64_t video_realTimeDelay()
	{
		CAutoLock locker(mMutex);

		if(mVideoData.empty())
		{
			return -1;
		}


		return mVideoData.back()->pts - mVideoData.front()->pts;// + n)%n;

	}

	///Pop the oldest video frame.  Copies up to `max` bytes into `d`
	///(any of the output pointers may be NULL).  Returns the number of
	///bytes delivered, or -1 when no frame is buffered; *ntrunc receives
	///the number of bytes that did not fit.
	int video_getData(int64_t * pts, uint8_t*d, const uint32_t max, uint32_t * dropFramePre, int * ntrunc)
	{
		CAutoLock locker(mMutex);

		if(mVideoData.empty())
		{
			return -1;
		}

		CDataItem * item = mVideoData.front();
		int ret = item->len;

		if(ret > max)
		{
			ret = max;
		}

		if(ntrunc)
		{
			*ntrunc = item->len - ret;
		}

		if(pts)
		{
			*pts = item->pts;
		}


		if(d)
		{
			memcpy(d, item->data, ret);
		}

		if(dropFramePre)
		{
			*dropFramePre = item->drop_frame_pre;
		}

		CDataItem::freeDataItem(item);
		mVideoData.pop_front();

		return ret;
	}

	///Peek the oldest video pts without consuming; -1 when empty.
	int video_peekPTS(int64_t * pts)
	{
		CAutoLock locker(mMutex);

		*pts = 0;

		if(mVideoData.empty())
		{
			return -1;
		}

		*pts = mVideoData.front()->pts;

		return 0;
	}

	bool video_hasBeenSync() {return true;}

	///Buffered audio span (newest pts - oldest pts, us); -1 when empty.
	int64_t audio_realTimeDelay()
	{
		CAutoLock locker(mMutex);

		if(mAudioData.empty())
		{
			return -1;
		}

		return mAudioData.back()->pts - mAudioData.front()->pts;

	}

	///Pop the oldest audio frame; same contract as video_getData().
	int audio_getData(int64_t * pts, uint8_t*d, const uint32_t max, uint32_t * dropFramePre, int * ntrunc)
	{
		CAutoLock locker(mMutex);

		if(mAudioData.empty())
		{
			return -1;
		}

		CDataItem * item = mAudioData.front();
		int ret = item->len;

		if(ret > max)
		{
			ret = max;
		}

		if(ntrunc)
		{
			*ntrunc = item->len - ret;
		}

		if(pts)
		{
			*pts = item->pts;
		}


		if(d)
		{
			memcpy(d, item->data, ret);
		}

		if(dropFramePre)
		{
			*dropFramePre = item->drop_frame_pre;
		}

		CDataItem::freeDataItem(item);
		mAudioData.pop_front();

		return ret;
	}

	///Peek the oldest audio pts without consuming; -1 when empty.
	int audio_peekPTS(int64_t * pts)
	{
		CAutoLock locker(mMutex);

		*pts = 0;

		if(mAudioData.empty())
		{
			return -1;
		}

		*pts = mAudioData.front()->pts;

		return 0;
	}

	bool audio_hasBeenSync(){return true;}

	///Point the RTSP client at a stream.
	// NOTE(review): the Java-supplied url0 is deliberately overridden
	// with the local CRTSPMod URL — confirm this loopback is intended.
	void setRtspClientUrl(const char * url0)
	{
		std::string url = url0;

		url = gCamJni->mRTSPMod->getUrl();
		//url = gCamJni->_rtsp->getUrl(gCamJni->mTsSession);
		//url = gCamJni->_rtsp->getUrl(gCamJni->mRawSession);

		SCT_LOGD("play url:%s", url.c_str());
		mRTSPClient->setServerUrl(url.c_str());
	}

	///Creates the RTSP client + TS demuxer and immediately starts the
	///receive thread (which calls rtsp() via main()).
	CMediaSource():mRTSPClient(NULL), mTsDemux(NULL), mMutex(NULL),
			mThd(NULL), mState(0)
	{
		mMutex = new CMutex;

		mRTSPClient = CRTSPClientInterface::create("rtspclient");
		mRTSPClient->setServerUrl(gCamJni->mRTSPMod->getUrl().c_str());
		mRTSPClient->addCallback(this);

		mTsDemux = new CTsDemux;
		mTsDemux->addCallback(this);

		mState = 1;
//		pthread_create(&mThd, NULL, rtsp, this);
		mThd = new CThread("rtsp recv thread");
		mThd->addCallback(this);
		mThd->start();
	}

	virtual ~CMediaSource()
	{
		// Stop the receive thread before tearing anything else down.
		if(1 == mState)
		{
			mState = 0;
//			pthread_join(mThd, NULL);
			SCT_DELETE(mThd);
		}

		SCT_DELETE(mMutex);
		while(!mVideoData.empty())
		{
			CDataItem*p = mVideoData.front();
			mVideoData.pop_front();

			CDataItem::freeDataItem(p);
		}

		while(!mAudioData.empty())
		{
			CDataItem* p = mAudioData.front();
			mAudioData.pop_front();

			CDataItem::freeDataItem(p);
		}

		SCT_DELETE(mTsDemux);
		SCT_DELETE(mRTSPClient);
	}

protected:
	virtual int main(CThread * caller)
	{
		if(caller == mThd)
		{
			rtsp(this);
		}

		return 0;
	}
private:
	///Receive loop: keep pumping the RTSP client until mState clears.
	static void* rtsp(void* p)
	{
		CMediaSource*self = (CMediaSource*)p;

		while(self->mState)
		{
			self->mRTSPClient->demux();
		}

		return NULL;
	}

private:
	CRTSPClientInterface * mRTSPClient;
	CTsDemux * mTsDemux;

	std::list<CDataItem*> mVideoData;	// buffered video frames (pts order)
	std::list<CDataItem*> mAudioData;	// buffered audio frames (pts order)
	CMutex* mMutex;						// guards both lists

	CThread *  mThd;					// RTSP receive thread
	int mState;							// 1 while the receive thread runs
};


void CCamJni::onGenData(CEncodeInterface * caller,
				const void* buf, const int len,
				const CEncodeInterface::CBlockInfo & info)
{
	///Encoder output fan-out: each encoded frame is fed to the TS muxer
	///and mirrored onto the matching raw RTSP subsession.
	if(caller == mVideoEncoder)
	{
		gCamJni->mTsMod->putVideoData(buf, len, info.pts);
		gCamJni->mRtsp->putMediaData(gCamJni->mRawH264Subsession,
				(void*)buf, len, info.pts);
		return;
	}

	if(caller == mAudioEncoder)
	{
		gCamJni->mTsMod->putAudioData(buf, len, info.pts);
		gCamJni->mRtsp->putMediaData(gCamJni->mRawAACSubsession,
				(void*)buf, len, info.pts);
		return;
	}

	// Unknown encoder instance.
	SCT_LOGD();
}

void CCamJni::onGenData(CDecodeInterface* caller,
			const void* buf, const int len,
			const CDecodeInterface::CBlockInfo & info)
{
	///Decoder output fan-out: video frames go to the screen, audio PCM
	///is queued for the Java side to pull via getpcmdata().
	if(caller == mVideoDecoder)
	{
		sct_screen_putdata(buf, len, info.width, info.height);
		return;
	}

	if(caller != mAudioDecoder)
	{
		return;	// unknown decoder instance: ignore
	}

	CPCMData * pcm = gCamJni->mPcmData;

	CAutoLock locker(pcm->mutex);

	if(pcm->len + len >= pcm->max_len)
	{
		// The Java side is not draining fast enough; drop this chunk.
		SCT_LOGD("lost audio data");
		return;
	}

	memcpy(pcm->data + pcm->len, buf, len);
	pcm->len += len;
}

JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_create
  (JNIEnv *, jclass, jint w, jint h, jint fps, jint num)
{
    ///One-time bootstrap of the capture/stream pipeline (the singleton is
    ///never destroyed): registers the stream with the media center over
    ///HTTP, parses the returned stream id and RTP destination, then builds
    ///the RTP session, encoders, TS mux, RTSP servers, PCM buffer and
    ///service threads.  w/h/fps describe the video; num is the number of
    ///PCM samples per audio frame.
    if(gCamJni)
    {
        return;
    }

    CEncodeInterface::initlib();
    CDecodeInterface::initlib();
    CHttpClientInterface::initlib();

    gCamJni = new CCamJni;

    gCamJni->mHttpBuffMaxSz = 4096;

    // Zero-filled so later xmlReadMemory/%s over the whole buffer stays
    // NUL-terminated.
    gCamJni->mHttpBuff = new uint8_t[gCamJni->mHttpBuffMaxSz];
    bzero(gCamJni->mHttpBuff, gCamJni->mHttpBuffMaxSz);

    gCamJni->mHttpClient = CHttpClientInterface::create("httpclient");

    int res = 0;

    ///gen req xml, to get stream id:
    ///<conf><api><mediacenter><filestorages><filestorage .../>
	xmlNodePtr root, node, sub;
	xmlDocPtr doc;
	xmlChar *xmlbuff;
	int buffersize;

	doc = xmlNewDoc(BAD_CAST gXMLVersion);
	root = xmlNewNode(NULL, BAD_CAST gLabelConf);
	xmlDocSetRootElement(doc, root);

	///build body
	node = xmlNewNode(NULL, BAD_CAST "api");
	xmlAddChild(root, node);

	sub = xmlNewNode(NULL, BAD_CAST "mediacenter");
	xmlAddChild(node, sub);

	node = sub;
	sub = xmlNewNode(NULL, BAD_CAST "filestorages");
	xmlAddChild(node, sub);

	node = sub;
	sub = xmlNewNode(NULL, BAD_CAST "filestorage");
	xmlNewProp(sub, BAD_CAST "machine", BAD_CAST "20160527a001");
	xmlNewProp(sub, BAD_CAST "streammajor", BAD_CAST "front");

	///get time: streamminor is a wall-clock stamp "yy-mm-dd_HH:MM:SS.mmm"
	struct timeval tv;
	gettimeofday(&tv,0);

	time_t rawtime = tv.tv_sec;

	struct tm timeinfo;
	localtime_r(&rawtime, &timeinfo);

	char buffer [80];
	strftime (buffer, 80, "%y-%m-%d_%T", &timeinfo);

	char buf[80];
	sprintf(buf, "%s.%03d", buffer, (int)(tv.tv_usec/1000));

	xmlNewProp(sub, BAD_CAST "streamminor", BAD_CAST buf);

	xmlAddChild(node, sub);

	xmlDocDumpFormatMemory(doc, &xmlbuff, &buffersize, 1);

	SCT_LOGD("snd:%d:%s", buffersize, xmlbuff);
	res = gCamJni->mHttpClient->post("http://192.168.1.67:8088/api/mediacenter/filestorages",
				xmlbuff,
				buffersize,
	    		gCamJni->mHttpBuff,
				gCamJni->mHttpBuffMaxSz);
	SCT_LOGD("ret:%d:%s", res, gCamJni->mHttpBuff);

	xmlFree(xmlbuff);
	xmlFreeDoc(doc);

	///try to get addr
	// NOTE(review): the whole buffer (not just the `res` bytes actually
	// received) is parsed; this relies on the bzero() above for
	// termination and on stale bytes never forming valid XML — confirm.
	doc = xmlReadMemory((const char*)gCamJni->mHttpBuff, gCamJni->mHttpBuffMaxSz,
			"noname.xml", NULL, 0);

	///get stream id (last matching node wins)
	int id = -1;

	xmlXPathObjectPtr result;
	xmlNodeSetPtr nodeset;
	int i;
	xmlChar *value;
	result = getnodeset (doc, (xmlChar*)"/conf/api/mediacenter/filestorages/filestorage");
	if(result)
	{
		nodeset = result->nodesetval;
		for (i=0; i < nodeset->nodeNr; i++) {
			value = xmlNodeGetContent(nodeset->nodeTab[i]);

			id = atoi((char*)value);

			xmlFree(value);
		}

		xmlXPathFreeObject (result);
	}

	xmlFreeDoc(doc);

	char * strUrl = new char[1000];

	// Ask the media center where to send the RTP stream for this id.
	sprintf(strUrl, "http://192.168.1.67:8088/api/mediacenter/filestorages/streamid/%d/addr", id);
	res = gCamJni->mHttpClient->get(strUrl,
			gCamJni->mHttpBuff,
			gCamJni->mHttpBuffMaxSz);

	SCT_LOGD("ret:%d:%s", res, gCamJni->mHttpBuff);
	SCT_DELETE_ARRAY(strUrl);

	///now parse ip, port
	doc = xmlReadMemory((const char*)gCamJni->mHttpBuff, gCamJni->mHttpBuffMaxSz,
			"noname.xml", NULL, 0);

	std::string ip;
	result = getnodeset (doc, (xmlChar*)"/conf/api/mediacenter/filestorages/stream/addr/ip");
	if(result)
	{
		nodeset = result->nodesetval;
		for (i=0; i < nodeset->nodeNr; i++) {
			value = xmlNodeGetContent(nodeset->nodeTab[i]);

			ip = (char*)value;

			xmlFree(value);
		}

		xmlXPathFreeObject (result);
	}

	int port = 0;
	result = getnodeset (doc, (xmlChar*)"/conf/api/mediacenter/filestorages/stream/addr/port");
	if(result)
	{
		nodeset = result->nodesetval;
		for (i=0; i < nodeset->nodeNr; i++) {
			value = xmlNodeGetContent(nodeset->nodeTab[i]);

			port = atoi((char*)value);

			xmlFree(value);
		}

		xmlXPathFreeObject (result);
	}

	SCT_LOGD("target info:%s %d %d", ip.c_str(), port , id);
	xmlFreeDoc(doc);

	///now create rtp session (90kHz timestamp units, local port base 4400)
	RTPUDPv4TransmissionParams transparams;
	RTPSessionParams sessparams;

	sessparams.SetOwnTimestampUnit(1.0/90000);

	sessparams.SetAcceptOwnPackets(false);
	transparams.SetPortbase(4400);

	gCamJni->mSess = new RTPSession;
	int status = gCamJni->mSess->Create(sessparams, &transparams);
	checkerror( status );

	// Parse the dotted-quad destination address into 4 octets.
	uint8_t aip[4];
	sscanf(ip.c_str(), "%hhu.%hhu.%hhu.%hhu",
			aip + 0,
			aip + 1,
			aip + 2,
			aip + 3);

    RTPIPv4Address addr(aip, port);

    status = gCamJni->mSess->AddDestination(addr);
    checkerror(status);
    gCamJni->mStreamId = id;
    ///////////// encoders
    gCamJni->mVideoEncoder = CEncodeInterface::create("encode", CT_H264);

    gCamJni->mVParam.bitrate = 1000000;
    gCamJni->mVParam.fps = fps;
    gCamJni->mVParam.gopsize = 15;
    gCamJni->mVParam.height = h;
    gCamJni->mVParam.width = w;
    gCamJni->mVParam.max_b_frames = 0;

    gCamJni->mVideoEncoder->init(gCamJni->mVParam);
    gCamJni->mVideoEncoder->addCallback(gCamJni);

    gCamJni->mAudioEncoder = CEncodeInterface::create("encode", CT_AAC);

    gCamJni->mAParam.bitrate = 64000;
    gCamJni->mAParam.channel_layout = PCM_CH_STEREO;
    gCamJni->mAParam.sample_fmt = PCM_FMT_S16;
    gCamJni->mAParam.sample_num_in_frame = num;
    gCamJni->mAParam.sample_rate = 44100;

    gCamJni->mAudioEncoder->init(gCamJni->mAParam);
    gCamJni->mAudioEncoder->addCallback(gCamJni);

    // Capture epoch: all encoder pts are relative to this (us).
    gCamJni->mBaseTime = currentTime()*1000000;

    //////////////// TS mux -> RTSP module chain
    gCamJni->mTsMod = new CTsMod();
    gCamJni->mRTSPMod = new CRTSPMod();

    gCamJni->mTsMod->getPin(NULL)->addRecver(
    		gCamJni->mRTSPMod->getPin(NULL));

	// Second RTSP server: "/ts" (muxed) and "/raw" (H.264 + AAC) sessions.
	gCamJni->mRtsp = CRTSPServerInterface::create("rtspserver");
    gCamJni->mTsSession = gCamJni->mRtsp->createServerMediaSession("/ts");
    CRTSPServerInterface::CStreamInfo tsInfo;
    tsInfo.type = CRTSPServerInterface::TS;
    gCamJni->mTsSubsession = gCamJni->mRtsp->createServerMediaSubsession(
    		gCamJni->mTsSession, tsInfo);

    gCamJni->mRawSession = gCamJni->mRtsp->createServerMediaSession("/raw");

    CRTSPServerInterface::CStreamInfo h264Info;
    h264Info.type = CRTSPServerInterface::H264;
    gCamJni->mRawH264Subsession = gCamJni->mRtsp->createServerMediaSubsession(
    		gCamJni->mRawSession, h264Info);

    CRTSPServerInterface::CStreamInfo aacInfo;
    aacInfo.type = CRTSPServerInterface::AAC;
    gCamJni->mRawAACSubsession = gCamJni->mRtsp->createServerMediaSubsession(
    		gCamJni->mRawSession, aacInfo);

    gCamJni->mRtsp->init();

    SCT_LOGD("stream by ts:%s", gCamJni->mRtsp->getUrl(gCamJni->mTsSession));
    SCT_LOGD("stream by raw:%s", gCamJni->mRtsp->getUrl(gCamJni->mRawSession));

    /////////// PCM hand-off buffer for the Java audio track
    gCamJni->mPcmData = new CPCMData;

    gCamJni->mPcmData->max_len = 10000*4*2;
    gCamJni->mPcmData->data = new uint8_t[gCamJni->mPcmData->max_len];
    gCamJni->mPcmData->mutex = new CMutex;
    gCamJni->mPcmData->len = 0;

    gCamJni->mPcmHungry = 0;

    gCamJni->mPlayerMutex = new CMutex;
    gCamJni->mPlayerState = PS_Terminated;

    gCamJni->mVideoDecoder = NULL;
    gCamJni->mAudioDecoder = NULL;

    //// start the mux pacing thread and the object/timer service loop
    gCamJni->mTsMod->start();

    CObject::initlib();
    gCamJni->mTimerBootup = gCamJni->addTimer(3000, CObject::TT_OneShot);
    gCamJni->mObjectLoop = new CThread("object_loop");
    gCamJni->mObjectLoop->addCallback(gCamJni);
    gCamJni->mObjectLoop->start();
}


void CCamJni::onTimerTimeout(const int id)
{
	///Timer callback: the one-shot bootup timer arms the periodic timer,
	///which then sends a keepalive GET to the media center every second.
	if(id == mTimerBootup)
	{
		mTimerPeriod = addTimer(1000, TT_Periodic);
	}
	else if(id == mTimerPeriod)
	{
		///keepalive
		// Stack buffer replaces the old per-tick heap allocation, and
		// snprintf cannot overflow it.  "%u" fixes the previous
		// "%d"-with-uint32_t format-specifier mismatch.
		char strUrl[256];
		snprintf(strUrl, sizeof(strUrl),
				"http://192.168.1.67:8088/api/mediacenter/filestorages/streamid/%u/status",
				gCamJni->mStreamId);
		int res = gCamJni->mHttpClient->get(strUrl,
				gCamJni->mHttpBuff,
				gCamJni->mHttpBuffMaxSz);

		// NUL-terminate the response; also guard the full-buffer edge
		// (the old code could write one byte past the end).
		if(res > 0 && res < gCamJni->mHttpBuffMaxSz){gCamJni->mHttpBuff[res] = '\0';}
	}
}


///States of the video jitter-buffer state machine in decode_thread().
enum PlayBufferState
{
	PBS_DropAndLookForI = SCT_ENUM_OFFSET,	// flush until an SPS/PPS/IDR NALU
	PBS_Buffering,							// wait until enough data is buffered
	PBS_Playing,							// pace frames out by pts
};

static void* decode_thread_audio(void* arg)
{
	uint8_t * buf = new uint8_t[2000000];

	int tAudioBufferTime = 260000;

	int shouldprint = 1;

	int bAudioBufferNeedReset = 1;
	int syncAudioTovideo = 0;

	double & nxtPlayTime = gCamJni->mNxtPlayTime;
	int64_t & streamTime = gCamJni->mStreamTime;

	while(0 != streamTime)
	{
		usleep(100000);
	}

	while(PS_Decoding == gCamJni->mPlayerState)
	{
		if(gCamJni->mMediaSource->hasAudioStream(NULL) && (!gCamJni->mMediaSource->audio_hasBeenSync()) )
		{
			gCamJni->mMediaSource->audio_getData(NULL, NULL, 100000, NULL, NULL);
			bAudioBufferNeedReset = 1;
			usleep(100000);
			continue;
		}

		if(bAudioBufferNeedReset)
		{
			bAudioBufferNeedReset = 0;

			while(gCamJni->mMediaSource->audio_getData(NULL, NULL, 100000, NULL, NULL)>0)
			{
				;
			}
		}

		///test audio
		int64_t audio_delay = gCamJni->mMediaSource->audio_realTimeDelay();
		if(audio_delay > 2*tAudioBufferTime)
		{
			SCT_LOGD("reset audio buffer:%lld", audio_delay);
			while(gCamJni->mMediaSource->audio_getData(NULL, NULL, 100000, NULL, NULL)>0)
			{
				;
			}
		}

		if( (!syncAudioTovideo) && audio_delay < tAudioBufferTime )
		{
			if(shouldprint)
			{
				SCT_LOGD("===========================>delay play audio!:%lld %d", audio_delay, tAudioBufferTime);
			}
			shouldprint = 0;
		}
		else
		{
			if(0 == shouldprint)
			{
				SCT_LOGD("=========================>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>==>now play audio!:%lld %d", audio_delay, tAudioBufferTime);
			}
			shouldprint = 1;

			if(gCamJni->mPcmData->len < gCamJni->mPcmData->max_len/2)
			{
				int64_t pts;
//				int canPlay = 1;
				bool hasSync = gCamJni->mMediaSource->audio_hasBeenSync();
				int64_t pts2;
				uint32_t dflag;
				int ret2 = -1;

				while(1)
				{
					int ret = gCamJni->mMediaSource->audio_peekPTS(&pts);
					if(ret >= 0)
					{
						int64_t syncdiff = pts - streamTime;
						if(streamTime && (syncdiff < -400000))
						{
							SCT_LOGD("aac audio pkg too late, drop it:stream:%lld %lld", streamTime, syncdiff);
							while(gCamJni->mMediaSource->audio_getData(&pts, NULL, 100000, NULL, NULL)>0)
							{
								;
							}

							syncAudioTovideo = 0;
						}
						else
						{
							syncAudioTovideo = 1;
							break;
						}
					}
					else
					{
						break;
					}
				}

				if(1 == syncAudioTovideo)
				{
					ret2 = gCamJni->mMediaSource->audio_getData(&pts2, buf, 2000000, &dflag, NULL);;
					//ret2 = -1;
					if(ret2 > 0)
					{

						if(dflag)
						{
							SCT_LOGD("network drop pakcet>>>>>>d:%lld, sync:%d, data:%d %lld %d",
									audio_delay,
									hasSync,
									ret2, pts2, dflag);
						}

						gCamJni->mAudioDecoder->decode(buf, ret2, 0);
					}
					else
					{
						usleep(10000);
					}
				}
			}

		}
	}

	SCT_DELETE(buf);

	return NULL;
}

///Video decode loop (body of the "video decode thread").
///State machine: PBS_DropAndLookForI (flush until an SPS/PPS/IDR NALU),
///PBS_Buffering (wait for enough buffered span), PBS_Playing (pace
///frames out by pts).  Publishes mStreamTime/mNxtPlayTime so the audio
///thread can sync to video.
static void* decode_thread(void* arg)
{
	uint8_t * buf = new uint8_t[2000000];
	uint8_t * bufAdj = new uint8_t[2000000];
	int len;
	int64_t pts;
	uint32_t dropFramePre;

	int state = PBS_DropAndLookForI;

	double & nxtPlayTime = gCamJni->mNxtPlayTime;
	int64_t & streamTime = gCamJni->mStreamTime;

	///reset to default, sync to audio thread.
	nxtPlayTime = 0;
	streamTime = 0;

	int tBaseBufferTime =   90000;	// min buffered span before playing (us)
	int tMaxBufferTime =  1000000;	// drop frames beyond this span (us)


	SCT_LOGD("base time:%d    max time:%d", tBaseBufferTime, tMaxBufferTime);

	int bVideoBufferNeedReset = 1;

	while(PS_Decoding == gCamJni->mPlayerState)
	{
		// Until the source reports sync, just drain and wait.
		if(gCamJni->mMediaSource->hasVideoStream(NULL) && (!gCamJni->mMediaSource->video_hasBeenSync()) )
		{
			gCamJni->mMediaSource->video_getData(NULL, NULL, 1000000, NULL, NULL);
			bVideoBufferNeedReset = 1;
			usleep(100000);
			continue;
		}

		if(bVideoBufferNeedReset)
		{
			bVideoBufferNeedReset = 0;

			// Flush everything buffered so far.
			while(gCamJni->mMediaSource->video_getData(NULL, NULL, 1000000, NULL, NULL)>0)
			{
				;
			}
		}

        int64_t rtDelay = gCamJni->mMediaSource->video_realTimeDelay();

        // Negative span: pts went backwards (wrap/discontinuity) — resync.
        if(rtDelay<-1)
        {
        	gCamJni->mMediaSource->video_getData(NULL, NULL, 1000000, NULL, NULL);
        	state = PBS_DropAndLookForI;
        	SCT_LOGD("%lld", rtDelay);
        }

        if(PBS_DropAndLookForI == state && rtDelay > tBaseBufferTime)
        {
		   len = gCamJni->mMediaSource->video_getData(&pts, buf, 2000000, NULL, NULL);
		   if(len > 0)
		   {
			   memcpy(bufAdj, buf, len);

			   // Assumes a 4-byte Annex-B start code: 5=IDR, 7=SPS, 8=PPS.
			   int nalu_type = (* (bufAdj+4) )&0x1f;
			   if( 7 == nalu_type || 8 == nalu_type || 5 == nalu_type )
			   {
				   state = PBS_Buffering;

				   try
				   {
					   gCamJni->mVideoDecoder->decode(bufAdj, len, 0);
				   }
				   catch(int n)
				   {
					   //ignore
				   }

				   ///sync the shared clock to the next buffered frame
				   nxtPlayTime = sct::currentTime() + 0.02;
				   gCamJni->mMediaSource->video_peekPTS(&streamTime);

				   continue;///if you not, will cause frame lost.
			   }
		   }

		}

        if(PBS_Buffering == state)
        {
            if(rtDelay > tBaseBufferTime)
            {
                state = PBS_Playing;
            }
        }

        // Buffer ran dry: fall back to buffering.
        if(PBS_Playing == state && rtDelay < 10000)
        {
            state = PBS_Buffering;
        }

        if(PBS_Playing == state && rtDelay > tMaxBufferTime) {
            //drop some frame. cause the decode is so slow
            SCT_LOGD("XXXX..........start.......  .dropframe...............:%lld", rtDelay);
            state = PBS_DropAndLookForI;
        }


        if(PBS_Playing == state)
        {
            double cur = sct::currentTime();
            if (cur > nxtPlayTime ) {
                len = gCamJni->mMediaSource->video_getData(&pts, buf, 2000000, &dropFramePre, NULL);
                if(len > 0)
                {
                    if(dropFramePre)
                    {
                        state = PBS_DropAndLookForI;
                        SCT_LOGD("XXXX......................lost frame detected!");
                        continue;
                    }

                    ///to decode one frame
                    memcpy(bufAdj, buf, len);

                    streamTime = pts;

                    ///decode one frame
                    try
                    {
                    	gCamJni->mVideoDecoder->decode(bufAdj, len, 0);
                    }
                    catch (int n)
                    {
                    	///ignore
                    }

                    //calculate next systime
                    int64_t nxtPTS;
                    int ret;
                    ret = gCamJni->mMediaSource->video_peekPTS(&nxtPTS);
                    if (ret < 0) {
                        //force play out
                        SCT_LOGD("no next data, try to buffering...");
                        state = PBS_Buffering;
                        //no need to update sysTime and stream Time.
                    }
                    else {
                        nxtPTS -= streamTime;
                        double d = nxtPTS / 1000000.0;

                        if (d < 0) {
                            //fixme: should has better way
                            SCT_LOGD("XXX...................error, may cause by pts wrap..., try to drop packet:%lld %lld %f",
                            		nxtPTS, streamTime, d);
                            state = PBS_DropAndLookForI;
                        }

                        if( d > 0.5)
                        {
                            // BUGFIX: d is a double; the old "%d" format
                            // specifier was undefined behavior — use %f.
                            SCT_LOGD("D is too big:%f, some thing wrong?", d);
                            d = 0.04;
                        }

                        // Drain slightly faster while the buffer is deep.
                        if(d > 0.02 && rtDelay > tBaseBufferTime + 200000)
                        {
                        	double d1 = d;
                        	d -= 0.005;
                        	SCT_LOGD("---------->%f %f", d1, d);
                        }


                        nxtPlayTime = nxtPlayTime + d;
                    }
                }
            }
        }

	}

    SCT_DELETE_ARRAY(buf);
    SCT_DELETE_ARRAY(bufAdj);

	return NULL;
}


JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_start
  (JNIEnv *env, jclass, jstring url)
{
	///Bring up the playback pipeline: media source, decoders, and the
	///two decode threads.  No-op unless the player is fully stopped.
	SCT_LOGD();

	CAutoLock locker(gCamJni->mPlayerMutex);

	if(PS_Terminated != gCamJni->mPlayerState) return;

	gCamJni->mMediaSource = new CMediaSource;

	// Hand the Java-supplied URL to the media source.
	const char * cstr = env->GetStringUTFChars(url, 0);
	gCamJni->mMediaSource->setRtspClientUrl(cstr);
	env->ReleaseStringUTFChars(url, cstr);

	gCamJni->mVideoDecoder = CDecodeInterface::create("decode", CT_H264);
	gCamJni->mAudioDecoder = CDecodeInterface::create("decode", CT_AAC);

	gCamJni->mVideoDecoder->init();
	gCamJni->mAudioDecoder->init();

	gCamJni->mVideoDecoder->addCallback(gCamJni);
	gCamJni->mAudioDecoder->addCallback(gCamJni);

	gCamJni->mPlayerState = PS_Inited;
	gCamJni->mPlayerState = PS_Decoding;	// decode loops poll this state

	gCamJni->mNxtPlayTime = -1;
	gCamJni->mStreamTime = -1;

	// Both threads call back into CCamJni::main(), which dispatches to
	// decode_thread() / decode_thread_audio().
	gCamJni->mThdVideoDecode = new CThread("video decode thread");
	gCamJni->mThdVideoDecode->addCallback(gCamJni);
	gCamJni->mThdVideoDecode->start();

	gCamJni->mThdAudioDecode = new CThread("audio decode thread");
	gCamJni->mThdAudioDecode->addCallback(gCamJni);
	gCamJni->mThdAudioDecode->start();
}

JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_stop
  (JNIEnv *, jclass)
{
	///Tear down the playback pipeline.  Safe to call when already stopped.
	SCT_LOGD();

	CAutoLock locker(gCamJni->mPlayerMutex);
	if(PS_Terminated == gCamJni->mPlayerState) return;

	// Flip the state first so both decode loops fall out of their
	// PS_Decoding while-loops, then join/release the threads.
	gCamJni->mPlayerState = PS_Stopped;

	SCT_DELETE(gCamJni->mThdAudioDecode);
	SCT_DELETE(gCamJni->mThdVideoDecode);

	SCT_DELETE(gCamJni->mVideoDecoder);
	SCT_DELETE(gCamJni->mAudioDecoder);

	SCT_DELETE(gCamJni->mMediaSource);

	gCamJni->mPlayerState = PS_Terminated;
}


JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_destroy
  (JNIEnv *, jclass)
{
	///Intentionally a no-op: the singleton lives for the process lifetime.
	if(gCamJni)
	{
		SCT_LOGD("should never destroy it!!!");
	}
}

///Blocking pull of decoded PCM for the Java audio track: waits until the
///decoder has produced data, then copies as much as fits into `buf0` and
///returns the number of bytes written.
JNIEXPORT jint JNICALL Java_com_sct_mycamapp_CamJni_getpcmdata
  (JNIEnv * env, jclass, jbyteArray buf0, jint)
{
	jint ret = 0;

	CAutoLock locker(gCamJni->mPcmData->mutex);

	while(1)
	{
		if(0 == gCamJni->mPcmData->len)
		{
			// Nothing buffered: release the lock so the decoder can
			// produce, nap briefly, then retry.
			locker.unlock();
			usleep(10000);
			locker.relock();

			gCamJni->mPcmHungry = 1;

			continue;
		}
		gCamJni->mPcmHungry = 0;

		jbyte* p = env->GetByteArrayElements(buf0, 0);

		// BUGFIX: never write past the end of the Java array — the old
		// code copied mPcmData->len unconditionally, which could overflow
		// a caller-supplied buffer smaller than the buffered PCM.
		jsize cap = env->GetArrayLength(buf0);
		ret = gCamJni->mPcmData->len;
		if(ret > cap)
		{
			ret = cap;
		}
		memcpy(p, gCamJni->mPcmData->data, ret);

		env->ReleaseByteArrayElements(buf0, p, 0);

		// Keep any tail that did not fit for the next call.
		gCamJni->mPcmData->len -= ret;
		if(gCamJni->mPcmData->len > 0)
		{
			memmove(gCamJni->mPcmData->data,
					gCamJni->mPcmData->data + ret,
					gCamJni->mPcmData->len);
		}
		break;
	}

	return ret;
}


JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_onPCMFrame
  (JNIEnv * env, jclass, jbyteArray buf0, jint len)
{
	///Capture path: one PCM frame from Java is handed to the AAC encoder,
	///timestamped in microseconds relative to mBaseTime.
	jbyte* frame = env->GetByteArrayElements(buf0, 0);

	int64_t pts = currentTime()*1000000;
	pts -= gCamJni->mBaseTime;

	gCamJni->mAudioEncoder->encodeOneFrame(pts, frame, len);

	env->ReleaseByteArrayElements(buf0, frame, 0);
}

JNIEXPORT void JNICALL Java_com_sct_mycamapp_CamJni_onFrame
  (JNIEnv * env, jclass, jbyteArray buf0)
{
	///Capture path: one raw video frame from Java is handed to the H.264
	///encoder, timestamped in microseconds relative to mBaseTime.
	jbyte* frame = env->GetByteArrayElements(buf0, 0);

	int64_t pts = currentTime()*1000000;
	pts -= gCamJni->mBaseTime;

	// NOTE(review): length 0 is passed here — presumably the encoder
	// derives the frame size from mVParam (w*h); confirm.
	gCamJni->mVideoEncoder->encodeOneFrame(pts, frame, 0);

	env->ReleaseByteArrayElements(buf0, frame, 0);
}

JNIEXPORT jstring JNICALL Java_com_sct_mycamapp_CamJni_getURL
  (JNIEnv * env, jclass)
{
	///Expose the local module RTSP server URL to the Java side.
	const std::string url = gCamJni->mRTSPMod->getUrl();
	SCT_LOGD("url:%s",url.c_str());
	return env->NewStringUTF(url.c_str());
}

int CCamJni::main(CThread * caller)
{
	///CThread entry dispatcher: route each worker thread to its loop.
	if(caller == mThdVideoDecode)
	{
		decode_thread(this);
		return 0;
	}

	if(caller == mThdAudioDecode)
	{
		decode_thread_audio(this);
		return 0;
	}

	if(caller == mObjectLoop)
	{
		// Object/timer service loop; runs for the process lifetime.
		for(;;)
		{
			CObject::tick();
			usleep(10000);
		}
	}

	return 0;
}
