/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2010 Live Networks, Inc.  All rights reserved.
// RTP sink for H.264 video (RFC 3984)
// Implementation

#include "MyH264VideoRTPSink.hh"
#include "MyH264VideoStreamFramer.hh"
#include <sys/ioctl.h>
#include <basetypes.h>
#include "iav_drv.h"
#include "iav_drv_ex.h"
#include "../debug/debug.h"


////////// H264VideoRTPSink implementation //////////

// Constructor.  Sets up an RTP sink for H.264 ("H264", 90 kHz clock) and
// defers construction of the "a=fmtp:" SDP line (see auxSDPLine()).
//   rtpPayloadFormat          - dynamic RTP payload type to advertise
//   profile_level_id          - H.264 profile-level-id (unused while the
//                               eager SDP construction below is disabled)
//   sprop_parameter_sets_str  - base64 SPS,PPS string (likewise unused here)
MyH264VideoRTPSink
::MyH264VideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs,
			 unsigned char rtpPayloadFormat,
			 unsigned profile_level_id,
			 char const* sprop_parameter_sets_str)
	: VideoRTPSink(env, RTPgs, rtpPayloadFormat, 90000, "H264"),
	  fOurFragmenter(NULL),fFmtpSDPLine(NULL) {
	// Set up the "a=fmtp:" SDP line for this stream:
 /* NOTE (2014/9/18, Sean): disabled for the Ambarella port.  The SPS/PPS
	(and hence profile-level-id) are not known at construction time on this
	platform, so the fmtp line is instead built lazily in auxSDPLine() once
	the framer has parsed them from the stream.  The original eager
	construction is kept below, disabled, for reference. */
#if 0
  char const* fmtpFmt =
		"a=fmtp:%d packetization-mode=1"
		";profile-level-id=%06X"
		";sprop-parameter-sets=%s\r\n";
  unsigned fmtpFmtSize = strlen(fmtpFmt)
		+ 3 /* max char len */
		+ 8 /* max unsigned len in hex */
		+ strlen(sprop_parameter_sets_str);
  char* fmtp = new char[fmtpFmtSize];
  sprintf(fmtp, fmtpFmt,
				  rtpPayloadFormat,
	  profile_level_id,
				  sprop_parameter_sets_str);
  fFmtpSDPLine = strDup(fmtp);
  delete[] fmtp;
#endif
 /* end of disabled eager SDP construction */
}

// Destructor: release the fragmenter filter and the cached SDP line.
MyH264VideoRTPSink::~MyH264VideoRTPSink()
{
	Medium::close(fOurFragmenter);
	// Detach fSource so base-class teardown doesn't touch the closed filter.
	fSource = NULL;
	delete[] fFmtpSDPLine;
}

// Factory wrapper around the constructor; returns a newly allocated sink.
MyH264VideoRTPSink*
MyH264VideoRTPSink::createNew(UsageEnvironment& env, Groupsock* RTPgs,
				  unsigned char rtpPayloadFormat,
				  unsigned profile_level_id,
				  char const* sprop_parameter_sets_str) {
	MyH264VideoRTPSink* newSink
		= new MyH264VideoRTPSink(env, RTPgs, rtpPayloadFormat,
					 profile_level_id, sprop_parameter_sets_str);
	return newSink;
}

// We can only packetize data coming from an H.264 video stream framer.
Boolean MyH264VideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) {
	Boolean const compatible = source.isH264VideoStreamFramer();
	return compatible;
}

// Start (or resume) streaming.  On first use, interpose a FU-A fragmenter
// between our source and us, so NAL units larger than one RTP packet get
// split per RFC 3984; then hand control to the parent class.
Boolean MyH264VideoRTPSink::continuePlaying() {
  if (fOurFragmenter == NULL) {
      unsigned const rtpHeaderSize = 12; // fixed RTP header length
      PRT_DBG("maxSize[%d]\n", OutPacketBuffer::maxSize);
      fOurFragmenter = new MyH264FUAFragmenter(envir(), fSource,
                                               OutPacketBuffer::maxSize,
                                               ourMaxPacketSize() - rtpHeaderSize);
      PRT_DBG("MyH264VideoRTPSink::continuePlaying\n");	//jay
      // From now on we read from the fragmenter, not the raw framer:
      fSource = fOurFragmenter;
  }

  return MultiFramedRTPSink::continuePlaying();
}

// Stop streaming: let the parent class stop the fragmenter (and through it
// our source), then dispose of the fragmenter and forget both pointers.
void MyH264VideoRTPSink::stopPlaying() {
  MultiFramedRTPSink::stopPlaying();

  Medium::close(fOurFragmenter);
  fOurFragmenter = NULL;
  fSource = NULL; // it pointed at the fragmenter, which is now gone
}

void MyH264VideoRTPSink::doSpecialFrameHandling(unsigned /*fragmentationOffset*/,
							  unsigned char* /*frameStart*/,
							  unsigned /*numBytesInFrame*/,
							  struct timeval frameTimestamp,
							  unsigned /*numRemainingBytes*/) {
	// Set the RTP 'M' (marker) bit iff
	// 1/ The most recently delivered fragment was the end of
	//		(or the only fragment of) an NAL unit, and
	// 2/ This NAL unit was the last NAL unit of an 'access unit' (i.e. video frame).
  if (fOurFragmenter != NULL) {
	  MyH264VideoStreamFramer* framerSource
			= (MyH264VideoStreamFramer*)(fOurFragmenter->inputSource());
		// This relies on our fragmenter's source being a "MPEG4VideoStreamFramer".
	  if (fOurFragmenter->lastFragmentCompletedNALUnit()
	&& framerSource != NULL && framerSource->currentNALUnitEndsAccessUnit()) {
		  setMarkerBit();
		}
	}

	setTimestamp(frameTimestamp);
//	printf("@@@pts %d %d\n", frameTimestamp.tv_sec, frameTimestamp.tv_usec);	//jay
}

// Each RTP packet carries at most one NAL unit (or one FU-A fragment of
// one), so a new frame may never begin in the middle of a packet.
Boolean MyH264VideoRTPSink
::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/,
				 unsigned /*numBytesInFrame*/) const {
	return False;
}
// Lazily build (and cache) the "a=fmtp:" SDP attribute line for this stream.
// Return the "a=fmtp:" SDP attribute line for this stream, building it on
// first use.  Construction is deferred to here (rather than the constructor)
// because the SPS/PPS -- and hence the profile-level-id -- are only known
// once the framer has parsed them out of the bitstream.  Blocks, driving the
// bitstream parser, until both an SPS and a PPS have been seen.
// Returns NULL if no fragmenter/framer has been wired up yet (i.e. if called
// before continuePlaying()).
char const* MyH264VideoRTPSink::auxSDPLine() {
    FUN_IN();
	if (fFmtpSDPLine == NULL) {
		// Guard: we need the fragmenter's upstream framer to exist.
		if (fOurFragmenter == NULL || fOurFragmenter->inputSource() == NULL) {
			FUN_OUT();
			return NULL;
		}
		MyH264VideoStreamFramer* framerSource = (MyH264VideoStreamFramer*)(fOurFragmenter->inputSource());

		// Drive the parser until both parameter sets are available:
		while (framerSource->getSPS() == NULL || framerSource->getPPS() == NULL) {
			// Don't hand a NULL pointer to "%s" -- that's undefined behavior:
			PRT_DBG("sps or pps is NULL framerSource->getSPS()[%s]\n",
				framerSource->getSPS() != NULL ? framerSource->getSPS() : "(null)");

			framerSource->fBitstreamParser->parse();
		}

		// profile-level-id is 3 raw bytes.  They MUST be unsigned: a plain
		// (possibly signed) char >= 0x80 would be sign-extended by the %02X
		// conversions below, printing up to 8 hex digits per byte and
		// overflowing the sized output buffer.
		unsigned char profile_level_id[3];
		memcpy(profile_level_id, framerSource->getProfileLevelID(), 3);

		PRT_DBG();

		// "<base64 SPS>,<base64 PPS>", bounded to the buffer size:
		unsigned const spropBufSize = 512;
		char* sprop_parameter_sets_str = new char[spropBufSize];
		snprintf(sprop_parameter_sets_str, spropBufSize, "%s,%s",
			 framerSource->getSPS(), framerSource->getPPS());

		char const* fmtpFmt =
			"a=fmtp:%d packetization-mode=1"
			"; profile-level-id=%02X%02X%02X"
			"; sprop-parameter-sets=%s\r\n";
		unsigned fmtpFmtSize = strlen(fmtpFmt)
			+ 3 /* max payload-type len */
			+ 8 /* margin for the hex conversions */
			+ strlen(sprop_parameter_sets_str)
			+ 1 /* trailing NUL */;
		char* fmtp = new char[fmtpFmtSize];
		snprintf(fmtp, fmtpFmtSize, fmtpFmt,
			rtpPayloadType(),
			profile_level_id[0],
			profile_level_id[1],
			profile_level_id[2],
			sprop_parameter_sets_str);
		fFmtpSDPLine = strDup(fmtp);

		delete[] fmtp;
		delete[] sprop_parameter_sets_str;
	}
    FUN_OUT();
	return fFmtpSDPLine;
}


////////// H264FUAFragmenter implementation //////////

// Constructor.  Wraps "inputSource" so that NAL units larger than
// "maxOutputPacketSize" are delivered as FU-A fragments (RFC 3984).
//   inputBufferMax      - largest whole NAL unit we must be able to buffer
//   maxOutputPacketSize - RTP payload budget per output frame/fragment
// The input buffer is one byte larger than the max NAL unit: byte [0] is a
// spare slot for the FU indicator, so all data offsets start at 1, and
// fNumValidDataBytes == 1 / fCurDataOffset == 1 denote "buffer empty".
MyH264FUAFragmenter::MyH264FUAFragmenter(UsageEnvironment& env,
						 FramedSource* inputSource,
						 unsigned inputBufferMax,
						 unsigned maxOutputPacketSize)
	: FramedFilter(env, inputSource),
	  fInputBufferSize(inputBufferMax+1), fMaxOutputPacketSize(maxOutputPacketSize),
	  fNumValidDataBytes(1), fCurDataOffset(1), fSaveNumTruncatedBytes(0),
	  fLastFragmentCompletedNALUnit(True) {
  fInputBuffer = new unsigned char[fInputBufferSize];
}

// Destructor: free the NAL-unit buffer; detach the input source so the
// subsequent ~FramedFilter doesn't delete a source we don't own.
MyH264FUAFragmenter::~MyH264FUAFragmenter() {
	fInputSource = NULL;
	delete[] fInputBuffer;
}

// Deliver the next RTP-sized chunk downstream.  fInputBuffer[0] is a spare
// byte reserved for the FU indicator, so data offsets start at 1 and
// fNumValidDataBytes == 1 means "no NAL unit buffered".  When a buffered NAL
// unit is too big for one packet, it is emitted as a sequence of FU-A
// fragments (RFC 3984), reusing bytes already sent to hold the 2-byte
// FU indicator + FU header of each subsequent fragment.
void MyH264FUAFragmenter::doGetNextFrame() {
  if (fNumValidDataBytes == 1) {
//  	 printf("		  Fragmenter: read new NAL unit\n");		//jay
		// We have no NAL unit data currently in the buffer.  Read a new one
		// (into offset 1, keeping byte 0 free for the FU indicator):
	  fInputSource->getNextFrame(&fInputBuffer[1], fInputBufferSize - 1,
						 afterGettingFrame, this,
						 FramedSource::handleClosure, this);
	} else {
//	printf("						  Fragmenter: deliver fragment to RTP sink\n");		//jay
		// We have NAL unit data in the buffer.  There are three cases to consider:
		// 1. There is a new NAL unit in the buffer, and it's small enough to deliver
		//	  to the RTP sink (as is).
		// 2. There is a new NAL unit in the buffer, but it's too large to deliver to
		//	  the RTP sink in its entirety.  Deliver the first fragment of this data,
		//	  as a FU-A packet, with one extra preceding header byte.
		// 3. There is a NAL unit in the buffer, and we've already delivered some
		//	  fragment(s) of this.  Deliver the next fragment of this data,
		//	  as a FU-A packet, with two extra preceding header bytes.

	  if (fMaxSize < fMaxOutputPacketSize) { // shouldn't happen, fMaxSize = 600000, fMaxOutputPacketSize = 1448
		  envir() << "H264FUAFragmenter::doGetNextFrame(): fMaxSize ("
				<< fMaxSize << ") is smaller than expected\n";
		} else {
		  // Cap each delivery at the per-packet payload budget:
		  fMaxSize = fMaxOutputPacketSize;
		}

	  fLastFragmentCompletedNALUnit = True; // by default

	  if (fCurDataOffset == 1) { // case 1 or 2
		  if (fNumValidDataBytes - 1 <= fMaxSize) { // case 1
		  
			printf("		  Fragmenter: small nalu\n");		//jay
			// Whole NAL unit fits in one packet; copy it out verbatim:
			memmove(fTo, &fInputBuffer[1], fNumValidDataBytes - 1);
			fFrameSize = fNumValidDataBytes - 1;
			fCurDataOffset = fNumValidDataBytes;
			
			} else { // case 2
			
				printf("		  Fragmenter: first fragment\n");		//jay
				// We need to send the NAL unit data as FU-A packets.  Deliver the first
				// packet now.  Note that we add FU indicator and FU header bytes to the front
				// of the packet (reusing the existing NAL header byte for the FU header).
				fInputBuffer[0] = (fInputBuffer[1] & 0xE0) | 28; // FU indicator (28 means FU-A)
				fInputBuffer[1] = 0x80 | (fInputBuffer[1] & 0x1F); // FU header (with S bit)
				memmove(fTo, fInputBuffer, fMaxSize);
				fFrameSize = fMaxSize;
				fCurDataOffset += fMaxSize - 1;
				fLastFragmentCompletedNALUnit = False;
			}
		} else { // case 3
		
			printf("		  Fragmenter: next fragment\n");		//jay
			// We are sending this NAL unit data as FU-A packets.  We've already sent the
			// first packet (fragment).  Now, send the next fragment.  Note that we add
			// FU indicator and FU header bytes to the front.	(We reuse these bytes that
			// we already sent for the first fragment, but clear the S bit, and add the E
			// bit if this is the last fragment.)
			fInputBuffer[fCurDataOffset-2] = fInputBuffer[0]; // FU indicator
			fInputBuffer[fCurDataOffset-1] = fInputBuffer[1]&~0x80; // FU header (no S bit)
			unsigned numBytesToSend = 2 + fNumValidDataBytes - fCurDataOffset;
			if (numBytesToSend > fMaxSize) {
				// We can't send all of the remaining data this time:
				numBytesToSend = fMaxSize;
				fLastFragmentCompletedNALUnit = False;
			} else {
			// This is the last fragment:
				fInputBuffer[fCurDataOffset-1] |= 0x40; // set the E bit in the FU header
				// Only now report any truncation that happened on read:
				fNumTruncatedBytes = fSaveNumTruncatedBytes;
			}
			memmove(fTo, &fInputBuffer[fCurDataOffset-2], numBytesToSend);
			fFrameSize = numBytesToSend;
			fCurDataOffset += numBytesToSend - 2;
		}

	  if (fCurDataOffset >= fNumValidDataBytes) {
			// We're done with this data.  Reset the pointers for receiving new data:
		  fNumValidDataBytes = fCurDataOffset = 1;
		}

		// Complete delivery to the client:
	  printf("						  Fragmenter: afterGetting\n");		//jay
	  FramedSource::afterGetting(this);
	}
}

void MyH264FUAFragmenter::afterGettingFrame(void* clientData, unsigned frameSize,
					  unsigned numTruncatedBytes,
					  struct timeval presentationTime,
					  unsigned durationInMicroseconds) {
  MyH264FUAFragmenter* fragmenter = (MyH264FUAFragmenter*)clientData;
	printf("			  Fragmenter::afterGettingFrame\n");	//jay
  fragmenter->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime,
				 durationInMicroseconds);
	printf("			  Fragmenter::afterGettingFrame - end\n");	//jay
}

// A new NAL unit has arrived from upstream: record it and its metadata,
// then immediately resume delivery to the downstream sink.
void MyH264FUAFragmenter::afterGettingFrame1(unsigned frameSize,
						 unsigned numTruncatedBytes,
						 struct timeval presentationTime,
						 unsigned durationInMicroseconds) {
  // Stash truncation until the final fragment is sent (see doGetNextFrame):
  fSaveNumTruncatedBytes = numTruncatedBytes;
  fPresentationTime = presentationTime;
  fDurationInMicroseconds = durationInMicroseconds;
  fNumValidDataBytes += frameSize; // buffer now holds 1 spare byte + frameSize

  doGetNextFrame();
}
