#include "pxRTSPClient.h"

CPxRTSPClient::CPxRTSPClient(void)
{
	// Initialize every member to its default value (mirrors openRTSP's defaults).
	allowProxyServers = False;
	controlConnectionUsesTCP = True;
	supportCodecSelection = False;

	strcpy_s(clientProtocolName, 16, "RTSP");

	ourRTSPClient = NULL;

	progName = NULL;
	env = NULL;
	ourClient = NULL;
	ourAuthenticator = NULL;
	streamURL = NULL;
	session = NULL;
	sessionTimerTask = NULL;
	sessionTimeoutBrokenServerTask = NULL;
	arrivalCheckTimerTask = NULL;
	interPacketGapCheckTimerTask = NULL;
	qosMeasurementTimerTask = NULL;
	periodicFileOutputTask = NULL;
	createReceivers = True;
	outputQuickTimeFile = False;
	generateMP4Format = False;
	qtOut = NULL;
	outputAVIFile = False;
	aviOut = NULL;
	audioOnly = False;
	videoOnly = False;
	singleMedium = NULL;
	verbosityLevel = 1; // by default, print verbose output
	duration = 0;
	durationSlop = -1.0; // extra seconds to play at the end
	initialSeekTime = 0.0f;
	initialAbsoluteSeekTime = NULL;
	initialAbsoluteSeekEndTime = NULL;
	scale = 1.0f;
	endTime = 0; // fix: was the no-op statement "endTime;", which left the member uninitialized
	interPacketGapMaxTime = 0;
	totNumPacketsReceived = ~0; // used if checking inter-packet gaps
	playContinuously = False;
	simpleRTPoffsetArg = -1;
	sendOptionsRequest = True;
	sendOptionsRequestOnly = False;
	oneFilePerFrame = False;
	notifyOnPacketArrival = False;
	sendKeepAlivesToBrokenServers = False;
	sessionTimeoutParameter = 0;
	streamUsingTCP = False;
	forceMulticastOnUnspecified = False;
	desiredPortNum = 0;
	tunnelOverHTTPPortNum = 0;
	username = NULL;
	password = NULL;
	proxyServerName = NULL;
	proxyServerPortNum = 0;
	desiredAudioRTPPayloadFormat = 0;
	mimeSubtype = NULL;
	movieWidth = 240; // default
	movieWidthOptionSet = False;
	movieHeight = 180; // default
	movieHeightOptionSet = False;
	movieFPS = 15; // default
	movieFPSOptionSet = False;
	fileNamePrefix = "";
	fileSinkBufferSize = 100000;
	socketInputBufferSize = 0;
	packetLossCompensate = False;
	syncStreams = False;
	generateHintTracks = False;
	waitForResponseToTEARDOWN = True;
	qosMeasurementIntervalMS = 0; // 0 means: Don't output QOS data
	userAgent = NULL;
	fileOutputInterval = 0; // seconds
	fileOutputSecondsSoFar = 0; // seconds
	createHandlerServerForREGISTERCommand = False;
	handlerServerForREGISTERCommandPortNum = 0;
	// fix: was the no-op statement "handlerServerForREGISTERCommand;", leaving the
	// pointer uninitialized even though it is later passed to Medium::close():
	handlerServerForREGISTERCommand = NULL;
	usernameForREGISTER = NULL;
	passwordForREGISTER = NULL;
	authDBForREGISTER = NULL;

	subsession = NULL;
	madeProgress = False;

	areAlreadyShuttingDown = False;
	shutdownExitCode = 0; // fix: was the no-op statement "shutdownExitCode;", leaving the exit code uninitialized

	qosRecordHead = NULL;
}

CPxRTSPClient::~CPxRTSPClient(void)
{
	// Intentionally empty: cleanup is driven by shutdown() -> continueAfterTEARDOWN(),
	// which closes the sinks/session/client and then exit()s the process.
}

// openRTSP
// Create the underlying live555 RTSPClient for "url", remembering it in "ourRTSPClient".
// Returns it as a generic Medium* (matching openRTSP's "createClient" interface).
Medium* CPxRTSPClient::createClient(UsageEnvironment& env, char const* url, int verbosityLevel, char const* applicationName) 
{
	// Fix: use this object's "tunnelOverHTTPPortNum" member (initialized in the
	// constructor) instead of re-declaring an extern global of the same name,
	// which shadowed the member and bypassed the per-instance setting.
	return ourRTSPClient = RTSPClient::createNew(env, url, verbosityLevel, applicationName, tunnelOverHTTPPortNum);
}

// Remember an externally created client, downcasting from the generic Medium*
// that createClient() returns.
void CPxRTSPClient::assignClient(Medium* client) 
{
	// static_cast documents the intended Medium* -> RTSPClient* downcast better
	// than the old C-style cast (and rejects unrelated-type conversions).
	ourRTSPClient = static_cast<RTSPClient*>(client);
}

// Issue an RTSP "OPTIONS" request; the server's reply is delivered to "afterFunc".
void CPxRTSPClient::getOptions(RTSPClient::responseHandler* afterFunc)
{
	RTSPClient* client = ourRTSPClient;
	client->sendOptionsCommand(afterFunc, ourAuthenticator);
}

// Issue an RTSP "DESCRIBE" request (asking for the stream's SDP description);
// the reply is delivered to "afterFunc".
void CPxRTSPClient::getSDPDescription(RTSPClient::responseHandler* afterFunc)
{
	RTSPClient* client = ourRTSPClient;
	client->sendDescribeCommand(afterFunc, ourAuthenticator);
}

// Issue an RTSP "SETUP" for one subsession; the reply is delivered to "afterFunc".
void CPxRTSPClient::setupSubsession(MediaSubsession* subsession,
	Boolean streamUsingTCP,
	Boolean forceMulticastOnUnspecified,
	RTSPClient::responseHandler* afterFunc)
{
	ourRTSPClient->sendSetupCommand(*subsession, afterFunc,
		False /*streamOutgoing*/,
		streamUsingTCP,
		forceMulticastOnUnspecified,
		ourAuthenticator);
}

// Issue an RTSP "PLAY" for the whole session, seeking by relative (NPT) time.
void CPxRTSPClient::startPlayingSession(MediaSession* session,
	double start,
	double end,
	float scale,
	RTSPClient::responseHandler* afterFunc)
{
	ourRTSPClient->sendPlayCommand(*session, afterFunc,
		start, end, scale,
		ourAuthenticator);
}

// Issue an RTSP "PLAY" for the whole session, seeking by 'absolute' time strings.
void CPxRTSPClient::startPlayingSession(MediaSession* session,
	char const* absStartTime,
	char const* absEndTime,
	float scale,
	RTSPClient::responseHandler* afterFunc)
{
	ourRTSPClient->sendPlayCommand(*session, afterFunc,
		absStartTime, absEndTime, scale,
		ourAuthenticator);
}

// Issue an RTSP "TEARDOWN" for the whole session; "afterFunc" may be NULL if
// the caller does not want to wait for the server's reply.
void CPxRTSPClient::tearDownSession(MediaSession* session, RTSPClient::responseHandler* afterFunc)
{
	RTSPClient* client = ourRTSPClient;
	client->sendTeardownCommand(*session, afterFunc, ourAuthenticator);
}

void CPxRTSPClient::setUserAgentString(char const* userAgentString) 
{
	ourRTSPClient->setUserAgentString(userAgentString);
}

// playCommon
// Adopt a freshly created RTSPClient and continue the startup sequence.
void CPxRTSPClient::continueAfterClientCreation0(RTSPClient* newRTSPClient, Boolean requestStreamingOverTCP) {
	if (newRTSPClient == NULL) return; // client creation failed; nothing to do

	streamUsingTCP = requestStreamingOverTCP;

	// Remember the client and the URL it was created for:
	ourClient = newRTSPClient;
	assignClient(ourClient);
	streamURL = newRTSPClient->url();

	// Having handled one "REGISTER" command (giving us a "rtsp://" URL to stream from),
	// we don't handle any more:
	Medium::close(handlerServerForREGISTERCommand);
	handlerServerForREGISTERCommand = NULL;

	continueAfterClientCreation1();
}

// Second stage of startup: set the User-Agent, then enter the RTSP command
// sequence (optionally skipping the initial "OPTIONS" round-trip).
void CPxRTSPClient::continueAfterClientCreation1()
{
	setUserAgentString(userAgent);

	if (!sendOptionsRequest)
	{
		// Skip straight past the "OPTIONS" step:
		continueAfterOPTIONS(NULL, 0, NULL);
		return;
	}

	// Begin by sending an "OPTIONS" command:
	getOptions(continueAfterOPTIONS);
}

//void CPxRTSPClient::continueAfterClientCreation1(UsageEnvironment& env) 
//{
//	setUserAgentString(userAgent);
//
//	if (sendOptionsRequest) 
//	{
//		// Begin by sending an "OPTIONS" command:
//		getOptions(continueAfterOPTIONS);
//	} 
//	else 
//	{
//		continueAfterOPTIONS(NULL, 0, NULL);
//	}
//}

// Response handler for the RTSP "OPTIONS" command. Either stops here (when only
// the OPTIONS result was wanted) or proceeds to request the SDP description.
void CPxRTSPClient::continueAfterOPTIONS(RTSPClient*, int resultCode, char* resultString) 
{
	env->m_eRTSPState = kePxRTSPState_OPTIONS;

	if (sendOptionsRequestOnly) 
	{
		if (resultCode != 0) {
			*env << clientProtocolName << " \"OPTIONS\" request failed: " << resultString << "\n";
		} else {
			*env << clientProtocolName << " \"OPTIONS\" request returned: " << resultString << "\n";
		}
		delete[] resultString;
		shutdown();
		// Fix: the old code fell through here and sent a "DESCRIBE" even though we
		// are shutting down (shutdown() only exit()s once the teardown completes).
		return;
	}

	delete[] resultString;

	// Next, get a SDP description for the stream:
	getSDPDescription(continueAfterDESCRIBE);
}

// Response handler for the RTSP "DESCRIBE" command: builds the MediaSession from
// the returned SDP, then initiates a receiver for each (selected) subsession.
void CPxRTSPClient::continueAfterDESCRIBE(RTSPClient*, int resultCode, char* resultString) 
{
	env->m_eRTSPState = kePxRTSPState_DESCRIBE;

	if (resultCode != 0) 
	{
		*env << "Failed to get a SDP description for the URL \"" << streamURL << "\": " << resultString << "\n";
		delete[] resultString;
		shutdown();
		// Fix: the old code fell through here, reusing the already-deleted
		// "resultString" as the SDP description and deleting it a second time.
		return;
	}

	char* sdpDescription = resultString;
	*env << "Opened URL \"" << streamURL << "\", returning a SDP description:\n" << sdpDescription << "\n";

	// Create a media session object from this SDP description:
	session = MediaSession::createNew(*env, sdpDescription);
	delete[] sdpDescription;
	if (session == NULL) {
		*env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
		shutdown();
		return; // fix: don't iterate over the NULL "session" below
	} else if (!session->hasSubsessions()) {
		*env << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
		shutdown();
		return; // fix: nothing to set up
	}

	// Then, setup the "RTPSource"s for the session:
	MediaSubsessionIterator iter(*session);
	MediaSubsession *subsession;
	Boolean madeProgress = False;
	char const* singleMediumToTest = singleMedium;
	while ((subsession = iter.next()) != NULL) {
		// If we've asked to receive only a single medium, then check this now:
		if (singleMediumToTest != NULL) {
			if (strcmp(subsession->mediumName(), singleMediumToTest) != 0) {
				*env << "Ignoring \"" << subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession, because we've asked to receive a single " << singleMedium
					<< " session only\n";
				continue;
			} else {
				// Receive this subsession only
				singleMediumToTest = "xxxxx";
				// this hack ensures that we get only 1 subsession of this type
			}
		}

		// Optionally force the client port numbers (incrementing by 2 per
		// subsession, since RTP uses an even/odd port pair):
		if (desiredPortNum != 0) {
			subsession->setClientPortNum(desiredPortNum);
			desiredPortNum += 2;
		}

		if (createReceivers) {
			if (!subsession->initiate(simpleRTPoffsetArg)) {
				*env << "Unable to create receiver for \"" << subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession: " << env->getResultMsg() << "\n";
			} else {
				*env << "Created receiver for \"" << subsession->mediumName()
					<< "/" << subsession->codecName() << "\" subsession (";
				if (subsession->rtcpIsMuxed()) {
					*env << "client port " << subsession->clientPortNum();
				} else {
					*env << "client ports " << subsession->clientPortNum()
						<< "-" << subsession->clientPortNum()+1;
				}
				*env << ")\n";
				madeProgress = True;

				if (subsession->rtpSource() != NULL) {
					// Because we're saving the incoming data, rather than playing
					// it in real time, allow an especially large time threshold
					// (1 second) for reordering misordered incoming packets:
					unsigned const thresh = 1000000; // 1 second
					subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);

					// Set the RTP source's OS socket buffer size as appropriate - either if we were explicitly asked (using -B),
					// or if the desired FileSink buffer size happens to be larger than the current OS socket buffer size.
					// (The latter case is a heuristic, on the assumption that if the user asked for a large FileSink buffer size,
					// then the input data rate may be large enough to justify increasing the OS socket buffer size also.)
					int socketNum = subsession->rtpSource()->RTPgs()->socketNum();
					unsigned curBufferSize = getReceiveBufferSize(*env, socketNum);
					if (socketInputBufferSize > 0 || fileSinkBufferSize > curBufferSize) {
						unsigned newBufferSize = socketInputBufferSize > 0 ? socketInputBufferSize : fileSinkBufferSize;
						newBufferSize = setReceiveBufferTo(*env, socketNum, newBufferSize);
						if (socketInputBufferSize > 0) { // The user explicitly asked for the new socket buffer size; announce it:
							*env << "Changed socket receive buffer size for the \""
								<< subsession->mediumName()
								<< "/" << subsession->codecName()
								<< "\" subsession from "
								<< curBufferSize << " to "
								<< newBufferSize << " bytes\n";
						}
					}
				}
			}
		} else {
			if (subsession->clientPortNum() == 0) {
				*env << "No client port was specified for the \""
					<< subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession.  (Try adding the \"-p <portNum>\" option.)\n";
			} else {
				madeProgress = True;
			}
		}
	}
	if (!madeProgress) shutdown();

	// Perform additional 'setup' on each subsession, before playing them:
	setupStreams();
}


// Response handler for the RTSP "SETUP" command (one call per subsession):
// reports the result, then loops back into setupStreams() for the next one.
void CPxRTSPClient::continueAfterSETUP(RTSPClient* client, int resultCode, char* resultString) 
{
	env->m_eRTSPState = kePxRTSPState_SETUP;

	if (resultCode != 0) {
		// The SETUP failed; report it and carry on with the remaining subsessions.
		*env << "Failed to setup \"" << subsession->mediumName()
			<< "/" << subsession->codecName()
			<< "\" subsession: " << resultString << "\n";
	} else {
		// Report which client port(s) this subsession is now using:
		*env << "Setup \"" << subsession->mediumName()
			<< "/" << subsession->codecName()
			<< "\" subsession (";
		if (subsession->rtcpIsMuxed()) {
			*env << "client port " << subsession->clientPortNum();
		} else {
			*env << "client ports " << subsession->clientPortNum()
				<< "-" << subsession->clientPortNum() + 1;
		}
		*env << ")\n";
		madeProgress = True;
	}
	delete[] resultString;

	// Record the session-timeout parameter the server reported:
	if (client != NULL) sessionTimeoutParameter = client->sessionTimeoutParameter();

	// Set up the next subsession, if any:
	setupStreams();
}

// Create the output sink(s) for the session: either one aggregate QuickTime/AVI
// file, or one FileSink per subsession. "periodicFilenameSuffix" is non-empty
// when output is being rolled over periodically (see createPeriodicOutputFiles()).
void CPxRTSPClient::createOutputFiles(char const* periodicFilenameSuffix) 
{
	char outFileName[1000] = {0};

	if (outputQuickTimeFile || outputAVIFile) 
	{
		if (periodicFilenameSuffix[0] == '\0') 
		{
			// Normally (unless the '-P <interval-in-seconds>' option was given) we output to 'stdout':
			sprintf(outFileName, "stdout");
		} 
		else 
		{
			// Otherwise output to a type-specific file name, containing "periodicFilenameSuffix":
			char const* prefix = fileNamePrefix[0] == '\0' ? "output" : fileNamePrefix;
			snprintf(outFileName, sizeof outFileName, "%s%s.%s", prefix, periodicFilenameSuffix,
				outputAVIFile ? "avi" : generateMP4Format ? "mp4" : "mov");
		}

		if (outputQuickTimeFile) 
		{
			qtOut = QuickTimeFileSink::createNew(*env, *session, outFileName,
				fileSinkBufferSize,
				movieWidth, movieHeight,
				movieFPS,
				packetLossCompensate,
				syncStreams,
				generateHintTracks,
				generateMP4Format);

			if (qtOut == NULL) 
			{
				*env << "Failed to create a \"QuickTimeFileSink\" for outputting to \""
					<< outFileName << "\": " << env->getResultMsg() << "\n";
				shutdown();
				// Fix: the old code went on to call qtOut->startPlaying() on the NULL
				// pointer whenever shutdown() returned (e.g. during an async TEARDOWN).
				return;
			}

			*env << "Outputting to the file: \"" << outFileName << "\"\n";
			qtOut->startPlaying(sessionAfterPlaying, NULL);
		} 
		else 
		{ // outputAVIFile
			aviOut = AVIFileSink::createNew(*env, *session, outFileName,
				fileSinkBufferSize,
				movieWidth, movieHeight,
				movieFPS,
				packetLossCompensate);

			if (aviOut == NULL) 
			{
				*env << "Failed to create an \"AVIFileSink\" for outputting to \""
					<< outFileName << "\": " << env->getResultMsg() << "\n";
				shutdown();
				return; // fix: same NULL-dereference hazard as the QuickTime branch above
			}

			*env << "Outputting to the file: \"" << outFileName << "\"\n";
			aviOut->startPlaying(sessionAfterPlaying, NULL);
		}
	} 
	else 
	{
		// Create and start "FileSink"s for each subsession:
		madeProgress = False;
		MediaSubsessionIterator iter(*session);
		while ((subsession = iter.next()) != NULL) {
			if (subsession->readSource() == NULL) continue; // was not initiated

			// Create an output file for each desired stream:
			if (singleMedium == NULL || periodicFilenameSuffix[0] != '\0') {
				// Output file name is
				//     "<filename-prefix><medium_name>-<codec_name>-<counter><periodicFilenameSuffix>"
				static unsigned streamCounter = 0;
				snprintf(outFileName, sizeof outFileName, "%s%s-%s-%d%s",
					fileNamePrefix, subsession->mediumName(),
					subsession->codecName(), ++streamCounter, periodicFilenameSuffix);
			} else {
				// When outputting a single medium only, we output to 'stdout
				// (unless the '-P <interval-in-seconds>' option was given):
				sprintf(outFileName, "stdout");
			}

			FileSink* fileSink = NULL;
			Boolean createOggFileSink = False; // by default

			if (strcmp(subsession->mediumName(), "video") == 0) 
			{
				if (strcmp(subsession->codecName(), "H264") == 0) 
				{
					// For H.264 video, use a special sink, and additionally write the
					// parameter-set NAL units (each preceded by a 0x00000001 start code)
					// at the head of the file so the raw output is decodable from the start:
					unsigned int num = 0;
					SPropRecord* sps = parseSPropParameterSets(subsession->fmtp_spropparametersets(), num);

					fileSink = H264VideoFileSink::createNew(*env, outFileName, 
						subsession->fmtp_spropparametersets(),
						fileSinkBufferSize, oneFilePerFrame);

					// Fix: the old code wrote sps[0] and sps[1] unconditionally (an
					// out-of-bounds read if fewer than 2 parameter sets were present)
					// and did not check "fileSink" for NULL before calling addData().
					if (fileSink != NULL && sps != NULL) {
						struct timeval tv = {0, 0};
						unsigned char start_code[4] = {0x00, 0x00, 0x00, 0x01};
						for (unsigned int i = 0; i < num && i < 2; ++i) {
							fileSink->addData(start_code, 4, tv);
							fileSink->addData(sps[i].sPropBytes, sps[i].sPropLength, tv);
						}
					}
					delete[] sps;
				} 
				else if (strcmp(subsession->codecName(), "H265") == 0) 
				{
					// For H.265 video stream, we use a special sink that adds 'start codes',
					// and (at the start) the VPS, SPS, and PPS NAL units:
					fileSink = H265VideoFileSink::createNew(*env, outFileName,
						subsession->fmtp_spropvps(),
						subsession->fmtp_spropsps(),
						subsession->fmtp_sproppps(),
						fileSinkBufferSize, oneFilePerFrame);
				} 
				else if (strcmp(subsession->codecName(), "THEORA") == 0) 
				{
					createOggFileSink = True;
				}
			} 
			else if (strcmp(subsession->mediumName(), "audio") == 0) 
			{
				if (strcmp(subsession->codecName(), "AMR") == 0 ||
					strcmp(subsession->codecName(), "AMR-WB") == 0) 
				{
					// For AMR audio streams, we use a special sink that inserts AMR frame hdrs:
					fileSink = AMRAudioFileSink::createNew(*env, outFileName,
						fileSinkBufferSize, oneFilePerFrame);
				} 
				else if (strcmp(subsession->codecName(), "VORBIS") == 0 ||
					strcmp(subsession->codecName(), "OPUS") == 0) 
				{
					createOggFileSink = True;
				}
			}

			if (createOggFileSink) 
			{
				fileSink = OggFileSink
					::createNew(*env, outFileName,
					subsession->rtpTimestampFrequency(), subsession->fmtp_config());
			} else if (fileSink == NULL) {
				// Normal case:
				fileSink = FileSink::createNew(*env, outFileName,
					fileSinkBufferSize, oneFilePerFrame);
			}
			subsession->sink = fileSink;

			if (subsession->sink == NULL) {
				*env << "Failed to create FileSink for \"" << outFileName
					<< "\": " << env->getResultMsg() << "\n";
			} else {
				if (singleMedium == NULL) {
					*env << "Created output file: \"" << outFileName << "\"\n";
				} else {
					*env << "Outputting data from the \"" << subsession->mediumName()
						<< "/" << subsession->codecName()
						<< "\" subsession to \"" << outFileName << "\"\n";
				}

				if (strcmp(subsession->mediumName(), "video") == 0 &&
					strcmp(subsession->codecName(), "MP4V-ES") == 0 &&
					subsession->fmtp_config() != NULL) 
				{
					// For MPEG-4 video RTP streams, the 'config' information
					// from the SDP description contains useful VOL etc. headers.
					// Insert this data at the front of the output file:
					unsigned configLen;
					unsigned char* configData
						= parseGeneralConfigStr(subsession->fmtp_config(), configLen);
					struct timeval timeNow;
					gettimeofday(&timeNow, NULL);
					fileSink->addData(configData, configLen, timeNow);
					delete[] configData;
				}

				subsession->sink->startPlaying(*(subsession->readSource()),
					subsessionAfterPlaying,
					subsession);

				// Also set a handler to be called if a RTCP "BYE" arrives
				// for this subsession:
				if (subsession->rtcpInstance() != NULL) 
				{
					subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, subsession);
				}

				madeProgress = True;
			}
		}
		if (!madeProgress) shutdown();
	}
}

// Open the next set of periodic output files (named for the interval being
// recorded) and schedule the next rollover.
void CPxRTSPClient::createPeriodicOutputFiles() {
	// Create a filename suffix that notes the time interval that's being recorded:
	char periodicFileNameSuffix[100] = {0};
	// Fix: use the bounded snprintf (as the commented-out original intended)
	// rather than the unbounded sprintf; the file already uses snprintf elsewhere.
	snprintf(periodicFileNameSuffix, sizeof periodicFileNameSuffix, "-%05d-%05d",
		fileOutputSecondsSoFar, fileOutputSecondsSoFar + fileOutputInterval);
	createOutputFiles(periodicFileNameSuffix);

	// Schedule an event for writing the next output file:
	periodicFileOutputTask
		= env->taskScheduler().scheduleDelayedTask(fileOutputInterval*1000000,
		(TaskFunc*)periodicFileOutputTimerHandler,
		(void*)NULL);
}

// Drive the per-subsession "SETUP" sequence: each call issues one SETUP and
// returns; continueAfterSETUP() calls back here for the next subsession. Once
// all are set up, create the output files and issue the "PLAY".
void CPxRTSPClient::setupStreams() {
	static MediaSubsessionIterator* setupIter = NULL;
	if (setupIter == NULL) setupIter = new MediaSubsessionIterator(*session);
	while ((subsession = setupIter->next()) != NULL) {
		// We have another subsession left to set up:
		if (subsession->clientPortNum() == 0) continue; // port # was not set

		setupSubsession(subsession, streamUsingTCP, forceMulticastOnUnspecified, continueAfterSETUP);
		return;
	}

	// We're done setting up subsessions.
	delete setupIter;
	setupIter = NULL; // fix: reset so a later call doesn't dereference the freed iterator
	if (!madeProgress) shutdown();

	// Create output files:
	if (createReceivers) {

		// NOTE(review): hard-coded 50-second periodic file output ("add by gzl")
		// overrides any previously configured fileOutputInterval — confirm intended.
		fileOutputInterval = 50; // add by gzl

		if (fileOutputInterval > 0) 
		{
			createPeriodicOutputFiles();
		} else {
			createOutputFiles("");
		}
	}

	// Finally, start playing each subsession, to start the data flow:
	if (duration == 0) {
		if (scale > 0) duration = session->playEndTime() - initialSeekTime; // use SDP end time
		else if (scale < 0) duration = initialSeekTime;
	}
	if (duration < 0) duration = 0.0;

	// Compute the end time from the seek point, duration, and play direction:
	endTime = initialSeekTime;
	if (scale > 0) {
		if (duration <= 0) endTime = -1.0f;
		else endTime = initialSeekTime + duration;
	} else {
		endTime = initialSeekTime - duration;
		if (endTime < 0) endTime = 0.0f;
	}

	// Prefer 'absolute' seek times if we (or the server's SDP) specified them:
	char const* absStartTime = initialAbsoluteSeekTime != NULL ? initialAbsoluteSeekTime : session->absStartTime();
	char const* absEndTime = initialAbsoluteSeekEndTime != NULL ? initialAbsoluteSeekEndTime : session->absEndTime();
	if (absStartTime != NULL) {
		// Either we or the server have specified that seeking should be done by 'absolute' time:
		startPlayingSession(session, absStartTime, absEndTime, scale, continueAfterPLAY);
	} else {
		// Normal case: Seek by relative time (NPT):
		startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
	}
}

// Response handler for the RTSP "PLAY" command: on success, optionally starts
// QOS measurements, schedules the session-duration timer, and arms the
// packet-arrival / inter-packet-gap / keep-alive checks.
void CPxRTSPClient::continueAfterPLAY(RTSPClient*, int resultCode, char* resultString) 
{
	env->m_eRTSPState = kePxRTSPState_PLAY;

	if (resultCode != 0) {
		*env << "Failed to start playing session: " << resultString << "\n";
		delete[] resultString;
		shutdown();
		return;
	} else {
		*env << "Started playing session\n";
	}
	delete[] resultString;

	if (qosMeasurementIntervalMS > 0) {
		// Begin periodic QOS measurements:
		beginQOSMeasurement();
	}

	// Figure out how long to delay (if at all) before shutting down, or
	// repeating the playing
	Boolean timerIsBeingUsed = False;
	double secondsToDelay = duration;
	if (duration > 0) {
		// First, adjust "duration" based on any change to the play range (that was specified in the "PLAY" response):
		double rangeAdjustment = (session->playEndTime() - session->playStartTime()) - (endTime - initialSeekTime);
		if (duration + rangeAdjustment > 0.0) duration += rangeAdjustment;

		timerIsBeingUsed = True;
		double absScale = scale > 0 ? scale : -scale; // ASSERT: scale != 0
		// Wall-clock play time is the media duration divided by the playback
		// speed, plus "durationSlop" extra seconds at the end:
		secondsToDelay = duration/absScale + durationSlop;

		int64_t uSecsToDelay = (int64_t)(secondsToDelay*1000000.0);
		sessionTimerTask = env->taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)sessionTimerHandler, (void*)NULL);
	}

	char const* actionString
		= createReceivers? "Receiving streamed data":"Data is being streamed";
	if (timerIsBeingUsed) {
		*env << actionString
			<< " (for up to " << secondsToDelay
			<< " seconds)...\n";
	} else {
#ifdef USE_SIGNALS
		pid_t ourPid = getpid();
		*env << actionString
			<< " (signal with \"kill -HUP " << (int)ourPid
			<< "\" or \"kill -USR1 " << (int)ourPid
			<< "\" to terminate)...\n";
#else
		*env << actionString << "...\n";
#endif
	}

	// Clear any stale keep-alive timer token before re-arming the checks below:
	sessionTimeoutBrokenServerTask = NULL;

	// Watch for incoming packets (if desired):
	checkForPacketArrival(NULL);
	checkInterPacketGaps(NULL);
	checkSessionTimeoutBrokenServer(NULL);
}

void CPxRTSPClient::closeMediaSinks() {
	Medium::close(qtOut); qtOut = NULL;
	Medium::close(aviOut); aviOut = NULL;

	if (session == NULL) return;
	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) {
		Medium::close(subsession->sink);
		subsession->sink = NULL;
	}
}

void CPxRTSPClient::subsessionAfterPlaying(void* clientData) 
{
	// Begin by closing this media subsession's stream:
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	// Next, check whether *all* subsessions' streams have now been closed:
	MediaSession& session = subsession->parentSession();
	MediaSubsessionIterator iter(session);
	while ((subsession = iter.next()) != NULL) {
		if (subsession->sink != NULL) return; // this subsession is still active
	}

	// All subsessions' streams have now been closed
	sessionAfterPlaying();
}

void CPxRTSPClient::subsessionByeHandler(void* clientData) 
{
	struct timeval timeNow;
	gettimeofday(&timeNow, NULL);
	unsigned secsDiff = timeNow.tv_sec - startTime.tv_sec;

	MediaSubsession* subsession = (MediaSubsession*)clientData;
	*env << "Received RTCP \"BYE\" on \"" << subsession->mediumName()
		<< "/" << subsession->codecName()
		<< "\" subsession (after " << secsDiff
		<< " seconds)\n";

	// Act now as if the subsession had closed:
	subsessionAfterPlaying(subsession);
}

void CPxRTSPClient::sessionAfterPlaying(void* /*clientData*/) 
{
	if (!playContinuously) {
		shutdown(0);
	} else {
		// We've been asked to play the stream(s) over again.
		// First, reset state from the current session:
		if (env != NULL) {
			env->taskScheduler().unscheduleDelayedTask(periodicFileOutputTask);
			env->taskScheduler().unscheduleDelayedTask(sessionTimerTask);
			env->taskScheduler().unscheduleDelayedTask(sessionTimeoutBrokenServerTask);
			env->taskScheduler().unscheduleDelayedTask(arrivalCheckTimerTask);
			env->taskScheduler().unscheduleDelayedTask(interPacketGapCheckTimerTask);
			env->taskScheduler().unscheduleDelayedTask(qosMeasurementTimerTask);
		}
		totNumPacketsReceived = ~0;

		startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
	}
}

// Fires when the scheduled play duration has elapsed: clear the timer token
// and wrap up the session.
void CPxRTSPClient::sessionTimerHandler(void* /*clientData*/) 
{
	sessionTimerTask = NULL;
	sessionAfterPlaying();
}

// Fires at each periodic-output interval: roll the output files over by
// closing the current set and opening the next.
void CPxRTSPClient::periodicFileOutputTimerHandler(void* /*clientData*/) 
{
	// Another interval's worth of data has been written:
	fileOutputSecondsSoFar += fileOutputInterval;

	closeMediaSinks();          // close the existing output files
	createPeriodicOutputFiles(); // then open the next set (and re-arm the timer)
}

void CPxRTSPClient::scheduleNextQOSMeasurement() 
{
	nextQOSMeasurementUSecs += qosMeasurementIntervalMS*1000;
	struct timeval timeNow;
	gettimeofday(&timeNow, NULL);
	unsigned timeNowUSecs = timeNow.tv_sec*1000000 + timeNow.tv_usec;
	int usecsToDelay = nextQOSMeasurementUSecs - timeNowUSecs;

	qosMeasurementTimerTask = env->taskScheduler().scheduleDelayedTask(
		usecsToDelay, (TaskFunc*)periodicQOSMeasurement, (void*)NULL);
}

void CPxRTSPClient::periodicQOSMeasurement(void* /*clientData*/) 
{
	struct timeval timeNow;
	gettimeofday(&timeNow, NULL);

	for (qosMeasurementRecord* qosRecord = qosRecordHead;
		qosRecord != NULL; qosRecord = qosRecord->fNext) {
			qosRecord->periodicQOSMeasurement(timeNow);
	}

	// Do this again later:
	scheduleNextQOSMeasurement();
}

// Take one QOS sample for this record: update the min/max bandwidth and
// packet-loss figures from the source's reception statistics.
void qosMeasurementRecord::periodicQOSMeasurement(struct timeval const& timeNow) 
{
	// Elapsed time since the previous sample:
	unsigned secsDiff = timeNow.tv_sec - measurementEndTime.tv_sec;
	int usecsDiff = timeNow.tv_usec - measurementEndTime.tv_usec;
	double timeDiff = secsDiff + usecsDiff/1000000.0;
	measurementEndTime = timeNow;

	// Assume that there's only one SSRC source (usually the case):
	RTPReceptionStatsDB::Iterator statsIter(fSource->receptionStatsDB());
	RTPReceptionStats* stats = statsIter.next(True);
	if (stats == NULL) return;

	// Bandwidth (kbits/sec) over this interval:
	double kBytesTotalNow = stats->totNumKBytesReceived();
	double kBytesDeltaNow = kBytesTotalNow - kBytesTotal;
	kBytesTotal = kBytesTotalNow;

	double kbpsNow = timeDiff == 0.0 ? 0.0 : 8*kBytesDeltaNow/timeDiff;
	if (kbpsNow < 0.0) kbpsNow = 0.0; // in case of roundoff error
	if (kbpsNow < kbits_per_second_min) kbits_per_second_min = kbpsNow;
	if (kbpsNow > kbits_per_second_max) kbits_per_second_max = kbpsNow;

	// Packet loss over this interval:
	unsigned totReceivedNow = stats->totNumPacketsReceived();
	unsigned totExpectedNow = stats->totNumPacketsExpected();
	unsigned deltaReceivedNow = totReceivedNow - totNumPacketsReceived;
	unsigned deltaExpectedNow = totExpectedNow - totNumPacketsExpected;
	totNumPacketsReceived = totReceivedNow;
	totNumPacketsExpected = totExpectedNow;

	double lossFractionNow = deltaExpectedNow == 0 ? 0.0
		: 1.0 - deltaReceivedNow/(double)deltaExpectedNow;
	// (lossFractionNow may go negative when reordered packets arrive late)
	if (lossFractionNow < packet_loss_fraction_min) packet_loss_fraction_min = lossFractionNow;
	if (lossFractionNow > packet_loss_fraction_max) packet_loss_fraction_max = lossFractionNow;
}

void CPxRTSPClient::beginQOSMeasurement() 
{
	// Set up a measurement record for each active subsession:
	struct timeval startTime;
	gettimeofday(&startTime, NULL);
	nextQOSMeasurementUSecs = startTime.tv_sec*1000000 + startTime.tv_usec;
	qosMeasurementRecord* qosRecordTail = NULL;
	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) 
	{
		RTPSource* src = subsession->rtpSource();
		if (src == NULL) continue;

		qosMeasurementRecord* qosRecord
			= new qosMeasurementRecord(startTime, src);
		if (qosRecordHead == NULL) qosRecordHead = qosRecord;
		if (qosRecordTail != NULL) qosRecordTail->fNext = qosRecord;
		qosRecordTail  = qosRecord;
	}

	// Then schedule the first of the periodic measurements:
	scheduleNextQOSMeasurement();
}

// Print the accumulated QOS statistics for every active subsession, walking the
// qosMeasurementRecord list in parallel with the session's subsessions.
// (Called from shutdown() when qosMeasurementIntervalMS > 0.)
void CPxRTSPClient::printQOSData(int exitCode) 
{
	*env << "begin_QOS_statistics\n";

	// Print out stats for each active subsession:
	qosMeasurementRecord* curQOSRecord = qosRecordHead;
	if (session != NULL) {
		MediaSubsessionIterator iter(*session);
		MediaSubsession* subsession;
		while ((subsession = iter.next()) != NULL) {
			RTPSource* src = subsession->rtpSource();
			if (src == NULL) continue; // skip subsessions that were never initiated

			*env << "subsession\t" << subsession->mediumName()
				<< "/" << subsession->codecName() << "\n";

			unsigned numPacketsReceived = 0, numPacketsExpected = 0;

			if (curQOSRecord != NULL) {
				numPacketsReceived = curQOSRecord->totNumPacketsReceived;
				numPacketsExpected = curQOSRecord->totNumPacketsExpected;
			}
			*env << "num_packets_received\t" << numPacketsReceived << "\n";
			*env << "num_packets_lost\t" << int(numPacketsExpected - numPacketsReceived) << "\n";

			if (curQOSRecord != NULL) {
				// Total measured wall-clock time for this record:
				unsigned secsDiff = curQOSRecord->measurementEndTime.tv_sec
					- curQOSRecord->measurementStartTime.tv_sec;
				int usecsDiff = curQOSRecord->measurementEndTime.tv_usec
					- curQOSRecord->measurementStartTime.tv_usec;
				double measurementTime = secsDiff + usecsDiff/1000000.0;
				*env << "elapsed_measurement_time\t" << measurementTime << "\n";

				*env << "kBytes_received_total\t" << curQOSRecord->kBytesTotal << "\n";

				*env << "measurement_sampling_interval_ms\t" << qosMeasurementIntervalMS << "\n";

				if (curQOSRecord->kbits_per_second_max == 0) {
					// special case: we didn't receive any data:
					*env <<
						"kbits_per_second_min\tunavailable\n"
						"kbits_per_second_ave\tunavailable\n"
						"kbits_per_second_max\tunavailable\n";
				} else {
					*env << "kbits_per_second_min\t" << curQOSRecord->kbits_per_second_min << "\n";
					*env << "kbits_per_second_ave\t"
						<< (measurementTime == 0.0 ? 0.0 : 8*curQOSRecord->kBytesTotal/measurementTime) << "\n";
					*env << "kbits_per_second_max\t" << curQOSRecord->kbits_per_second_max << "\n";
				}

				*env << "packet_loss_percentage_min\t" << 100*curQOSRecord->packet_loss_fraction_min << "\n";
				double packetLossFraction = numPacketsExpected == 0 ? 1.0
					: 1.0 - numPacketsReceived/(double)numPacketsExpected;
				if (packetLossFraction < 0.0) packetLossFraction = 0.0;
				*env << "packet_loss_percentage_ave\t" << 100*packetLossFraction << "\n";
				*env << "packet_loss_percentage_max\t"
					<< (packetLossFraction == 1.0 ? 100.0 : 100*curQOSRecord->packet_loss_fraction_max) << "\n";

				// Inter-packet-gap stats come from the source's reception DB;
				// assume that there's only one SSRC source (usually the case):
				RTPReceptionStatsDB::Iterator statsIter(src->receptionStatsDB());
				RTPReceptionStats* stats = statsIter.next(True);
				if (stats != NULL) {
					*env << "inter_packet_gap_ms_min\t" << stats->minInterPacketGapUS()/1000.0 << "\n";
					struct timeval totalGaps = stats->totalInterPacketGaps();
					double totalGapsMS = totalGaps.tv_sec*1000.0 + totalGaps.tv_usec/1000.0;
					unsigned totNumPacketsReceived = stats->totNumPacketsReceived();
					*env << "inter_packet_gap_ms_ave\t"
						<< (totNumPacketsReceived == 0 ? 0.0 : totalGapsMS/totNumPacketsReceived) << "\n";
					*env << "inter_packet_gap_ms_max\t" << stats->maxInterPacketGapUS()/1000.0 << "\n";
				}

				curQOSRecord = curQOSRecord->fNext;
			}
		}
	}

	*env << "end_QOS_statistics\n";
	// NOTE(review): deleting only the head assumes qosMeasurementRecord's
	// destructor deletes "fNext" recursively (as in openRTSP) — otherwise the
	// rest of the list leaks. "qosRecordHead" is also left dangling; confirm
	// this is only ever reached on the exit path.
	delete qosRecordHead;
}

void CPxRTSPClient::shutdown(int exitCode) 
{
	if (areAlreadyShuttingDown) return; // in case we're called after receiving a RTCP "BYE" while in the middle of a "TEARDOWN".
	areAlreadyShuttingDown = True;

	shutdownExitCode = exitCode;
	if (env != NULL) {
		env->taskScheduler().unscheduleDelayedTask(periodicFileOutputTask);
		env->taskScheduler().unscheduleDelayedTask(sessionTimerTask);
		env->taskScheduler().unscheduleDelayedTask(sessionTimeoutBrokenServerTask);
		env->taskScheduler().unscheduleDelayedTask(arrivalCheckTimerTask);
		env->taskScheduler().unscheduleDelayedTask(interPacketGapCheckTimerTask);
		env->taskScheduler().unscheduleDelayedTask(qosMeasurementTimerTask);
	}

	if (qosMeasurementIntervalMS > 0) {
		printQOSData(exitCode);
	}

	// Teardown, then shutdown, any outstanding RTP/RTCP subsessions
	Boolean shutdownImmediately = True; // by default
	if (session != NULL) {
		RTSPClient::responseHandler* responseHandlerForTEARDOWN = NULL; // unless:
		if (waitForResponseToTEARDOWN) {
			shutdownImmediately = False;
			responseHandlerForTEARDOWN = continueAfterTEARDOWN;
		}
		tearDownSession(session, responseHandlerForTEARDOWN);
	}

	if (shutdownImmediately) continueAfterTEARDOWN(NULL, 0, NULL);
}

// Final stage of shutdown(): runs either immediately (when we are not
// waiting for the server), or as the response handler once the server has
// answered our "TEARDOWN" request. Closes all media objects and exits the
// process with the exit code recorded by shutdown().
void CPxRTSPClient::continueAfterTEARDOWN(RTSPClient*, int /*resultCode*/, char* resultString) 
{
	env->m_eRTSPState = kePxRTSPState_TEARDOWN;

	// The response string (if any) was heap-allocated for us by the
	// RTSP client; we own it here:
	delete[] resultString;

	// Now that we've stopped any more incoming data from arriving, close our output files:
	closeMediaSinks();
	Medium::close(session);

	// Finally, shut down our client:
	delete ourAuthenticator;
	delete authDBForREGISTER;
	Medium::close(ourClient);

	// Adios...
	exit(shutdownExitCode);
}

// Signal-triggered shutdown entry point: logs the event and starts a
// shutdown that does NOT wait on the server, so we terminate even if the
// server never answers our TEARDOWN.
void CPxRTSPClient::signalHandlerShutdown(int /*sig*/) 
{
	*env << "Got shutdown signal\n";
	waitForResponseToTEARDOWN = False; // to ensure that we end, even if the server does not respond to our TEARDOWN
	shutdown(0);
}

void CPxRTSPClient::checkForPacketArrival(void* /*clientData*/) 
{
	if (!notifyOnPacketArrival) return; // we're not checking

	// Check each subsession, to see whether it has received data packets:
	unsigned numSubsessionsChecked = 0;
	unsigned numSubsessionsWithReceivedData = 0;
	unsigned numSubsessionsThatHaveBeenSynced = 0;

	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) {
		RTPSource* src = subsession->rtpSource();
		if (src == NULL) continue;
		++numSubsessionsChecked;

		if (src->receptionStatsDB().numActiveSourcesSinceLastReset() > 0) {
			// At least one data packet has arrived
			++numSubsessionsWithReceivedData;
		}
		if (src->hasBeenSynchronizedUsingRTCP()) {
			++numSubsessionsThatHaveBeenSynced;
		}
	}

	unsigned numSubsessionsToCheck = numSubsessionsChecked;
	// Special case for "QuickTimeFileSink"s and "AVIFileSink"s:
	// They might not use all of the input sources:
	if (qtOut != NULL) {
		numSubsessionsToCheck = qtOut->numActiveSubsessions();
	} else if (aviOut != NULL) {
		numSubsessionsToCheck = aviOut->numActiveSubsessions();
	}

	Boolean notifyTheUser;
	if (!syncStreams) {
		notifyTheUser = numSubsessionsWithReceivedData > 0; // easy case
	} else {
		notifyTheUser = numSubsessionsWithReceivedData >= numSubsessionsToCheck
			&& numSubsessionsThatHaveBeenSynced == numSubsessionsChecked;
		// Note: A subsession with no active sources is considered to be synced
	}
	if (notifyTheUser) 
	{
		struct timeval timeNow;
		gettimeofday(&timeNow, NULL);
		char timestampStr[100];
		sprintf(timestampStr, "%ld%03ld", timeNow.tv_sec, (long)(timeNow.tv_usec/1000));
		*env << (syncStreams ? "Synchronized d" : "D")
			<< "ata packets have begun arriving [" << timestampStr << "]\007\n";
		return;
	}

	// No luck, so reschedule this check again, after a delay:
	int uSecsToDelay = 100000; // 100 ms
	arrivalCheckTimerTask
		= env->taskScheduler().scheduleDelayedTask(uSecsToDelay,
		(TaskFunc*)checkForPacketArrival, NULL);
}

void CPxRTSPClient::checkInterPacketGaps(void* /*clientData*/) 
{
	if (interPacketGapMaxTime == 0) return; // we're not checking

	// Check each subsession, counting up how many packets have been received:
	unsigned newTotNumPacketsReceived = 0;

	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) {
		RTPSource* src = subsession->rtpSource();
		if (src == NULL) continue;
		newTotNumPacketsReceived += src->receptionStatsDB().totNumPacketsReceived();
	}

	if (newTotNumPacketsReceived == totNumPacketsReceived) {
		// No additional packets have been received since the last time we
		// checked, so end this stream:
		*env << "Closing session, because we stopped receiving packets.\n";
		interPacketGapCheckTimerTask = NULL;
		sessionAfterPlaying();
	} else {
		totNumPacketsReceived = newTotNumPacketsReceived;
		// Check again, after the specified delay:
		interPacketGapCheckTimerTask
			= env->taskScheduler().scheduleDelayedTask(interPacketGapMaxTime*1000000,
			(TaskFunc*)checkInterPacketGaps, NULL);
	}
}

// Keep-alive loop for servers that don't refresh the session from RTCP
// reports: periodically sends an "OPTIONS" request, slightly more often
// than the session-timeout interval.
void CPxRTSPClient::checkSessionTimeoutBrokenServer(void* /*clientData*/) 
{
	if (!sendKeepAlivesToBrokenServers) return; // we're not checking

	// Send an "OPTIONS" request on every call except the very first
	// (the task token is still NULL the first time through):
	if (sessionTimeoutBrokenServerTask != NULL) {
		getOptions(NULL);
	}

	unsigned timeoutSecs = sessionTimeoutParameter;
	if (timeoutSecs == 0) timeoutSecs = 60; // default session timeout

	// Reduce the interval a little, to be on the safe side:
	unsigned delaySecs;
	if (timeoutSecs <= 5) {
		delaySecs = 1;
	} else {
		delaySecs = timeoutSecs - 5;
	}

	sessionTimeoutBrokenServerTask 
		= env->taskScheduler().scheduleDelayedTask(delaySecs*1000000,
		(TaskFunc*)checkSessionTimeoutBrokenServer, NULL);
}

// Windows replacement for POSIX gettimeofday(): derives the Unix-epoch
// wall-clock time from the 100ns-resolution system clock. The "tz"
// argument is accepted for signature compatibility but ignored.
int CPxRTSPClient::gettimeofday(struct timeval *tv, void* tz) 
{
	FILETIME ft;
	GetSystemTimeAsFileTime(&ft);

	// 100-nanosecond intervals since January 1, 1601 (UTC):
	uint64_t hundredNanos = ((uint64_t) ft.dwHighDateTime << 32) | ft.dwLowDateTime;

	// 116444736000000000 = offset between the Windows epoch (1601) and the
	// Unix epoch (1970) in 100ns units. It is an exact multiple of one
	// second, so the sub-second part can be taken from the raw counter.
	tv->tv_sec = (long) ((hundredNanos - 116444736000000000LL) / 10000000LL);
	tv->tv_usec = (long) ((hundredNanos / 10LL) % 1000000LL);

	return 0;
}


