#include "stdafx.h"
#include "IPCameraStreamReceiver.h"

#include "BasicUsageEnvironment.hh"
#include "RTSPOverHTTPServer.hh"
#include "version.hh"
#include <stdlib.h>
#include "IPCameraVideoSink.h"
#include "IPCameraAudioSink.h"

#include "GlobalInfoInstance.h"
#include "IPCameraStreamSource.h"
#include "UtilInstance.h"
#include "LogFileWriter.h"

void subsessionAfterPlaying(void* clientData);
void sessionAfterPlaying(void* clientData);
void sessionTimerHandler(void* clientData);
void subsessionByeHandler(void* clientData);

void periodicQOSMeasurement(void* clientData);
void checkInterPacketGaps(void* clientData);
void checkForPacketArrival(void* clientData);

// Constructs a receiver for one camera stream.
// @param int_stream_id  index of the stream/buffer this receiver feeds.
// Initializes every openRTSP-style option to its default and defaults the
// transport to RTP-over-RTSP-over-TCP (RTSP_TCP_MODE).  The live555
// scheduler/environment are created later, in Run().
CIPCameraStreamReceiver::CIPCameraStreamReceiver(int int_stream_id)
{
	m_int_stream_id = int_stream_id;
	m_mode = RTSP_TCP_MODE;
	// NOTE(review): unbounded strcpy — assumes moduleName is large enough
	// for these fixed labels; confirm its declared size.
	strcpy(moduleName, "RTSP TCP Client");

	//in playcommon
	// openRTSP ("playCommon") state: client/session handles and timer tasks.
	ourClient = NULL;
	session = NULL;
	sessionTimerTask = NULL;
	arrivalCheckTimerTask = NULL;
	interPacketGapCheckTimerTask = NULL;
	qosMeasurementTimerTask = NULL;
	createReceivers = True;
	outputQuickTimeFile = False;
	generateMP4Format = False;
	qtOut = NULL;
	outputAVIFile = False;
	aviOut = NULL;
	audioOnly = False;
	videoOnly = False;
	singleMedium = NULL;
	verbosityLevel = 1; // by default, print verbose output
	duration = 0;
	durationSlop = -1.0; // extra seconds to play at the end
	initialSeekTime = 0.0f;
	scale = 1.0f;
	interPacketGapMaxTime = 0;
	totNumPacketsReceived = ~0; // used if checking inter-packet gaps
	playContinuously = True;//False; 
	simpleRTPoffsetArg = -1;
	sendOptionsRequest = False;
	sendOptionsRequestOnly = False;
	oneFilePerFrame = False;
	notifyOnPacketArrival = False;
	streamUsingTCP = True;    //RTP/RTSP,  RTP/RTSP/TCP
	tunnelOverHTTPPortNum = 0;
	username = NULL;
	password = NULL;
	proxyServerName = NULL;
	proxyServerPortNum = 0;
	desiredAudioRTPPayloadFormat = 0;
	mimeSubtype = NULL;
	// Fallback movie geometry/frame-rate used when -w/-h/-f are absent.
	movieWidth = 1280; // default
	movieWidthOptionSet = False;
	movieHeight = 720; // default
	movieHeightOptionSet = False;
	movieFPS = 30; // default
	movieFPSOptionSet = False;
	fileNamePrefix = "";
	fileSinkBufferSize = 500 * 1024;
	socketInputBufferSize = 0;
	packetLossCompensate = True;
	syncStreams = False;
	generateHintTracks = False;
	qosMeasurementIntervalMS = 0; // 0 means: Don't output QOS data
	statusCode = 0;

	qosRecordHead = NULL;

	//KIPCameraStreamReceiver
	// Receiver-specific policy flags (fixed; there is no setter for these).
	allowProxyServers = False;
	controlConnectionUsesTCP = True;
	supportCodecSelection = False;
	clientProtocolName = "RTSP";

	scheduler = NULL;
	env = NULL;

	// NOTE(review): assumes spsParameter is at least 1024 bytes — confirm.
	ZeroMemory(spsParameter, 1024);
	m_isStoped = FALSE;
	m_isStarted = FALSE;
	m_needShutdown = FALSE;
}

// Intentionally empty destructor.
// NOTE(review): nothing allocated here or in Run() (ourClient, session,
// scheduler, env, argv buffers) is released by this destructor — teardown
// is presumably driven elsewhere (tearDownStreams/closeMediaSinks); verify
// there is no leak on receiver destruction.
CIPCameraStreamReceiver::~CIPCameraStreamReceiver()
{

}

// Thread entry point for the receiver.
// Builds a synthetic argv[] for the configured mode (RTSP / RTSP-over-TCP /
// RTSP-over-HTTP), parses it with an openRTSP-style option loop, performs
// DESCRIBE, extracts profile-level-id / sprop-parameter-sets from the SDP,
// SETUPs each subsession, attaches IPCameraVideoSink/IPCameraAudioSink
// receivers, starts playback, and finally blocks in the live555 event loop
// until isRunning is cleared.
// @param This  pointer to this same receiver object, passed by the thread
//              starter.  NOTE(review): 'This' and the implicit 'this' are
//              used interchangeably below — confirm they always match.
void CIPCameraStreamReceiver::Run(void* This)
{ 
	// Begin by setting up our usage environment:
	scheduler = BasicTaskScheduler::createNew();
	env = BasicUsageEnvironment::createNew(*scheduler);
	//////////////
	// Fabricate a command line so the openRTSP option parser below can be
	// reused unchanged.  argcTemp remembers how many argv entries to free.
	int argc = -1;
	int argcTemp = -1;
	char* argvTemp[4];
	char** argv = argvTemp;

	if (m_mode == RTSP_MODE)
	{
		argc = 2;
		argcTemp = 2;

		argv[0] = new char[1000];
		strcpy(argv[0], "VLC media player");
		argv[1] = new char[1000];
		strcpy(argv[1], m_rtspServerUri);
		strcpy(moduleName, "RTSP Client");
	}
	else if (m_mode == RTSP_TCP_MODE)
	{
		// "-t": stream RTP/RTCP over the TCP control connection.
		argc = 3;
		argcTemp = 3;

		argv[0] = new char[1000];
		strcpy(argv[0], "VLC media player");
		argv[1] = new char[1000];
		strcpy(argv[1], "-t");
		argv[2] = new char[1000];
		strcpy(argv[2], m_rtspServerUri);
		strcpy(moduleName, "RTSP TCP Client");
	}
	else if (m_mode == RTSP_OVER_HTTP_MODE)
	{
		// "-T 8000": tunnel RTSP over HTTP on port 8000.
		argc = 4;
		argcTemp = 4;

		argv[0] = new char[1000];
		strcpy(argv[0], "RTSP Over HTTP Client");
		argv[1] = new char[1000];
		strcpy(argv[1], "-T");
		argv[2] = new char[1000];
		strcpy(argv[2], "8000");
		argv[3] = new char[1000];
		strcpy(argv[3], m_rtspServerUri);
		strcpy(moduleName, "RTSP Over HTTP Client");
	}
	else
		return;
//////////////
	// NOTE(review): progName aliases argv[0], which is deleted on every
	// exit path below — progName dangles afterwards (only used by the
	// disabled usage() printout, so currently harmless).
	progName = argv[0];

	gettimeofday(&startTime, NULL);

	#ifdef USE_SIGNALS
	  // Allow ourselves to be shut down gracefully by a SIGHUP or a SIGUSR1:
	  // signal(SIGHUP, signalHandlerShutdown);
	  // signal(SIGUSR1, signalHandlerShutdown);
	#endif

	unsigned short desiredPortNum = 0;

	// unfortunately we can't use getopt() here, as Windoze doesn't have it
	// Option loop copied from openRTSP; note usage() is a no-op here, so a
	// bad option falls through rather than aborting.
	while (argc > 2) 
	{
		char* const opt = argv[1];

		if (opt[0] != '-') 
			usage();

		switch (opt[1]) 
		{

			case 'p': 
				{ // specify start port number
					int portArg;

					if (sscanf(argv[2], "%d", &portArg) != 1) 
					{
						usage();
					}
					if (portArg <= 0 || portArg >= 65536 || portArg&1) 
					{
						*env << "bad port number: " << portArg
						<< " (must be even, and in the range (0,65536))\n";
						usage();
					}

					desiredPortNum = (unsigned short)portArg;
					++argv; --argc;

					break;
				}

			case 'r': 
				{ // do not receive data (instead, just 'play' the stream(s))
				    createReceivers = False;
				    break;
				}

			case 'q':
				{ // output a QuickTime file (to stdout)
					outputQuickTimeFile = True;
					break;
				}

			case '4': 
				{ // output a 'mp4'-format file (to stdout)
					outputQuickTimeFile = True;
					generateMP4Format = True;
					break;
				}

			case 'i': 
				{ // output an AVI file (to stdout)
					outputAVIFile = True;
					break;
				}

			case 'I': 
				{ // specify input interface...
					NetAddressList addresses(argv[2]);
					if (addresses.numAddresses() == 0)
					{
						*env << "Failed to find network address for \"" << argv[2] << "\"";
						break;
					}

					ReceivingInterfaceAddr = *(unsigned*)(addresses.firstAddress()->data());
					++argv; --argc;

					break;
				}

			case 'a': 
				{ // receive/record an audio stream only
					audioOnly = True;
					singleMedium = "audio";
					break;
				}

			case 'v': 
				{ // receive/record a video stream only
					videoOnly = True;
					singleMedium = "video";
					break;
				}

			case 'V': 
				{ // disable verbose output
					verbosityLevel = 0;
					break;
				}

			case 'd': 
				{ // specify duration, or how much to delay after end time
					float arg;

					if (sscanf(argv[2], "%g", &arg) != 1) 
					{
						usage();
					}

					if (argv[2][0] == '-')
					{// not "arg<0", in case argv[2] was "-0"
						// a 'negative' argument was specified; use this for "durationSlop":
						duration = 0; // use whatever's in the SDP
						durationSlop = -arg;
					} 
					else 
					{
						duration = arg;
						durationSlop = 0;
					}

					++argv; --argc;
					break;
				}

			case 'D': 
				{ // specify maximum number of seconds to wait for packets:
					if (sscanf(argv[2], "%u", &interPacketGapMaxTime) != 1) 
					{
						usage();
					}

					++argv; --argc;
					break;
				}

			case 'c': 
				{ // play continuously
					playContinuously = True;
					break;
				}

			case 'S': 
				{ // specify an offset to use with "SimpleRTPSource"s
					if (sscanf(argv[2], "%d", &simpleRTPoffsetArg) != 1) 
					{
						usage();
					}

					if (simpleRTPoffsetArg < 0) 
					{
						*env << "offset argument to \"-S\" must be >= 0\n";
						usage();
					}

					++argv; --argc;
					break;
				}

			case 'O': 
				{ // Don't send an "OPTIONS" request before "DESCRIBE"
					sendOptionsRequest = False;
					break;
				}

			case 'o': 
				{ // Send only the "OPTIONS" request to the server
					sendOptionsRequestOnly = True;
					break;
				}

			case 'm': 
				{ // output multiple files - one for each frame
					oneFilePerFrame = True;
					break;
				}

			case 'n': 
				{ // notify the user when the first data packet arrives
					notifyOnPacketArrival = True;
					break;
				}

			case 't': 
				{
					// stream RTP and RTCP over the TCP 'control' connection
					if (controlConnectionUsesTCP)
					{
						streamUsingTCP = True;
					} 
					else 
					{
						usage();
					}

					break;
				}

			case 'T': 
				{
					// stream RTP and RTCP over a HTTP connection
					if (controlConnectionUsesTCP) 
					{
						if (argc > 3 && argv[2][0] != '-') 
						{
							// The next argument is the HTTP server port number:
							if (sscanf(argv[2], "%hu", &tunnelOverHTTPPortNum) == 1
								&& tunnelOverHTTPPortNum > 0) 
							{
								++argv; --argc;
								break;
							}
						}
					}

					// If we get here, the option was specified incorrectly:
					usage();
					break;
				}

			case 'u': 
				{ // specify a username and password
					username = argv[2];
					password = argv[3];
					argv+=2; argc-=2;

					if (allowProxyServers && argc > 3 && argv[2][0] != '-') 
					{
						// The next argument is the name of a proxy server:
						proxyServerName = argv[2];
						++argv; --argc;

						if (argc > 3 && argv[2][0] != '-') 
						{
							// The next argument is the proxy server port number:
							if (sscanf(argv[2], "%hu", &proxyServerPortNum) != 1)
							{
								usage();
							}

							++argv; --argc;
						}
					}

					break;
				}

			case 'A': 
				{ // specify a desired audio RTP payload format
					unsigned formatArg;
					if (sscanf(argv[2], "%u", &formatArg) != 1
						|| formatArg >= 96) 
					{
						usage();
					}

					desiredAudioRTPPayloadFormat = (unsigned char)formatArg;
					++argv; --argc;
					break;
				}

			case 'M': 
				{ // specify a MIME subtype for a dynamic RTP payload type
					mimeSubtype = argv[2];

					if (desiredAudioRTPPayloadFormat==0) 
						desiredAudioRTPPayloadFormat =96;

					++argv; --argc;
					break;
				}

			case 'w': 
				{ // specify a width (pixels) for an output QuickTime or AVI movie
					if (sscanf(argv[2], "%hu", &movieWidth) != 1)
					{
						usage();
					}

					movieWidthOptionSet = True;
					++argv; --argc;
					break;
				}

			case 'h': 
				{ // specify a height (pixels) for an output QuickTime or AVI movie
					if (sscanf(argv[2], "%hu", &movieHeight) != 1) 
					{
						usage();
					}

					movieHeightOptionSet = True;
					++argv; --argc;
					break;
				}

			case 'f': 
				{ // specify a frame rate (per second) for an output QT or AVI movie
					if (sscanf(argv[2], "%u", &movieFPS) != 1) 
					{
						usage();
					}

					movieFPSOptionSet = True;
					++argv; --argc;
					break;
				}

			case 'F': 
				{ // specify a prefix for the audio and video output files
					fileNamePrefix = argv[2];
					++argv; --argc;
					break;
				}

			case 'b': 
				{ // specify the size of buffers for "FileSink"s
					if (sscanf(argv[2], "%u", &fileSinkBufferSize) != 1) 
					{
						usage();
					}

					++argv; --argc;
					break;
				}

			case 'B': 
				{ // specify the size of input socket buffers
					if (sscanf(argv[2], "%u", &socketInputBufferSize) != 1) 
					{
						usage();
					}

					++argv; --argc;
					break;
				}

			// Note: The following option is deprecated, and may someday be removed:
			case 'l': 
				{ // try to compensate for packet loss by repeating frames
					packetLossCompensate = True;
					break;
				}

			case 'y': 
				{ // synchronize audio and video streams
					syncStreams = True;
					break;
				}

			case 'H': 
				{ // generate hint tracks (as well as the regular data tracks)
					generateHintTracks = True;
					break;
				}

			case 'Q': 
				{ // output QOS measurements
					qosMeasurementIntervalMS = 3000; // default: 3 second

					if (argc > 3 && argv[2][0] != '-') 
					{
						// The next argument is the measurement interval,
						// in multiples of 100 ms
						if (sscanf(argv[2], "%u", &qosMeasurementIntervalMS) != 1) 
						{
							usage();
						}

						qosMeasurementIntervalMS *= 100;
						++argv; --argc;
					}
					
					break;
				}

			case 's': 
				{ // specify initial seek time (trick play)
					double arg;
					if (sscanf(argv[2], "%lg", &arg) != 1 || arg < 0)
					{
						usage();
					}

					initialSeekTime = arg;
					++argv; --argc;
					break;
				}

			case 'z': 
				{ // scale (trick play)
					float arg;

					if (sscanf(argv[2], "%g", &arg) != 1 || arg == 0.0f) 
					{
						usage();
					}

						scale = arg;
						++argv; --argc;
						break;
				}

			default: 
				{
					usage();
					break;
				}
		}

		++argv; --argc;
	}


	// Post-parse sanity checks (also inherited from openRTSP).
	if (argc != 2) 
		usage();

	if (outputQuickTimeFile && outputAVIFile) 
	{
		*env << "The -i and -q (or -4) flags cannot both be used!\n";
		usage();
	}

	Boolean outputCompositeFile = outputQuickTimeFile || outputAVIFile;

	if (!createReceivers && outputCompositeFile) 
	{
		*env << "The -r and -q (or -4 or -i) flags cannot both be used!\n";
		usage();
	}

	if (outputCompositeFile && !movieWidthOptionSet) 
	{
		*env << "Warning: The -q, -4 or -i option was used, but not -w.  Assuming a video width of "
		 << movieWidth << " pixels\n";
	}
	if (outputCompositeFile && !movieHeightOptionSet) 
	{
		*env << "Warning: The -q, -4 or -i option was used, but not -h.  Assuming a video height of "
		 << movieHeight << " pixels\n";
	}
	if (outputCompositeFile && !movieFPSOptionSet) 
	{
		*env << "Warning: The -q, -4 or -i option was used, but not -f.  Assuming a video frame rate of "
		 << movieFPS << " frames-per-second\n";
	}
	if (audioOnly && videoOnly) 
	{
		*env << "The -a and -v flags cannot both be used!\n";
		usage();
	}
	if (sendOptionsRequestOnly && !sendOptionsRequest)
	{
		*env << "The -o and -O flags cannot both be used!\n";
		usage();
	}
	if (tunnelOverHTTPPortNum > 0) 
	{
		if (streamUsingTCP) 
		{
			*env << "The -t and -T flags cannot both be used!\n";
			usage();
		} 
		else 
		{
			streamUsingTCP = True;
		}
	}
	if (!createReceivers && notifyOnPacketArrival) 
	{
		*env << "Warning: Because we're not receiving stream data, the -n flag has no effect\n";
	}
	if (durationSlop < 0) 
	{
		// This parameter wasn't set, so use a default value.
		// If we're measuring QOS stats, then don't add any slop, to avoid
		// having 'empty' measurement intervals at the end.
		durationSlop = qosMeasurementIntervalMS > 0 ? 0.0 : 5.0;
	}

	// The remaining argument is the RTSP URL built by SetRTSPServerURI().
	char url[1000];
	strcpy(url, argv[1]);

	CIPCameraStreamSource* ptr_ipcamera_source = (CIPCameraStreamSource*)(((CIPCameraStreamReceiver*)This)->m_ptr_stream_source);

	// NOTE(review): IsBadReadPtr is documented by Microsoft as unreliable;
	// it is used throughout this function as a best-effort liveness guard.
	if (IsBadReadPtr(ptr_ipcamera_source, 4))
	{
		for (int i=0; i<argcTemp; i++)
			delete[] argvTemp[i];

		return;
	}

	ptr_ipcamera_source->m_extraDataSize = 0;

	// Create our client object:
	ourClient = createClient(*env, verbosityLevel, progName);
	if (ourClient == NULL) 
	{
		*env << "Failed to create " << clientProtocolName
			<< " client: " << env->getResultMsg() << "\n";

		for (int i=0; i<argcTemp; i++)
			delete[] argvTemp[i];

		return;
	}

	// NOTE(review): this hard-coded override makes the entire "OPTIONS"
	// branch below dead code (and defeats -o/-O) — confirm intentional.
	sendOptionsRequest = FALSE;

	if (sendOptionsRequest) 
	{
		// Begin by sending an "OPTIONS" command:
		char* optionsResponse
			= getOptionsResponse(ourClient, url, username, password);
		if (sendOptionsRequestOnly)
		{
			if (optionsResponse == NULL) 
			{
				*env << clientProtocolName << " \"OPTIONS\" request failed: "
					<< env->getResultMsg() << "\n";
			} 
			else 
			{
				*env << clientProtocolName << " \"OPTIONS\" request returned: "
					<< optionsResponse << "\n";
			}

			if (optionsResponse)
				delete[] optionsResponse;

			if (!IsBadReadPtr(ourClient, 4))
			{
				Medium::close(ourClient);
				ourClient = NULL;
			}

			for (int i=0; i<argcTemp; i++)
				delete[] argvTemp[i];

			return;
		}
		
		if (optionsResponse)
			delete[] optionsResponse;
	}

	char* sdpDescription = NULL;

	// Bail out (and clean up) if a stop was requested while we were
	// setting up, or if the owner has cleared the running flag.
	if (m_isStoped)
	{
		if (!IsBadReadPtr(ourClient, 4))
		{
			Medium::close(ourClient);
			ourClient = NULL;
		}

		for (int i=0; i<argcTemp; i++)
			delete[] argvTemp[i];

		return;
	}

	if (isRunning)
	{
		if (IsBadReadPtr(ourClient, 4))
		{
			for (int i=0; i<argcTemp; i++)
				delete[] argvTemp[i];

			return;
		}
		else
		{
			// RTSP "DESCRIBE": fetch the SDP for the camera's stream.
			sdpDescription
				= getSDPDescriptionFromURL(ourClient, url, username, password,
						   proxyServerName, proxyServerPortNum, desiredPortNum);
		}
	}
	else
	{
		if (!IsBadReadPtr(ourClient, 4))
		{
			Medium::close(ourClient);
			ourClient = NULL;
		}

		for (int i=0; i<argcTemp; i++)
			delete[] argvTemp[i];

		return;
	}

	for (int i=0; i<argcTemp; i++)
		delete[] argvTemp[i];

	m_needShutdown = TRUE;

	if (sdpDescription == NULL)
	{
		// Finally, shut down our client:
		//shutdown(1);

		return;
	}

	// sdpDescription to IP Camera Receiver.sdpLines setting
	// Cache the SDP on the source object (only the first time; sdpLines is
	// assumed to be a preallocated char buffer — confirm its size).
	CIPCameraStreamReceiver* pThis = (CIPCameraStreamReceiver*)This;
	if (strlen(((CIPCameraStreamSource*)(pThis->m_ptr_stream_source))->sdpLines) == 0)
	{
		//ThisSource->m_ptr_stream_source->sdpLines = new char[strlen(sdpDescription) + 1];
		strcpy(((CIPCameraStreamSource*)(pThis->m_ptr_stream_source))->sdpLines, sdpDescription);
	}


	// Scan the SDP text for H.264 "profile-level-id" (6 hex chars) and
	// "sprop-parameter-sets" (up to the CRLF).  NULL is used here as the
	// '\0' terminator.
	pThis->profileLevelID[0] = NULL;
	spsParameter[0] = NULL;
	int videoInIndex = pThis->videoInIndex;

	for (int i = 0; i < (int)(strlen(sdpDescription)); i++)
	{
		if (strncmp(sdpDescription+i, "profile-level-id", 16) == 0)
		{
			strncpy(pThis->profileLevelID, sdpDescription+i+17, 6);
			pThis->profileLevelID[6] = NULL;
			continue;
		}

		if (strncmp(sdpDescription+i, "sprop-parameter-sets", 20) == 0)
		{
			for (int j = i; j < (int)(strlen(sdpDescription)); j++)
			{
				if (sdpDescription[j] == 0x0d && sdpDescription[j+1] == 0x0a)
				{
					strncpy(spsParameter, sdpDescription+i+21, j-i-21);
					spsParameter[j-i-21] = NULL;
					break;
				}
			}
			
			continue;
		}
	}
	
	*env << "Opened URL \"" << url
		<< "\", returning a SDP description:\n" << sdpDescription << "\n";

	// Create a media session object from this SDP description:
	session = MediaSession::createNew(*env, sdpDescription);
	delete[] sdpDescription;

	if (session == NULL) 
	{
		*env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
		return;
	} 
	else if (!session->hasSubsessions()) 
	{
		*env << "This session has no media subsessions (i.e., \"m=\" lines)\n";
		return;
	}

	// Then, setup the "RTPSource"s for the session:
	MediaSubsessionIterator iter(*session);
	MediaSubsession *subsession;
	Boolean madeProgress = False;
	char const* singleMediumToTest = singleMedium;

	while ((subsession = iter.next()) != NULL) 
	{
		// If we've asked to receive only a single medium, then check this now:
		if (singleMediumToTest != NULL)
		{
			if (strcmp(subsession->mediumName(), singleMediumToTest) != 0) 
			{
				*env << "Ignoring \"" << subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession, because we've asked to receive a single " << singleMedium
					<< " session only\n";
				continue;
			} 
			else 
			{
				// Receive this subsession only
				singleMediumToTest = "xxxxx";
				// this hack ensures that we get only 1 subsession of this type
			}
		}

		if (desiredPortNum != 0) 
		{
			subsession->setClientPortNum(desiredPortNum);
			desiredPortNum += 2;
		}

		if (createReceivers) 
		{
			if (!subsession->initiate(simpleRTPoffsetArg)) 
			{
				*env << "Unable to create receiver for \"" << subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession: " << env->getResultMsg() << "\n";
			}
			else 
			{
				*env << "Created receiver for \"" << subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession (client ports " << subsession->clientPortNum()
					<< "-" << subsession->clientPortNum()+1 << ")\n";
				madeProgress = True;
	
				if (subsession->rtpSource() != NULL) 
				{
					// Because we're saving the incoming data, rather than playing
					// it in real time, allow an especially large time threshold
					// (0.1 second) for reordering misordered incoming packets:
					unsigned const thresh = 1000000; // 0.1 second
					subsession->rtpSource()->setPacketReorderingThresholdTime(thresh);
	  
					// Set the RTP source's OS socket buffer size as appropriate - either if we were explicitly asked (using -B),
					// or if the desired FileSink buffer size happens to be larger than the current OS socket buffer size.
					// (The latter case is a heuristic, on the assumption that if the user asked for a large FileSink buffer size,
					// then the input data rate may be large enough to justify increasing the OS socket buffer size also.)
					int socketNum = subsession->rtpSource()->RTPgs()->socketNum();
					unsigned curBufferSize = getReceiveBufferSize(*env, socketNum);

					if (socketInputBufferSize > 0 || fileSinkBufferSize > curBufferSize) 
					{
						unsigned newBufferSize = socketInputBufferSize > 0 ? socketInputBufferSize : fileSinkBufferSize;
						newBufferSize = setReceiveBufferTo(*env, socketNum, newBufferSize);
						if (socketInputBufferSize > 0) 
						{ // The user explicitly asked for the new socket buffer size; announce it:
							*env << "Changed socket receive buffer size for the \""
								<< subsession->mediumName()
								<< "/" << subsession->codecName()
								<< "\" subsession from "
								<< curBufferSize << " to "
								<< newBufferSize << " bytes\n";
						}
					}
				}
			}
		} 
		else 
		{
			if (subsession->clientPortNum() == 0) 
			{
				*env << "No client port was specified for the \""
					<< subsession->mediumName()
					<< "/" << subsession->codecName()
					<< "\" subsession.  (Try adding the \"-p <portNum>\" option.)\n";
			} 
			else 
			{
				madeProgress = True;
			}
		}
	}

	if (!madeProgress) 
	{
		return;
	}

	// Perform additional 'setup' on each subsession, before playing them:
	setupStreams();

	// Create output files:
	if (createReceivers) 
	{
		if (outputQuickTimeFile) 
		{
			// Create a "QuickTimeFileSink", to write to 'stdout':
			qtOut = QuickTimeFileSink::createNew(*env, *session, "stdout",
					   fileSinkBufferSize,
					   movieWidth, movieHeight,
					   movieFPS,
					   packetLossCompensate,
					   syncStreams,
					   generateHintTracks,
					   generateMP4Format);
			if (qtOut == NULL) 
			{
				*env << "Failed to create QuickTime file sink for stdout: " << env->getResultMsg();

				return;
			}

			qtOut->startPlaying(sessionAfterPlaying, (void*)this);
		} 
		else if (outputAVIFile) 
		{
			// Create an "AVIFileSink", to write to 'stdout':
			aviOut = AVIFileSink::createNew(*env, *session, "stdout",
				      fileSinkBufferSize,
				      movieWidth, movieHeight,
				      movieFPS,
				      packetLossCompensate);

			if (aviOut == NULL) 
			{
				*env << "Failed to create AVI file sink for stdout: " << env->getResultMsg();

				return;
			}

			aviOut->startPlaying(sessionAfterPlaying, (void*)this);
	#ifdef SUPPORT_REAL_RTSP
		} 
		else if (session->isRealNetworksRDT) 
		{
			IPCameraVideoSink* ipCameraSink = IPCameraVideoSink::createNew(*env, ipCameraSinkBufferSize);

			struct timeval timeNow;
			gettimeofday(&timeNow, NULL);

			// Start playing the output file from the first subsession.
			// (Hack: Because all subsessions' data is actually multiplexed on the
			// single RTSP TCP connection, playing from one subsession is sufficient.)
			iter.reset();
			madeProgress = False;

			while ((subsession = iter.next()) != NULL) 
			{
				if (subsession->readSource() == NULL) continue; // was not initiated

				ipCameraSink->startPlaying(*(subsession->readSource()),
			       subsessionAfterPlaying, subsession);
				madeProgress = True;
				break; // play from one subsession only
			}

			if (!madeProgress) 
				shutdown();
	#endif
		} 
		else
		{
			// Create and start "FileSink"s for each subsession:
			// (Here the "sinks" are the in-memory IPCamera sinks, not files.)
			madeProgress = FALSE;
			iter.reset();

			while ((subsession = iter.next()) != NULL) 
			{
				if (subsession->readSource() == NULL) 
					continue; // was not initiated

				void* sink = NULL;
				
				if (strcmp(subsession->mediumName(), "audio") == 0 &&
					(strcmp(subsession->codecName(), "AMR") == 0 ||
					strcmp(subsession->codecName(), "AMR-WB") == 0)) 
				{
					// For AMR audio streams, we use a special sink that inserts AMR frame hdrs:
					/*fileSink = AMRAudioFileSink::createNew(*env, outFileName,
						 fileSinkBufferSize, oneFilePerFrame);*/
				} 
				else if (strcmp(subsession->mediumName(), "video") == 0)
				{
					//////////////////////////////////////////////////////////////////////////
					// Pick the codec for the video sink: H264 (with SPS from the
					// SDP), MJPEG, or the MPEG4 default.
					int nCodecIndex = CODEC_MPEG4;
					char *pSPS = NULL;
					if (strcmp(subsession->codecName(), "H264") == 0)
					{
						nCodecIndex = CODEC_H264;
						pSPS = (char*)subsession->fmtp_spropparametersets();
					}
					else if (strcmp(subsession->codecName(), "JPEG") == 0)
					{
						nCodecIndex = CODEC_MJPEG;
					}
					//////////////////////////////////////////////////////////////////////////
					// Normal case:
					sink = IPCameraVideoSink::createNew(*env, session, this->ourClient, fileSinkBufferSize, nCodecIndex, pSPS);

				} 
				else if (strcmp(subsession->mediumName(), "audio") == 0)
				{
					sink = IPCameraAudioSink::createNew(*env, fileSinkBufferSize);
				}
				

				subsession->sink = (MediaSink*)sink;
				if (sink)
					subsession->sink->int_stream_id = pThis->m_int_stream_id; // buffer index

				if (subsession->sink == NULL) 
				{
					*env << "Failed to create IPCameraVideoSink.";
				}
				else 
				{
					if (singleMedium != NULL) 
					{
						*env << "Outputting data from the \"" << subsession->mediumName()
							<< "/" << subsession->codecName()
							<< "\" subsession to 'stdout'\n";
					}

					if (strcmp(subsession->mediumName(), "video") == 0 &&
							strcmp(subsession->codecName(), "MP4V-ES") == 0 &&
							subsession->fmtp_config() != NULL) 
					{
						// For MPEG-4 video RTP streams, the 'config' information
						// from the SDP description contains useful VOL etc. headers.
						// Insert this data at the front of the output file:
						int xx = 0;
					}

					// NOTE(review): packing pointers into an int[] truncates on
					// 64-bit builds; 'params' is presumably freed by the
					// callback — verify both (here and for params1 below).
					int* params;
					params = new int[2];
					params[0] = (int)this;
					params[1] = (int)subsession;
					subsession->sink->startPlaying(*(subsession->readSource()),
										 subsessionAfterPlaying,
										 (void*)params);

					// Also set a handler to be called if a RTCP "BYE" arrives
					// for this subsession:
					if (subsession->rtcpInstance() != NULL) 
					{
						int* params1;
						params1 = new int[2];
						params1[0] = (int)this;
						params1[1] = (int)subsession;

						subsession->rtcpInstance()->setByeHandler(subsessionByeHandler,
								  params1);
					}

					madeProgress = True;
				}
			}

			if (!madeProgress) 
			{
				return;
			}
		}
	}


	// Finally, start playing each subsession, to start the data flow:

	startPlayingStreams();

	// Blocks here until the watch variable (isRunning) becomes non-zero/changes.
	env->taskScheduler().doEventLoop((char*)(&isRunning)); // does not return
}

void CIPCameraStreamReceiver::SetRTSPServerURI(char* rtspServerUri)
{
	strcpy(m_rtspServerUri, "rtsp://");
	strcat(m_rtspServerUri, ((DEVICE_IPCAMERA*)(m_ptr_stream_source->m_ptr_input_device->self_info))->id);
	strcat(m_rtspServerUri, ":");
	strcat(m_rtspServerUri, ((DEVICE_IPCAMERA*)(m_ptr_stream_source->m_ptr_input_device->self_info))->password);
	strcat(m_rtspServerUri, "@");
	strcat(m_rtspServerUri, ((DEVICE_IPCAMERA*)(m_ptr_stream_source->m_ptr_input_device->self_info))->address);
	strcat(m_rtspServerUri, rtspServerUri);
}

// Deliberately a no-op: the original openRTSP behavior (print the full
// command-line help, then shut down) is kept below only as commented-out
// reference.  Because the option parser in Run() calls usage() on every
// bad argument, a malformed option is currently silently ignored.
void CIPCameraStreamReceiver::usage() 
{
//	*env << "Usage: " << progName
//		<< " [-p <startPortNum>] [-r|-q|-4|-i] [-a|-v] [-V] [-d <duration>] [-D <max-inter-packet-gap-time> [-c] [-S <offset>] [-n] [-O]"
//		<< (controlConnectionUsesTCP ? " [-t|-T <http-port>]" : "")
//		<< " [-u <username> <password>"
//		<< (allowProxyServers ? " [<proxy-server> [<proxy-server-port>]]" : "")
//		<< "]" << (supportCodecSelection ? " [-A <audio-codec-rtp-payload-format-code>|-M <mime-subtype-name>]" : "")
//		<< " [-s <initial-seek-time>] [-z <scale>]"
//		<< " [-w <width> -h <height>] [-f <frames-per-second>] [-y] [-H] [-Q [<measurement-interval>]] [-F <filename-prefix>] [-b <file-sink-buffer-size>] [-B <input-socket-buffer-size>] [-I <input-interface-ip-address>] [-m] <url> (or " << progName << " -o [-V] <url>)\n";
	//##### Add "-R <dest-rtsp-url>" #####
	//shutdown();
}

// Sends an RTSP "SETUP" for every subsession that was successfully
// initiated (i.e. that has a client port number assigned), using TCP
// interleaving when streamUsingTCP is set.  A failed SETUP is logged and
// the remaining subsessions are still attempted.
// Fix: removed the local 'madeProgress' flag, which was written but never
// read (dead code).
void CIPCameraStreamReceiver::setupStreams() 
{
	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;

	while ((subsession = iter.next()) != NULL) 
	{
		if (subsession->clientPortNum() == 0) continue; // port # was not set

		if (!clientSetupSubsession(ourClient, subsession, streamUsingTCP)) 
		{
			*env << "Failed to setup \"" << subsession->mediumName()
				<< "/" << subsession->codecName()
				<< "\" subsession: " << env->getResultMsg() << "\n";
		}
		else
		{
			*env << "Setup \"" << subsession->mediumName()
				<< "/" << subsession->codecName()
				<< "\" subsession (client ports " << subsession->clientPortNum()
				<< "-" << subsession->clientPortNum()+1 << ")\n";
		}
	}
}

// Issues the RTSP "PLAY" for the whole session, then (optionally) starts
// QOS measurements, schedules a delayed-shutdown timer when a finite
// duration applies, and arms the packet-arrival / inter-packet-gap
// watchdogs.  Logs but otherwise ignores a PLAY failure's details.
void CIPCameraStreamReceiver::startPlayingStreams() 
{
	// Derive the play duration from the SDP when none was given on the
	// "command line"; negative scale (reverse trick play) plays back to 0.
	if (duration == 0) 
	{
		if (scale > 0) duration = session->playEndTime() - initialSeekTime; // use SDP end time
		else if (scale < 0) duration = initialSeekTime;
	}
	if (duration < 0) duration = 0.0;

	if (!clientStartPlayingSession(ourClient, session)) 
	{
		*env << "Failed to start playing session: " << env->getResultMsg() << "\n";

		return;
	}
	else
	{
		*env << "Started playing session\n";
	}

	if (qosMeasurementIntervalMS > 0) 
	{
		// Begin periodic QOS measurements:
		beginQOSMeasurement();
	}

	// Figure out how long to delay (if at all) before shutting down, or
	// repeating the playing
	Boolean timerIsBeingUsed = False;
	double secondsToDelay = duration;
	if (duration > 0) 
	{
		// Cap at the largest delay expressible in signed 32-bit microseconds.
		double const maxDelayTime
			= (double)( ((unsigned)0x7FFFFFFF)/1000000.0 );
		if (duration > maxDelayTime) 
		{
			*env << "Warning: specified end time " << duration
				<< " exceeds maximum " << maxDelayTime
				<< "; will not do a delayed shutdown\n";
		}
		else 
		{
			timerIsBeingUsed = True;
			double absScale = scale > 0 ? scale : -scale; // ASSERT: scale != 0
			secondsToDelay = duration/absScale + durationSlop;

			int uSecsToDelay = (int)(secondsToDelay*1000000.0);
			sessionTimerTask = env->taskScheduler().scheduleDelayedTask(
				uSecsToDelay, (TaskFunc*)sessionTimerHandler, (void*)this);
		}
	}

	char const* actionString
		= createReceivers? "Receiving streamed data":"Data is being streamed";
	if (timerIsBeingUsed) 
	{
		*env << actionString
			<< " (for up to " << secondsToDelay
			<< " seconds)...\n";
	} 
	else 
	{
#ifdef USE_SIGNALS
		pid_t ourPid = getpid();
		*env << actionString
			<< " (signal with \"kill -HUP " << (int)ourPid
			<< "\" or \"kill -USR1 " << (int)ourPid
			<< "\" to terminate)...\n";
#else
		*env << actionString << "...\n";
#endif
	}

	// Watch for incoming packets (if desired):
	checkForPacketArrival(this);
	checkInterPacketGaps(this);

	// NOTE(review): this trailing guard is a no-op — the function returns
	// immediately after it either way.
	if (!m_ptr_stream_source)
		return;
}

// Issues the RTSP "TEARDOWN" for the session, but only when the stream
// source is still attached, a session exists, and the session's subsession
// list is readable (best-effort IsBadReadPtr guard).
void CIPCameraStreamReceiver::tearDownStreams() 
{
	// Guard clauses, checked in the same short-circuit order as before.
	if (!m_ptr_stream_source)
		return;
	if (session == NULL)
		return;
	if (IsBadReadPtr(session->fSubsessionsHead, 1))
		return;

	clientTearDownSession(ourClient, session);
}


void CIPCameraStreamReceiver::closeMediaSinks() 
{

	if (!IsBadReadPtr(qtOut, 4))
		Medium::close(qtOut);

	if (!IsBadReadPtr(aviOut, 4))
		Medium::close(aviOut);

	if (!m_ptr_stream_source || session == NULL || IsBadReadPtr(session, 4) || IsBadReadPtr(session->fSubsessionsHead, 4)) 
	{
		return;
	}
	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;

	while ((subsession = iter.next()) != NULL) 
	{
		if (!IsBadReadPtr(subsession->sink, 4)) 
		{
			subsession->sink->stopPlaying();
			Medium::close(subsession->sink);
		}
		subsession->sink = NULL;
	}
}



void CIPCameraStreamReceiver::beginQOSMeasurement() 
{
	// Set up a measurement record for each active subsession:
	struct timeval startTime;
	gettimeofday(&startTime, NULL);
	nextQOSMeasurementUSecs = startTime.tv_sec*1000000 + startTime.tv_usec;
	qosMeasurementRecord* qosRecordTail = NULL;
	MediaSubsessionIterator iter(*session);
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) 
	{
		RTPSource* src = subsession->rtpSource();
#ifdef SUPPORT_REAL_RTSP
		if (session->isRealNetworksRDT) src = (RTPSource*)(subsession->readSource()); // hack
#endif
		if (src == NULL) continue;

		qosMeasurementRecord* qosRecord
			= new qosMeasurementRecord(startTime, src);
		if (qosRecordHead == NULL) qosRecordHead = qosRecord;
		if (qosRecordTail != NULL) qosRecordTail->fNext = qosRecord;
		qosRecordTail  = qosRecord;
	}

	// Then schedule the first of the periodic measurements:
	scheduleNextQOSMeasurement();
}

// Print accumulated QOS statistics for every active subsession to the
// environment's output, then destroy the measurement-record list.
// exitCode: non-zero forces statusCode to 2 (stream failure) if it was 0.
void CIPCameraStreamReceiver::printQOSData(int exitCode) 
{
	if (exitCode != 0 && statusCode == 0) statusCode = 2;
	*env << "begin_QOS_statistics\n";
	*env << "server_availability\t" << (statusCode == 1 ? 0 : 100) << "\n";
	*env << "stream_availability\t" << (statusCode == 0 ? 100 : 0) << "\n";

	// Print out stats for each active subsession.  The record list was built
	// by beginQOSMeasurement() in the same iteration order, so we walk both
	// in lockstep ("curQOSRecord" advances once per subsession with a source).
	qosMeasurementRecord* curQOSRecord = qosRecordHead;
	if (session != NULL) 
	{
		MediaSubsessionIterator iter(*session);
		MediaSubsession* subsession;
		while ((subsession = iter.next()) != NULL) 
		{
			RTPSource* src = subsession->rtpSource();
#ifdef SUPPORT_REAL_RTSP
			if (session->isRealNetworksRDT) src = (RTPSource*)(subsession->readSource()); // hack
#endif
			if (src == NULL) continue;

			*env << "subsession\t" << subsession->mediumName()
				<< "/" << subsession->codecName() << "\n";

			unsigned numPacketsReceived = 0, numPacketsExpected = 0;

			if (curQOSRecord != NULL) 
			{
				numPacketsReceived = curQOSRecord->totNumPacketsReceived;
				numPacketsExpected = curQOSRecord->totNumPacketsExpected;
			}
			*env << "num_packets_received\t" << numPacketsReceived << "\n";
			*env << "num_packets_lost\t" << numPacketsExpected - numPacketsReceived << "\n";

			if (curQOSRecord != NULL) 
			{
				// Elapsed wall-clock span covered by this record:
				unsigned secsDiff = curQOSRecord->measurementEndTime.tv_sec
					- curQOSRecord->measurementStartTime.tv_sec;
				int usecsDiff = curQOSRecord->measurementEndTime.tv_usec
					- curQOSRecord->measurementStartTime.tv_usec;
				double measurementTime = secsDiff + usecsDiff/1000000.0;
				*env << "elapsed_measurement_time\t" << measurementTime << "\n";

				*env << "kBytes_received_total\t" << curQOSRecord->kBytesTotal << "\n";

				*env << "measurement_sampling_interval_ms\t" << qosMeasurementIntervalMS << "\n";

				if (curQOSRecord->kbits_per_second_max == 0) 
				{
					// special case: we didn't receive any data:
					*env <<
						"kbits_per_second_min\tunavailable\n"
						"kbits_per_second_ave\tunavailable\n"
						"kbits_per_second_max\tunavailable\n";
				} 
				else 
				{
					*env << "kbits_per_second_min\t" << curQOSRecord->kbits_per_second_min << "\n";
					*env << "kbits_per_second_ave\t"
						<< (measurementTime == 0.0 ? 0.0 : 8*curQOSRecord->kBytesTotal/measurementTime) << "\n";
					*env << "kbits_per_second_max\t" << curQOSRecord->kbits_per_second_max << "\n";
				}

				*env << "packet_loss_percentage_min\t" << 100*curQOSRecord->packet_loss_fraction_min << "\n";
				// Average loss derived from the totals; clamp at 0 in case
				// reception exceeded expectation (e.g. duplicates).
				double packetLossFraction = numPacketsExpected == 0 ? 1.0
					: 1.0 - numPacketsReceived/(double)numPacketsExpected;
				if (packetLossFraction < 0.0) packetLossFraction = 0.0;
				*env << "packet_loss_percentage_ave\t" << 100*packetLossFraction << "\n";
				*env << "packet_loss_percentage_max\t"
					<< (packetLossFraction == 1.0 ? 100.0 : 100*curQOSRecord->packet_loss_fraction_max) << "\n";

#ifdef SUPPORT_REAL_RTSP
				if (session->isRealNetworksRDT) 
				{
					RealRDTSource* rdt = (RealRDTSource*)src;
					*env << "inter_packet_gap_ms_min\t" << rdt->minInterPacketGapUS()/1000.0 << "\n";
					struct timeval totalGaps = rdt->totalInterPacketGaps();
					double totalGapsMS = totalGaps.tv_sec*1000.0 + totalGaps.tv_usec/1000.0;
					unsigned totNumPacketsReceived = rdt->totNumPacketsReceived();
					*env << "inter_packet_gap_ms_ave\t"
						<< (totNumPacketsReceived == 0 ? 0.0 : totalGapsMS/totNumPacketsReceived) << "\n";
					*env << "inter_packet_gap_ms_max\t" << rdt->maxInterPacketGapUS()/1000.0 << "\n";
				} 
				else 
				{
#endif
					RTPReceptionStatsDB::Iterator statsIter(src->receptionStatsDB());
					// Assume that there's only one SSRC source (usually the case):
					RTPReceptionStats* stats = statsIter.next(True);
					if (stats != NULL) 
					{
						*env << "inter_packet_gap_ms_min\t" << stats->minInterPacketGapUS()/1000.0 << "\n";
						struct timeval totalGaps = stats->totalInterPacketGaps();
						double totalGapsMS = totalGaps.tv_sec*1000.0 + totalGaps.tv_usec/1000.0;
						unsigned totNumPacketsReceived = stats->totNumPacketsReceived();
						*env << "inter_packet_gap_ms_ave\t"
							<< (totNumPacketsReceived == 0 ? 0.0 : totalGapsMS/totNumPacketsReceived) << "\n";
						*env << "inter_packet_gap_ms_max\t" << stats->maxInterPacketGapUS()/1000.0 << "\n";
					}
#ifdef SUPPORT_REAL_RTSP
				}
#endif

				curQOSRecord = curQOSRecord->fNext;
			}
		}
	}

	*env << "end_QOS_statistics\n";

	// Deleting the head tears down the whole list (qosMeasurementRecord's
	// destructor deletes fNext recursively in the live555 playCommon design
	// — TODO confirm against this project's record class).
	// FIX: reset the head afterwards.  Previously it was left dangling, so a
	// replay (playContinuously) reaching beginQOSMeasurement() would append
	// new records through the freed pointer — a use-after-free.
	delete qosRecordHead;
	qosRecordHead = NULL;
}

// Tear down the whole receiver: cancel timers, dump QOS stats, tear down the
// RTSP session, close sinks, and destroy the live555 environment/scheduler.
// exitCode is forwarded to printQOSData() to mark failure in the stats.
// NOTE(review): intentionally does NOT exit the process (see commented-out
// exit() at the end) — this object is meant to be reusable/embedded.
void CIPCameraStreamReceiver::shutdown(int exitCode)
{

	// Cancel every pending delayed task first, so no handler can fire
	// against state we are about to free.
	if (env != NULL) 
	{
		env->taskScheduler().unscheduleDelayedTask(sessionTimerTask);
		env->taskScheduler().unscheduleDelayedTask(arrivalCheckTimerTask);
		env->taskScheduler().unscheduleDelayedTask(interPacketGapCheckTimerTask);
		env->taskScheduler().unscheduleDelayedTask(qosMeasurementTimerTask);
	}

	// QOS reporting was only started when the interval is positive.
	// NOTE(review): printQOSData() dereferences env unconditionally — if env
	// were NULL here this would crash; confirm shutdown() is never called
	// twice with QOS enabled.
	if (qosMeasurementIntervalMS > 0) 
	{
		printQOSData(exitCode);
	}

	// Teardown, then shutdown, any outstanding RTP/RTCP subsessions
	tearDownStreams();

	// Close our output files:
	closeMediaSinks();

	Medium::close(session);

	// Finally, shut down our client:
	if (!IsBadReadPtr(ourClient, 4))
	{
		Medium::close(ourClient);
		ourClient = NULL;
	}

	if (!IsBadReadPtr(env, 4)) 
	{
		// NOTE(review): deleting liveMediaPriv with delete[] through a
		// (presumably) void* looks suspect — env->reclaim() normally owns
		// this cleanup in stock live555.  Assumes a project-modified
		// UsageEnvironment where this is safe; verify against that header.
		if (env->liveMediaPriv)
		{
			delete[] env->liveMediaPriv;
			env->liveMediaPriv = NULL;
		}

		//env->taskScheduler().unscheduleDelayedTask(sessionTimerTask);
		//env->taskScheduler().unscheduleDelayedTask(arrivalCheckTimerTask);
		//env->taskScheduler().unscheduleDelayedTask(interPacketGapCheckTimerTask);
		//env->taskScheduler().unscheduleDelayedTask(qosMeasurementTimerTask);
		
		//delete &env->taskScheduler();
		// reclaim() destroys the environment object itself.
		env->reclaim();
	}

	env = NULL;

	// The scheduler is owned separately from the environment and must be
	// deleted after the environment is reclaimed.
	if (!IsBadReadPtr(scheduler, 4))
		delete scheduler;

	scheduler = NULL;

	// Adios...
	//exit(exitCode);
}

void CIPCameraStreamReceiver::signalHandlerShutdown(int /*sig*/) 
{
	*env << "Got shutdown signal\n";
	shutdown(0);
}

void CIPCameraStreamReceiver::scheduleNextQOSMeasurement() 
{
	// Advance the target time by one measurement interval.
	nextQOSMeasurementUSecs += qosMeasurementIntervalMS*1000;

	// Work out how far in the future that target lies.  The unsigned
	// subtraction is deliberate: it stays correct even when
	// nextQOSMeasurementUSecs wraps around 32 bits.
	struct timeval now;
	gettimeofday(&now, NULL);
	unsigned nowUSecs = now.tv_sec*1000000 + now.tv_usec;
	unsigned usecsToDelay = nextQOSMeasurementUSecs - nowUSecs;

	qosMeasurementTimerTask = env->taskScheduler().scheduleDelayedTask(
		usecsToDelay, (TaskFunc*)periodicQOSMeasurement, (void*)this);
}

void checkForPacketArrival(void* clientData) 
{

	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*)clientData;

	if (!This->notifyOnPacketArrival) return; // we're not checking

	// Check each subsession, to see whether it has received data packets:
	unsigned numSubsessionsChecked = 0;
	unsigned numSubsessionsWithReceivedData = 0;
	unsigned numSubsessionsThatHaveBeenSynced = 0;

	MediaSubsessionIterator iter(*(This->session));
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL)
	{
		RTPSource* src = subsession->rtpSource();
		if (src == NULL) continue;
		++numSubsessionsChecked;

		if (src->receptionStatsDB().numActiveSourcesSinceLastReset() > 0) 
		{
			// At least one data packet has arrived
			++numSubsessionsWithReceivedData;
		}
		if (src->hasBeenSynchronizedUsingRTCP()) 
		{
			++numSubsessionsThatHaveBeenSynced;
		}
	}

	unsigned numSubsessionsToCheck = numSubsessionsChecked;
	// Special case for "QuickTimeFileSink"s and "AVIFileSink"s:
	// They might not use all of the input sources:
	if (This->qtOut != NULL) 
	{
		numSubsessionsToCheck = This->qtOut->numActiveSubsessions();
	} 
	else if (This->aviOut != NULL) 
	{
		numSubsessionsToCheck = This->aviOut->numActiveSubsessions();
	}

	Boolean notifyTheUser;
	if (!This->syncStreams) 
	{
		notifyTheUser = numSubsessionsWithReceivedData > 0; // easy case
	} 
	else 
	{
		notifyTheUser = numSubsessionsWithReceivedData >= numSubsessionsToCheck
			&& numSubsessionsThatHaveBeenSynced == numSubsessionsChecked;
		// Note: A subsession with no active sources is considered to be synced
	}
	if (notifyTheUser) 
	{
		struct timeval timeNow;
		gettimeofday(&timeNow, NULL);
		char timestampStr[100];
		sprintf(timestampStr, "%ld%03ld", timeNow.tv_sec, (long)(timeNow.tv_usec/1000));
		*(This->env) << (This->syncStreams ? "Synchronized d" : "D")
			<< "ata packets have begun arriving [" << timestampStr << "]\007\n";
		return;
	}

	// No luck, so reschedule this check again, after a delay:
	int uSecsToDelay = 100000; // 100 ms
	This->arrivalCheckTimerTask
		= This->env->taskScheduler().scheduleDelayedTask(uSecsToDelay,
		(TaskFunc*)checkForPacketArrival, (void*)This);
}


void checkInterPacketGaps(void* clientData) 
{
	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*)clientData;
	if (This->interPacketGapMaxTime == 0) return; // we're not checking

	// Check each subsession, counting up how many packets have been received:
	unsigned newTotNumPacketsReceived = 0;

	MediaSubsessionIterator iter(*(This->session));
	MediaSubsession* subsession;
	while ((subsession = iter.next()) != NULL) 
	{
		RTPSource* src = subsession->rtpSource();
		if (src == NULL) continue;
		newTotNumPacketsReceived += src->receptionStatsDB().totNumPacketsReceived();
	}

	if (newTotNumPacketsReceived == This->totNumPacketsReceived) 
	{
		// No additional packets have been received since the last time we
		// checked, so end this stream:
		*(This->env) << "Closing session, because we stopped receiving packets.\n";
		This->interPacketGapCheckTimerTask = NULL;
		sessionAfterPlaying(This);
	}
	else
	{
		This->totNumPacketsReceived = newTotNumPacketsReceived;
		// Check again, after the specified delay:
		This->interPacketGapCheckTimerTask
			= This->env->taskScheduler().scheduleDelayedTask(This->interPacketGapMaxTime*1000000,
			(TaskFunc*)checkInterPacketGaps, (void*)This);
	}
}

void periodicQOSMeasurement(void* clientData) 
{
	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*)clientData;
	struct timeval timeNow;
	gettimeofday(&timeNow, NULL);

	for (qosMeasurementRecord* qosRecord = This->qosRecordHead;
		qosRecord != NULL; qosRecord = qosRecord->fNext) 
	{
			qosRecord->periodicQOSMeasurement(timeNow);
	}

	// Do this again later:
	This->scheduleNextQOSMeasurement();
}

void subsessionAfterPlaying(void* clientData) 
{
	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*) (((void**)clientData)[0]);
	// Begin by closing this media subsession's stream:
	MediaSubsession* subsession = (MediaSubsession*)(((void**)clientData)[1]);
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	// Next, check whether *all* subsessions' streams have now been closed:
	MediaSession& session = subsession->parentSession();
	MediaSubsessionIterator iter(session);
	while ((subsession = iter.next()) != NULL) 
	{
		if (subsession->sink != NULL) return; // this subsession is still active
	}

	// All subsessions' streams have now been closed
	sessionAfterPlaying(This);
}

// RTCP "BYE" handler: the sender announced the end of this subsession, so
// treat it exactly as if the subsession finished playing.  clientData is the
// same [receiver, subsession] pair that subsessionAfterPlaying() expects and
// is forwarded unchanged.
// FIX: removed dead locals (This, timeNow, secsDiff, subsession) — leftovers
// from playCommon's BYE log message that was deleted here; they computed
// values that were never used.
void subsessionByeHandler(void* clientData) 
{
	subsessionAfterPlaying(clientData);
}

void sessionAfterPlaying(void* clientData) 
{
	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*)clientData;

	if (!This->playContinuously) 
	{
		This->shutdown(0);
	}
	else 
	{
		// We've been asked to play the stream(s) over again:
		This->startPlayingStreams();
	}
}

void sessionTimerHandler(void* clientData) 
{
	CIPCameraStreamReceiver* This = (CIPCameraStreamReceiver*)clientData;
	This->sessionTimerTask = NULL;

	sessionAfterPlaying(This);
}

//KRTSPClient
// Factory for the RTSP client; tunnels RTSP over HTTP when a tunnel port
// was configured (tunnelOverHTTPPortNum != 0).
Medium* CIPCameraStreamReceiver::createClient(UsageEnvironment& env,
                     int verbosityLevel, char const* applicationName) 
{
  return RTSPClient::createNew(env, verbosityLevel, applicationName,
                               tunnelOverHTTPPortNum);
}

// Issue an RTSP "OPTIONS" request and return the server's response string.
char* CIPCameraStreamReceiver::getOptionsResponse(Medium* client, char const* url,
			 char* username, char* password) 
{
  RTSPClient* rtsp = (RTSPClient*)client;
  return rtsp->sendOptionsCmd(url, username, password);
}

// DESCRIBE the stream at "url" and return its SDP.  If the SDP advertises
// one of a small set of unwanted audio formats, the whole description is
// blanked (first byte set to NUL) rather than rejected.  Also records the
// RTSP status code of the DESCRIBE into "statusCode".
char* CIPCameraStreamReceiver::getSDPDescriptionFromURL(Medium* client, char const* url,
			       char const* username, char const* password,
			       char const* /*proxyServerName*/,
			       unsigned short /*proxyServerPortNum*/,
			       unsigned short /*clientStartPort*/) 
{
  RTSPClient* rtspClient = (RTSPClient*)client;

  // Authenticate only when both credentials were supplied.
  char* sdp;
  if (username != NULL && password != NULL) 
  {
    sdp = rtspClient->describeWithPassword(url, username, password, FALSE, 10);
  } 
  else
  {
    sdp = rtspClient->describeURL(url, NULL, FALSE, 10);
  }

  if (sdp != NULL)
  {
    // Empty out the description if it mentions any of these audio formats.
    char const* const unwanted[] = { "PCMU/16000", "L8/", "L16/", "PCMA/" };
    for (int i = 0; i < (int)(sizeof(unwanted)/sizeof(unwanted[0])); ++i)
    {
      if (strstr(sdp, unwanted[i]) != NULL)
      {
        sdp[0] = '\0';
        break;
      }
    }
  }

  statusCode = rtspClient->describeStatus();
  return sdp;
}

// Issue an RTSP "SETUP" for one subsession; returns False on NULL inputs.
Boolean CIPCameraStreamReceiver::clientSetupSubsession(Medium* client, MediaSubsession* subsession,
			      Boolean streamUsingTCP) 
{
  if (client == NULL) return False;
  if (subsession == NULL) return False;

  RTSPClient* rtsp = (RTSPClient*)client;
  return rtsp->setupMediaSubsession(*subsession, False, streamUsingTCP);
}

// Issue an RTSP "PLAY" for the whole session, computing the end time from
// the member fields initialSeekTime, duration and scale.
// Returns False if client or session is NULL.
// FIX: the NULL guard now runs before the end-time computation (the original
// computed it and then threw it away), and the dead "endTime =
// initialSeekTime" initializer — overwritten on every path — is gone.
Boolean CIPCameraStreamReceiver::clientStartPlayingSession(Medium* client,
				  MediaSession* session) 
{
  if (client == NULL || session == NULL) return False;

  double endTime;
  if (scale > 0) 
  {
    // Forward playback: duration <= 0 means "play to the end" (-1).
    if (duration <= 0) endTime = -1.0f;
    else endTime = initialSeekTime + duration;
  } 
  else
  {
    // Reverse playback: play back toward, but not past, time zero.
    endTime = initialSeekTime - duration;
    if (endTime < 0) endTime = 0.0f;
  }

  RTSPClient* rtspClient = (RTSPClient*)client;
  return rtspClient->playMediaSession(*session, initialSeekTime, endTime, (float)scale);
}

// Issue RTSP "TEARDOWN"s: one per subsession, then one for the session as a
// whole.  Returns the result of the session-level teardown, or False when
// either argument is NULL.
Boolean CIPCameraStreamReceiver::clientTearDownSession(Medium* client,
			      MediaSession* session) 
{
  if (client == NULL || session == NULL) return False;
  RTSPClient* rtsp = (RTSPClient*)client;

  // Tear down each subsession individually first...
  MediaSubsessionIterator iter(*session);
  for (MediaSubsession* sub = iter.next(); sub != NULL; sub = iter.next())
  {
    rtsp->teardownMediaSubsession(*sub);
  }

  // ...then the session itself.
  return rtsp->teardownMediaSession(*session);
}
