
#include <cstdio>
#include <cstdlib>

#include "liveMedia.hh"
#include "GroupsockHelper.hh"

#include "BasicUsageEnvironment.hh"

// Whether this is a source-specific multicast (SSM) session; passed to
// RTCPInstance::createNew() in main().
Boolean const isSSM = False;

// Usage environment shared by all live555 objects in this program.
UsageEnvironment* env;

// A structure to hold the state of the current session.
// It is used in the "afterPlaying()" function to clean up the session.
struct sessionState_t {
	FramedSource* source;       // the MP3 file source being streamed
	RTPSink* sink;              // RTP sink that packetizes and sends the audio
	RTCPInstance* rtcpInstance; // RTCP instance paired with the sink
	Groupsock* rtpGroupsock;    // socket for outgoing RTP packets
	Groupsock* rtcpGroupsock;   // socket for RTCP (RTP port + 1)
} sessionState;

// Path of the MP3 file to stream.
// NOTE(review): main() parses argv[3] as a file name but never applies it
// here, so this hard-coded path is always used — likely a bug.
char const* inputFileName = "f:\\tmp\\test.mp3";
// Event-loop watch variable: set non-zero in afterPlaying() to make
// doEventLoop() in main() return.
char fStopFlag;

void play(); // forward
// Command-line arguments: <dst-ip> <dst-port> <filename> <framesize>
int main(int argc, char** argv) {
	fStopFlag = 0;
	char* ip = NULL;
	int port = 0;
	char* filename = NULL;
	int framesize = 0;
	if (argc > 1)ip = argv[1];
	if (argc > 2)port = atoi(argv[2]);
	if (argc > 3)filename = argv[3];
	if (argc > 4)framesize = atoi(argv[4]);
	// Begin by setting up our usage environment:
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	env = BasicUsageEnvironment::createNew(*scheduler);

	// Create 'groupsocks' for RTP and RTCP:
	char const* destinationAddressStr = ip;
		/*
#ifdef USE_SSM
		= "232.255.42.42";
#else
		//= "239.255.42.42";
		//= "224.100.1.99";
		//= "192.168.3.3";
		//= "192.168.0.255";
	// Note: This is a multicast address.  If you wish to stream using
	// unicast instead, then replace this string with the unicast address
	// of the (single) destination.  (You may also need to make a similar
	// change to the receiver program.)
#endif
		*/
	const unsigned short rtpPortNum = port;
	const unsigned short rtcpPortNum = rtpPortNum + 1;
	const unsigned char ttl = 63; // low, in case routers don't admin scope

	struct in_addr destinationAddress;
	destinationAddress.s_addr = our_inet_addr(destinationAddressStr);
	const Port rtpPort(rtpPortNum);
	const Port rtcpPort(rtcpPortNum);

	sessionState.rtpGroupsock = new Groupsock(*env, destinationAddress, rtpPort, ttl);
	sessionState.rtcpGroupsock = new Groupsock(*env, destinationAddress, rtcpPort, ttl);

	//sessionState.sink = MPEG1or2AudioRTPSink::createNew(*env, sessionState.rtpGroupsock);
	MPEG1or2AudioRTPSink* sink = MPEG1or2AudioRTPSink::createNew(*env, sessionState.rtpGroupsock);
	sink->setPacketSizes(framesize, 1460);
	sessionState.sink = sink;

	//sessionState.se
	// Create (and start) a 'RTCP instance' for this RTP sink:
	
	const unsigned estimatedSessionBandwidth = 160; // in kbps; for RTCP b/w share
	const unsigned maxCNAMElen = 100;
	unsigned char CNAME[maxCNAMElen + 1];
	gethostname((char*)CNAME, maxCNAMElen);
	CNAME[maxCNAMElen] = '\0'; // just in case
	sessionState.rtcpInstance
		= RTCPInstance::createNew(*env, sessionState.rtcpGroupsock,
			estimatedSessionBandwidth, CNAME,
			sessionState.sink, NULL,
			isSSM);
	
	// Note: This starts RTCP running automatically
	play();

	env->taskScheduler().doEventLoop(&fStopFlag);
	return 0;
}

void afterPlaying(void* clientData); // forward

void play() {
	// Open the file as a 'MP3 file source':
	sessionState.source = MP3FileSource::createNew(*env, inputFileName);
	if (sessionState.source == NULL) {
		*env << "Unable to open file \"" << inputFileName
			<< "\" as a MP3 file source\n";
		exit(1);
	}

	// Finally, start the streaming:
	* env << "Beginning streaming...\n";
	sessionState.sink->startPlaying(*sessionState.source, afterPlaying, NULL);
}


// Callback invoked by the sink when the whole file has been streamed:
// stops the sink, releases the source, and signals the event loop to end.
void afterPlaying(void* /*clientData*/) {
	*env << "...done streaming\n";

	// Stop the sink first, then release the now-finished source.
	sessionState.sink->stopPlaying();
	Medium::close(sessionState.source);

	// Wake doEventLoop() in main() so the program can exit.
	fStopFlag = 1;
}
