#include "../../renderTree.h"

#include <iostream.h>
#include <math.h>
#include <cstdlib>
#include <QFile>
#include <unistd.h>
#include <signal.h>

MovieRendererDelegate::MovieRendererDelegate() {
	// Make every container format and codec known to libavformat/libavcodec
	// once, before any MovieRenderer tries to open a file.
	av_register_all();
}

MovieRendererDelegate::~MovieRendererDelegate() {
	// NOTE(review): the MovieRenderer objects allocated lazily in render()
	// and stored in rendererMap are never deleted here, so they leak when
	// the delegate is destroyed — confirm the delegate owns them and add
	// cleanup over rendererMap if so.
}

void MovieRendererDelegate::render(QGLWidget *gl,Renderable *o,AbstractRenderTree *r) {
	// Look up (or lazily create) the per-Renderable MovieRenderer and
	// forward the draw call to it.  operator[] default-constructs the slot
	// to a null pointer on first access, so a single lookup both tests for
	// and reserves the map entry (the original did a second lookup to
	// store the freshly created renderer).
	MovieRenderer *&renderer = rendererMap[o];
	if (renderer==0) {
		printf("Initialize new\n");
		renderer = new MovieRenderer();
	}
	renderer->render(gl,o,r);
}

MovieRenderer::MovieRenderer() {
	// Start in a fully "empty" state: no file loaded, no buffers allocated.
	lastTime=0;
	inited=false;
	lastFileNum=-1;
	dstbuffer=0;
	lastSize=0;
	lastFileName="";
	lastCurrent=0;
	counter=0;
	reset=false;
	// Null out everything the destructor frees and everything render()
	// reads before load() has run.  The original left these uninitialized,
	// so destroying a renderer that never loaded a file freed garbage
	// pointers, and the first render() read an indeterminate `pid` and
	// `texture` handle.
	buffer=0;
	alpha=0;
	pFrame=0;
	pFrameRGB=0;
	dst=0;
	pCodecCtx=0;
	pFormatCtx=0;
	pCodec=0;
	pid=0;
	texture=0;
	videoStream=-1;
	numBytes=0;
}

MovieRenderer::~MovieRenderer() {
	// Only tear down decoder state if load() ever built it.  The original
	// freed these members unconditionally, which is undefined behavior when
	// the renderer is destroyed before a successful load() (the pointers
	// were never allocated).
	if (inited) {
		// Free the RGB image buffers
		delete [] buffer;
		delete [] alpha;
		av_free(pFrameRGB);

		// Free the YUV frame
		av_free(pFrame);

		// Close the codec
		avcodec_close(pCodecCtx);

		// Close the video file
		av_close_input_file(pFormatCtx);
	}
}

int MovieRenderer::load(std::string filename,int start,int playMode) {
	// Open `filename`, locate its first video stream, set up the decoder and
	// the RGB conversion buffers, and seek to frame `start`.  Returns the
	// stream duration (with a large fallback when the container reports
	// none), or -1 on any failure.  `playMode` is unused here but kept for
	// the callers' signature.

	printf("load\n");
	// Tear down any previously loaded movie first.  `inited` is cleared
	// immediately and only set again on success, so a failed load never
	// leaves the destructor — or a retry by the caller — freeing dangling
	// pointers (the original marked the object inited even when every later
	// step failed).
	if (inited) {
		inited=false;
		delete [] buffer;
		delete [] alpha;
		av_free(pFrameRGB);

		// Free the deinterlace scratch frame (the original leaked one per
		// reload) and the YUV frame
		av_free(dst);
		av_free(pFrame);

		// Close the codec
		avcodec_close(pCodecCtx);

		// Close the video file
		av_close_input_file(pFormatCtx);
	}

	// c_str() guarantees a NUL-terminated string; std::string::data() does
	// not (pre-C++11).
	if(av_open_input_file(&pFormatCtx, filename.c_str(), NULL, 0, NULL)!=0){
		printf("error\n");
		return -1;
	}

	// Retrieve stream information
	if(av_find_stream_info(pFormatCtx)<0){
		printf("error\n");
		av_close_input_file(pFormatCtx);
		return -1;
	}

	// Dump information about file onto standard error
	dump_format(pFormatCtx, 0, filename.c_str(), false);

	// Find the first video stream
	videoStream=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++)
		if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
		{
			videoStream=i;
			break;
		}
	if(videoStream==-1) {
		printf("error\n");
		av_close_input_file(pFormatCtx);
		return -1;
	}
	// Use the stream we actually selected (the original read the loop index
	// `i`, which only happens to equal videoStream because of the break).
	int duration = (int)pFormatCtx->streams[videoStream]->duration;

	if (duration<1)
		duration=200000;

	// Get a pointer to the codec context for the video stream
	pCodecCtx=pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL) {
		printf("error\n");
		av_close_input_file(pFormatCtx);
		return -1;
	}

	// Inform the codec that we can handle truncated bitstreams -- i.e.,
	// bitstreams where frame boundaries can fall in the middle of packets
	if(pCodec->capabilities & CODEC_CAP_TRUNCATED)
		pCodecCtx->flags|=CODEC_FLAG_TRUNCATED;

	// Open codec
	if(avcodec_open(pCodecCtx, pCodec)<0) {
		printf("error\n");
		av_close_input_file(pFormatCtx);
		return -1;
	}

	// Allocate the decoded (YUV) frame, a scratch frame used for
	// deinterlacing in render(), and the RGB frame used for texture upload.
	pFrame=avcodec_alloc_frame();
	dst = avcodec_alloc_frame();
	pFrameRGB=avcodec_alloc_frame();
	if(pFrameRGB==NULL) {
		printf("error\n");
		av_free(pFrame);
		av_free(dst);
		avcodec_close(pCodecCtx);
		av_close_input_file(pFormatCtx);
		return -1;
	}

	// Determine required buffer size and allocate buffer
	numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
			pCodecCtx->height);
	printf("numbytes: %d\n",numBytes);
	buffer=new uint8_t[numBytes];

	// RGBA staging buffer used by render() when an opacity is applied.
	alpha = new uint8_t[pCodecCtx->width*pCodecCtx->height*4];

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
			pCodecCtx->width, pCodecCtx->height);

	// Position the demuxer at the requested start frame.
	int seek =av_seek_frame(pFormatCtx, 0, start, 0);
	printf("seek %d\n",seek);
	counter=0;
	inited=true;
	return duration;
}

void MovieRenderer::render(QGLWidget *gl,Renderable *o,AbstractRenderTree *r) {
	// Per-frame entry point: decides which movie file/frame to show for the
	// Renderable's Texture, (re)loads files as needed, decodes the next
	// frame via GetNextFrame(), and uploads the pixels as the bound GL
	// texture.  `gl` and `r` are unused here.
	Texture *parent = ((Texture*)o);
	Texture *t = parent->tmp->get();


	// Play mode 3: drive an external mencoder capture script.  When the
	// frame counter wraps past 500 the running capture processes are killed
	// (both via their stored pids and via killall) and a fresh capture is
	// forked every frame.  NOTE(review): if execv() fails, the forked child
	// hits `break` and keeps running inside this Qt process — an _exit()
	// after execv would be safer.  The `char *c[]` string-literal arrays
	// also rely on the deprecated literal-to-char* conversion.
	if (t->playMode->get()==3)  {
		printf("num %d\n",(int)t->current->get());
		if (t->current->get()>500) {
			printf("back\n");
			reset=true;
			t->current->set(-1);
			//			sleep(10);
		}
		else if (t->current->get()>400 && reset) {
			printf("reset\n");
			reset=false;

			// Kill the previously forked capture (pid and its successor).
			if (pid>0) {
				kill(pid,SIGINT);
				kill(pid+1,SIGINT);
			}
			int pid2;
			switch (pid2=fork()) {
						case 0:
							char *c[] = {"killall","mencoder","/usr/bin",NULL};
							execv("/usr/bin/killall",c);
							break;
						}
			switch (pid2=fork()) {
			case 0:
				char *c[] = {"killall","mencoder.sh","/usr/bin",NULL};
				execv("/usr/bin/killall",c);
				break;
			}

			printf("killed\n");
		}

		// (Re)start the capture script for this frame.
		switch (pid=fork()) {
		case 0:
			char *c[] = {"sh","/video/mencoder.sh","/usr/bin",NULL};
			execv("/bin/sh",c);
			break;
		}
	}




	// Play mode 1 (plain playback): forget any chunked-camera file state.
	if (t->playMode->get()==1) {
		lastFileNum=-1;
	}

	// Play mode 2 (live camera): the stream is stored as numbered 25-frame
	// chunk files ("<base>cam-<N>.avi"); a sidecar counter file
	// ("<base>cam.counter") says how many chunks exist.  Pick the chunk for
	// the current frame and load it when it changes.
	if (t->playMode->get()==2) {

		lastFileName="";
		if (t->current->get()==-1) {
			t->current->set(0);
		}
		int fileNum = ((t->current->get())/25)+1;

		if (lastFileNum==-1 || lastFileNum!=fileNum) {
			lastFileNum = fileNum;
			lastCurrent=0;
			std::ostringstream os;
			os << t->filebase->get() << "cam.counter";
			std::string counterName = os.str();
			QFile file(counterName.data());
			if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
				return;
			if (!file.atEnd()) {
				QByteArray line = file.readLine();
				QString content = QString(line);
				// Counter file holds the next chunk index; the last
				// complete chunk is one less.
				int num = content.toInt()-1;
				if (num==-1) {
					num=0;
				}

				// camToTime==1: pin playback to the newest chunk, snapping
				// the frame counter to the start of that chunk.
				if (t->camToTime->get()==1) {
					lastCurrent=((num-1)*25)+1;
//					printf("fileNum %d %d\n",fileNum,num);
					if (fileNum > num) {

						fileNum = num;
						t->current->set(lastCurrent+1);
						//						t->frameIncrement->set(t->frameIncrement->get()*1.0);
					}
					else if (fileNum < num) {
						if (fileNum < num) {
							t->current->set(lastCurrent+1);
							fileNum=num;
						}
						//						else {
						//							t->frameIncrement->set(t->frameIncrement->get()*1.0);
						//						}
					}
				}
				// Otherwise clamp to the last fully written chunk.
				else if (fileNum+1 > num) {
					printf("already reached end %d\n",fileNum);
					fileNum = num-1;
				}
			}

			std::ostringstream thumbFile;
			thumbFile << t->filebase->get() << "cam-" << fileNum << ".avi";
			printf(thumbFile.str().data());
			printf(" -cam\n");
			t->filename->set(thumbFile.str());
			// Retry until the chunk file opens (it may still be being
			// written by the capture process).
			int duration = load(thumbFile.str(),0,t->playMode->get());
			while (duration ==-1) {
				duration = load(thumbFile.str(),0,t->playMode->get());
			}
			t->length->set(duration);
			printf("cam duration %d\n",duration);
		}
	}


	// current==-1 signals "(re)start playback from the configured file".
	else if (t->current->get()==-1 ) {
		//		if (pid>0) {
		//			kill(pid,SIGINT);
		//			kill(pid+1,SIGINT);
		//		}


		lastFileName = t->filebase->get();
		int duration = load(t->filebase->get(),t->start->get(),t->playMode->get());
		t->current->set(t->start->get());
		t->length->set(duration);
		printf("duration %d\n",duration);

	}

	// The configured file changed under us: reload it.
	else if ( t->filebase->get()!=lastFileName) {
		lastFileName = t->filebase->get();
		int duration = load(t->filebase->get(),t->start->get(),t->playMode->get());
		t->current->set(t->start->get());
		t->length->set(duration);
		printf("duration %d\n",duration);
	}
	counter++;


	// Advance the frame counter based on wall-clock time and the texture's
	// fps/frameIncrement settings; frameDiff is how many frames we moved.
	if (lastTime==0) {
		lastTime = TimeHelper::currentTimeMillis();
	}

	long tempTime = TimeHelper::currentTimeMillis();

	int elapsedTime = (int)(tempTime-lastTime);
	//			printf("time: %d\n",elapsedTime);
	if (elapsedTime==0) {
		elapsedTime=1;
	}


	int frameTime = 1000/t->fps->get();
	int frameDiff =0;
	if (t->startStop->get()==1) {
		int current = (int)t->current->get();
		if (elapsedTime>frameTime) {
			float timeIncrement = ((float)elapsedTime)/((float)frameTime);

			float increment = timeIncrement*t->frameIncrement->get();

//			printf("time: %d frameTime:%d timeIncrement:%f increment:%f\n",elapsedTime,frameTime,timeIncrement,increment);

			t->current->set(t->current->get()+increment);
//			printf("%d newCurrent: %f\n",current,t->current->get());
			lastTime=tempTime;
		}
		frameDiff =  (int)t->current->get()-current;
	}
	else if (t->current->get()!=lastCurrent) {
		// Paused but scrubbed externally: force a seek-and-decode.
		frameDiff = 2;
	}




	if (frameDiff==0) {
		// No frame advance: keep showing the previously uploaded texture.
	}
	// End of clip in plain playback: loop back to the start frame.
	else if (t->playMode->get()==1 && ((t->stop->get()==-1 && t->current->get()>t->length->get())||(t->stop->get()>-1 && t->current->get()>t->stop->get()))) {
		printf("end Reached 2\n");
		if (t->playMode->get()==1) {
			t->current->set(t->start->get());
		}
		//		else {
		//			t->current->set(-1);
		//		}
	}
	// Decode the next frame and convert it to packed RGB24 in pFrameRGB.
	else if(GetNextFrame(pFormatCtx, pCodecCtx, videoStream, pFrame,t,frameDiff))
	{

		img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24, (AVPicture*)pFrame,
				pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);

		// Deinterlacing path — currently disabled by the `&& false`.
		if(pFrame->interlaced_frame && false) {
			printf("colorspace: %d\n",PIX_FMT_RGB24);
			avpicture_deinterlace((AVPicture *)dst, (AVPicture *)pFrameRGB, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
			printf("converted\n");
			avpicture_deinterlace( (AVPicture *)pFrameRGB,(AVPicture *)dst, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
			//					pFrameRGB=dst;
		}



	}

	//			            SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
	int size =pCodecCtx->width*pCodecCtx->height;
	//	printf("size : %d \n",size);
	uint8_t *val = pFrameRGB->data[0];
	// Camera mode: treat a frame whose first 10000 RGB bytes are all zero
	// as "black" (still being written) and skip the texture re-upload.
	bool blackFrame=false;
	if (t->playMode->get()==2) {
		int i;
		bool nonBlack = false;
		for (i=0;i<10000;i++) {
			if (val[i]>0) {
				nonBlack=true;
				break;
			}
		}
		if (!nonBlack) {
			blackFrame=true;
		}
	}

	// When an opacity is set, expand the RGB frame into the RGBA `alpha`
	// buffer with a constant alpha of 254-(2*opacity) clamped to [0,254].
	int opacity=0;
	if (t->opacity->get()>0) {
		opacity = t->opacity->get()*2;
		if (opacity>254)
			opacity=254;
		opacity=254-opacity;



		int i,r,g,b,m,index;
		for (i=0;i<size*4;i++) {
			m=i%4;
			if (m==3) {
				//				if (r<40)
				//					alpha[i]=0;
				//				else
				//					alpha[i]=opacity;

				alpha[i]=opacity;
			}
			else{
				// Map RGBA index i back to the packed RGB24 source index.
				index=i-(i/4);
				if (m==0)
					r=val[index];
				else if (m==1)
					g=val[index];
				else
					b=val[index];
				alpha[i]=val[index];
			}
			//			printf("%d",alpha[i]);
		}
		//					        printf("\n");
	}
	// Recreate the texture object for the new frame (skipped for black
	// camera frames so the previous image stays visible).
	if (!blackFrame) {

		glDeleteTextures(1,&texture);
		glGenTextures( 1, &texture );
	}
	glBindTexture( GL_TEXTURE_2D, texture );
	if (!blackFrame) {
		if (t->opacity->get()>0) {
			// Blended RGBA upload using the precomputed `alpha` buffer.
			glEnable(GL_BLEND);
			//		glColor4b(255,255,255,opacity/2);
			glDisable(GL_DEPTH_TEST);
			glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

			glTexEnvf(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_REPLACE);
			glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, pCodecCtx->width, pCodecCtx->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, alpha );

			// Turn Blending On
		}
		else {
			// Opaque RGB upload straight from the decoded frame.
			glDisable(GL_BLEND);

			//			glTexEnvf(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_DECAL);

			glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, pCodecCtx->width, pCodecCtx->height, 0, GL_RGB, GL_UNSIGNED_BYTE, val );

		}

		glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
		glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
	}




}

bool MovieRenderer::GetNextFrame(AVFormatContext *pFormatCtx, AVCodecContext *pCodecCtx,
		int videoStream, AVFrame *pFrame,Texture *t,int frameDiff) {
	// Decode packets until one complete video frame is available in pFrame;
	// returns true when a frame was produced.  Decoder state (the current
	// packet and its undecoded remainder) lives in function statics so a
	// frame that spans packets can be resumed on the next call.
	// NOTE(review): the statics are shared across all MovieRenderer
	// instances, so this is not safe for decoding two movies concurrently.

	static AVPacket packet;
	static int      bytesRemaining=0;
	static uint8_t  *rawData;
	// Must be static: it guards the one-time packet.data=NULL setup.  As a
	// plain local (the original bug) it reset to true on every call, which
	// nulled packet.data each time and leaked the previous packet because
	// the av_free_packet() below was then always skipped.
	static bool     fFirstTime=true;
	int             bytesDecoded;
	int             frameFinished;

	// First time we're called, set packet.data to NULL to indicate it
	// doesn't have to be freed
	if(fFirstTime)
	{
		fFirstTime=false;
		packet.data=NULL;
	}

	printf("frameDiff: %d\n",frameDiff);
	// When stepping by anything other than one frame (or always in camera
	// play mode 2) seek the demuxer straight to the target frame instead of
	// decoding through the intervening ones.
	if (frameDiff!=1 || t->playMode->get()==2) {
		int current = (int)t->current->get();
		//TODO only used because of encoding bug
		if (t->playMode->get()==2){
			// Map the global frame counter into the 25-frame chunk files
			// (stored at double rate), clamped to the chunk length.
			current = current % 25;
			current=current*2;
			lastCurrent=current*12.5;
			if (current>46)
				current=46;
			printf("current %d %d\n",current,(int)t->current->get());
		}
		av_seek_frame(pFormatCtx, 0,current, 0);
	}

	// Decode packets until we have decoded a complete frame
	while(true)
	{
		// Work on the current packet until we have decoded all of it
		while(bytesRemaining > 0)
		{
			// Decode the next chunk of data
			bytesDecoded=avcodec_decode_video(pCodecCtx, pFrame,
					&frameFinished, rawData, bytesRemaining);

			// Was there an error?
			if(bytesDecoded < 0)
			{
				fprintf(stderr, "Error while decoding frame\n");
				return false;
			}

			bytesRemaining-=bytesDecoded;
			rawData+=bytesDecoded;

			// Did we finish the current frame? Then we can return
			if(frameFinished)
				return true;
		}

		// Read the next packet, skipping all packets that aren't for this
		// stream
		do
		{
			// Free old packet
			if(packet.data!=NULL)
				av_free_packet(&packet);

			// Read new packet
			int read = av_read_frame(pFormatCtx, &packet);
			if(read<0) {
				// End of stream: fall through and flush the decoder with
				// whatever data is left.
				goto loop_exit;
			}
		} while(packet.stream_index!=videoStream);

		bytesRemaining=packet.size;
		rawData=packet.data;
	}

	loop_exit:

	// Decode the rest of the last frame
	bytesDecoded=avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
			rawData, bytesRemaining);

	// Free last packet
	if(packet.data!=NULL)
		av_free_packet(&packet);

	return frameFinished!=0;
}

