// client_android.c

#include "client_android.h"

// Create a global packet queue
static PacketQueue pktQueue;

// Create a global server handling
static video_client_t cltVideo;

// Creating mutex and cond
pthread_mutex_t send_mutex;
pthread_cond_t send_cond;

// Main function. arg1 - server ip. arg2 - server port
// Main function. arg1 - server ip. arg2 - server port
// Spawns the camera reader thread and blocks on stdin until the user
// presses a key, then terminates the whole process.
int main(int argc, char *argv[])
{
        // Check input from user (need to be ./client SERVER_IP PORT)
        if (argc < 3)
        {
                fprintf(stderr,"Usage: ./client SERVER_IP SERVER_PORT \n");
                exit (2);
        }

	// Creating new server struct
	server_info_t server_info;

	// Initialize (argv strings stay valid for the process lifetime)
	server_info.server_ip=argv[1];
	server_info.server_port=argv[2];

	// Creating new thread.
	// NOTE: pthread_create() returns 0 on success and an ERROR NUMBER on
	// failure -- it never returns -1 and does not set errno, so the old
	// `== -1` check could never fire and perror() would print nonsense.
	pthread_t reader;
	int rc = pthread_create(&reader, NULL, (void*)read_camera, &server_info);
	if (rc != 0)
	{
		// Without the reader thread the client is useless; bail out
		// instead of silently continuing as the old code did.
		fprintf(stderr, "Unable to create thread: reader (error %d). \n", rc);
		exit(3);
	}

	// Stoping..
	fprintf(stdout,"Press any key to stop client..\n");
	getchar();

	exit(0);
}

// Allocating new frame
// Point an AVPicture's planes at an SDL YV12 overlay's pixel buffers.
// SDL's YV12 overlay stores planes in Y,V,U order while AVPicture's
// YUV420P layout expects Y,U,V -- so planes 1 and 2 are cross-mapped.
void alloc_yuv_frame(SDL_Overlay *bmp, AVPicture *pict) 
{
	// index i of the picture reads from overlay plane plane_map[i]
	static const int plane_map[3] = { 0, 2, 1 };
	int i;

	for (i = 0; i < 3; i++)
	{
		pict->data[i] = bmp->pixels[plane_map[i]];
		pict->linesize[i] = bmp->pitches[plane_map[i]];
	}
}

// Reading from camera
// Camera reader thread entry point.
// Captures frames from the local V4L2 camera, decodes them, shows them in
// an SDL window, and every few frames re-encodes a 320x240 rawvideo copy
// and queues it for the sender thread (send_video_frame).
// NOTE(review): this function calls exit() on every error path; since it
// runs as a pthread, exit() terminates the WHOLE process, not just this
// thread -- presumably intentional for this simple client, but verify.
// NOTE(review): uses long-removed FFmpeg 0.x APIs (av_open_input_file,
// avcodec_alloc_context, CODEC_ID_*); will not build against modern FFmpeg.
int *read_camera (server_info_t *server_info)
{
	// Consts
	const char *DeviceName="/dev/video0"; // This is the camera device for linux. may differ on android.

	// Variables
	// Server variables
	int nCounter;

	// ffmpeg variables
	AVFormatContext *pFormatCtx;
	AVOutputFormat	*pOutFormat;
 	int             i, videoStream;
 	AVCodecContext  *pCodecCtx, *newCodecCtx;   // decoder ctx / rawvideo encoder ctx
 	AVCodec         *pCodec, *newCodec;
 	AVFrame         *pFrame, *picture; 
	AVPacket        packet, sendPacket;
	int             frameFinished;
	float           aspect_ratio;
	AVInputFormat *iformat;
	uint8_t *outbuf;                            // encoder output buffer, reused every frame
	int	outbuf_size = 1000000;
	static struct SwsContext *img_convert_ctx;
	long		frame_number = 0;

	SDL_Overlay     *bmp;                       // local full-size preview overlay
	SDL_Overlay	*remote_bmp;                // 320x240 staging overlay for the encoder
	SDL_Surface     *screen;
	SDL_Rect        rect;

	// NOTE(review): malloc result is not checked; never freed (process
	// exits at the end of this function anyway).
	outbuf = malloc(outbuf_size);

	// Initialize the global packet queue
	packet_queue_init(&pktQueue);

	// Initialize mutex and cond (shared with the sender thread)
	pthread_mutex_init(&send_mutex, NULL);
	pthread_cond_init(&send_cond, NULL);

	// Generating client struct (connects to server_ip:server_port)
	video_client(&cltVideo, server_info->server_ip, server_info->server_port);

	// Creating sender thread; it drains pktQueue and streams to the server.
	pthread_t sender;
	// NOTE(review): pthread_create returns an error number, not -1, so
	// this check can never trigger -- same bug as in main().
	if (pthread_create(&sender, NULL, send_video_frame, NULL) == -1)
	{
		fprintf(stderr, "Unable to create thread: sender. \n");
		perror("pthread_create");
	}

	// Register all formats and codecs
	av_register_all();
  
	// Registering all avdevices (for camera)
	avdevice_register_all();

	// Getting the format. Avilable formats via `ffmpeg -formats` on CLI.
	iformat = av_find_input_format("video4linux2");

	// Open the camera device. From now on, using camera like a movie.
	if (av_open_input_file(&pFormatCtx, DeviceName,  iformat, 0, NULL) !=0)
	{
		printf("Could not open camera!\n");
		exit(5) ; // Could not open camera
	}

	// Creating the output format
	// NOTE(review): oformat is set on an INPUT context here and never
	// used for muxing below -- looks like dead code; confirm before removing.
	pOutFormat = av_guess_format("mpeg", NULL, NULL);
	pFormatCtx->oformat = pOutFormat;

	// Retrieve stream information
	if(av_find_stream_info(pFormatCtx)<0)
	exit(-1); // Couldn't find stream information
  
	// Find the first video stream
	videoStream=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++)
	if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
	{
		videoStream=i;
		break;
	}

	// Checking for stream not found.
	if(videoStream==-1)
	{
		fprintf(stderr,"Could not find a video stream. \n");
		exit(6);
	}
  
	// Get a pointer to the codec context for the video stream
	pCodecCtx=pFormatCtx->streams[videoStream]->codec;
  
	// Find the decoder for the video stream
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL) 
	{
		fprintf(stderr, "Unsupported codec!\n");
		exit(7);
	}
	
	// Open codec
	if(avcodec_open(pCodecCtx, pCodec)<0)
	{
		fprintf(stderr,"Could not open codec! \n");
		exit(8);
	}

	// Allocate video frame (decoder writes into this)
        pFrame=avcodec_alloc_frame();

	// Finding encoder: rawvideo keeps the sending path trivial (no
	// inter-frame state), at the cost of bandwidth.
        newCodec = avcodec_find_encoder(CODEC_ID_RAWVIDEO);

        if (!newCodec) {
                fprintf(stderr, "couldn't find codec\n");
                exit(1);
        }

	// Allocating new codec context
	newCodecCtx = avcodec_alloc_context();

	// Generate the sending codec: 320x240 YUV420P @ 15 fps
        avcodec_get_context_defaults2(newCodecCtx, AVMEDIA_TYPE_VIDEO);
        newCodecCtx->codec_id = CODEC_ID_RAWVIDEO;
        newCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
        newCodecCtx->strict_std_compliance = 0;
        newCodecCtx->time_base.den = 15;
        newCodecCtx->time_base.num = 1;
        newCodecCtx->gop_size = 12;
        newCodecCtx->pix_fmt = PIX_FMT_YUV420P;
        newCodecCtx->width = 320;
        newCodecCtx->height = 240;

        if (avcodec_open(newCodecCtx, newCodec) < 0) {
                fprintf(stderr, "couldn't open codec\n");
                exit(1);
        } 
	
	// Make a screen to put our video
	#ifndef __DARWIN__
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
	#else
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
	#endif
	if(!screen) {
		fprintf(stderr, "SDL: could not set video mode - exiting\n");
		exit(1);
	}

	SDL_WM_SetCaption("Awesome remote video client!", NULL);

	// Allocate a place to put our YUV image on that screen
	bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
                                 pCodecCtx->height,
                                 SDL_YV12_OVERLAY,
                                 screen);

	// Second overlay only serves as pre-allocated plane storage for the
	// downscaled frame handed to the encoder; it is never displayed.
	remote_bmp = SDL_CreateYUVOverlay(newCodecCtx->width, newCodecCtx->height, SDL_YV12_OVERLAY, screen);

	// Main loop. Reading from camera.		
	while(av_read_frame(pFormatCtx, &packet)>=0) 
	{
		// Checking if we got a video stream
		// NOTE(review): this `continue` skips av_free_packet below,
		// leaking any non-video packet -- TODO confirm and fix.
		if (packet.stream_index != videoStream)
			continue;
		
		// Decode video frame
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
		
		// Did we get a complete frame?
		if (frameFinished)
		{
			// Creating two pictures. one to display, and one to send.
			AVPicture local_pict;
			AVPicture remote_pict;
			
			// Point the pictures at the overlays' plane buffers
			alloc_yuv_frame(bmp, &local_pict);
			alloc_yuv_frame(remote_bmp, &remote_pict);

			int w = pCodecCtx->width;
			int h = pCodecCtx->height;
			
			// Rescale local picture (colorspace convert, same size).
			// NOTE(review): allocating/freeing an SwsContext per frame
			// is wasteful; sws_getCachedContext would avoid the churn.
			img_convert_ctx = sws_getContext(w, h,
                                                pCodecCtx->pix_fmt,
                                                w, h, PIX_FMT_YUV420P, SWS_BILINEAR,
                                                0, 0, 0);
			
			sws_scale(img_convert_ctx, (const uint8_t **)pFrame->data, pFrame->linesize, 0,
                                                  h, local_pict.data, local_pict.linesize);
			
			sws_freeContext(img_convert_ctx);

			// Show image locally
			// NOTE(review): overlay is unlocked without a matching
			// SDL_LockYUVOverlay before writing -- verify against SDL 1.2 docs.
			SDL_UnlockYUVOverlay(bmp);
			rect.x = 0;
			rect.y = 0;
			rect.w = pCodecCtx->width;
			rect.h = pCodecCtx->height;
			SDL_DisplayYUVOverlay(bmp, &rect);

			// Drop every four frame
			// NOTE(review): because frame_number is reset to 0 on every
			// send and then incremented, the condition is false only for
			// the very first frame -- effectively nothing is dropped
			// after startup. Confirm intended cadence.
			if (frame_number % 4) 
			{
				// Convert the remote pict (downscale to 320x240)
				img_convert_ctx = sws_getContext(w, h,
                                                pCodecCtx->pix_fmt,
                                                320, 240, PIX_FMT_YUV420P, SWS_BILINEAR,
                                                0, 0, 0);

	                        sws_scale(img_convert_ctx, (const uint8_t **)pFrame->data, pFrame->linesize, 0,
                                                  320, remote_pict.data, remote_pict.linesize);

        	                sws_freeContext(img_convert_ctx);

				// Encode frame into the shared outbuf
                	        int out_size = avcodec_encode_video(newCodecCtx, outbuf, outbuf_size, (AVFrame*) &remote_pict);

				// Generating a new AVPacket carrying the encoded bytes
				// plus the timing fields copied from the source packet.
				AVPacket pkt;
		                av_init_packet(&pkt);
	 			pkt.pts = packet.pts;
				pkt.dts = packet.dts;
				pkt.duration = packet.duration;
				pkt.pos = packet.pos;
				pkt.convergence_duration = packet.convergence_duration;
				pkt.flags |= AV_PKT_FLAG_KEY;
				pkt.stream_index= 0;
				pkt.data= outbuf;
				pkt.size= out_size;

				// NOTE(review): pkt.data aliases the reused outbuf --
				// assumes packet_queue_put() duplicates the payload,
				// otherwise the next frame overwrites queued data. Verify.
				if (packet_queue_put(&pktQueue, &pkt) == -1)
	               		{
        	                	fprintf(stderr,"Unable to put packet on queue");
               	        		perror("packet_queue_put");
                		}

				// Added packet to queue, notify thread.
				// NOTE(review): if the sender is not already blocked in
				// pthread_cond_wait this signal is lost -- classic lost-wakeup.
 		                pthread_cond_signal(&send_cond);
	
				av_free_packet(&pkt);

				// Zeroing the counter
				frame_number = 0;
			}

			// Inc counter
			frame_number ++;
		}
	
		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);

	}

	// Close the connection
	close_video_client(&cltVideo);

	//Free the YUV frame
	av_free(pFrame);
  
	// Close the codec
	avcodec_close(pCodecCtx);
  
	// Close the video file
	av_close_input_file(pFormatCtx);
  
	// Destroy mutex and cond
	pthread_mutex_destroy(&send_mutex);
	pthread_cond_destroy(&send_cond);	

	// Terminates the entire process (both threads).
	exit (0);
}

// Send frames to packet
// Sender thread entry point.
// Waits for the reader thread to signal that a packet was queued, pops it
// from the global queue, and streams the AVPacket to the server as a flat
// field-by-field serialization (pts, dts, size, data, stream_index, flags,
// duration, pos, convergence_duration).
void *send_video_frame()
{
	// BUG FIX: pthread_cond_wait() must be called with its mutex HELD;
	// the old code never locked send_mutex, which is undefined behavior
	// per POSIX. Acquire it once; cond_wait atomically releases it while
	// sleeping and re-acquires it on wakeup.
	pthread_mutex_lock(&send_mutex);

	while(1)
	{
		// Block until the reader signals that a packet is in the queue.
		pthread_cond_wait(&send_cond, &send_mutex);

		AVPacket pkt;

		// Getting packet from queue (blocking)
		if (packet_queue_get(&pktQueue, &pkt, 1) < 0)
		{
			fprintf(stderr,"Could not read packet from queue");
			perror("packet_queue_get");

			// Free the packet.. (to avoid ioctl issues)
			av_free_packet(&pkt);

			// Quit loop, try again.
			continue;
		}

		// Send to server. Serializing the AVPacket
		send_video_data(&cltVideo, (void*)&pkt.pts, sizeof(int64_t));
		send_video_data(&cltVideo, (void*)&pkt.dts, sizeof(int64_t));
		send_video_data(&cltVideo, (void*)&pkt.size, sizeof(int));
		// BUG FIX: send the payload bytes themselves. The old code passed
		// &pkt.data -- the ADDRESS of the data pointer -- so the server
		// received pkt.size bytes of stack garbage instead of the frame.
		send_video_data(&cltVideo, (void*)pkt.data, pkt.size);
		send_video_data(&cltVideo, (void*)&pkt.stream_index, sizeof(int));
		send_video_data(&cltVideo, (void*)&pkt.flags, sizeof(int));
		send_video_data(&cltVideo, (void*)&pkt.duration, sizeof(int));
		send_video_data(&cltVideo, (void*)&pkt.pos, sizeof(int64_t));
		send_video_data(&cltVideo, (void*)&pkt.convergence_duration, sizeof(int64_t));

		// NOTE(review): the packet is intentionally NOT freed here (the
		// original author disabled it "to avoid ioctl issues"); pkt.data
		// aliases the reader's reused buffer, so freeing may be unsafe.
		// Verify packet ownership in packet_queue_put before re-enabling.
//		av_free_packet(&pkt);
	}
}

