/**
 * $Id: av.cpp 30 2010-08-03 12:32:42Z bearice $
 * Copyright (C) 2010 Bearice
**/
#include "stdafx.h"
#include "av.h"
#include "Drawing.h"

// Window that receives a WM_TIMER poke when audio output opens (set in AV_Init,
// used by AudioThread on MM_WOM_OPEN).
static HWND hWindow;
// Shared scratch buffer for decoded PCM samples, AVCODEC_MAX_AUDIO_FRAME_SIZE
// bytes, allocated once in AV_Init. NOTE(review): used by DecodeAudio without
// locking — assumes a single decoding thread; confirm against callers.
static int16_t* audioBuffer;

/**
 * Initialise the AV subsystem: register all FFmpeg formats/codecs, allocate
 * the shared PCM decode buffer, and remember the notification window.
 *
 * @param hWnd  window later notified (WM_TIMER) when audio playback opens.
 * @return S_OK on success, E_OUTOFMEMORY if the decode buffer allocation fails.
 */
HRESULT AV_Init(HWND hWnd){
	//av_log_set_callback(av_log_callback);
	av_register_all();
	audioBuffer=(int16_t*)av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE);
	if(!audioBuffer){
		// FIX: av_malloc can fail; previously the NULL result went unchecked
		// and DecodeAudio would have written through a null pointer.
		OutputDebugString(_T("AV_Init: av_malloc failed\n"));
		return E_OUTOFMEMORY;
	}
	hWindow=hWnd;
	return S_OK;
}

/**
 * Dedicated audio playback thread.
 *
 * Pumps its thread message queue (GetMessage with hWnd == -1 retrieves only
 * thread messages: those posted by PostThreadMessage and the waveOut
 * CALLBACK_THREAD notifications):
 *   - WM_USER+1:   a filled WAVEHDR from DecodeAudio — prepare and submit it.
 *   - MM_WOM_OPEN: device opened — kick the render loop via WM_TIMER.
 *   - MM_WOM_DONE: a buffer finished playing — unprepare, free it, and wake
 *                  the decoder when the queue drains below 20 buffers.
 *   - MM_WOM_CLOSE: device closed — exit the thread.
 *
 * Ownership: WAVEHDRs (and their lpData) are malloc'd by DecodeAudio and
 * freed here, on either the error paths or MM_WOM_DONE.
 *
 * @param o  player state (waveOut handle, buffer counter, sync objects).
 * @return 0 when the device closes or the message pump fails.
 */
DWORD WINAPI AudioThread(LPORANGE o){
	MSG msg;
	LPWAVEHDR wave;
	MMRESULT mmret;
	int gmres;
	DWORD waitres;
	while(TRUE){
		// FIX: the return value was ignored; on -1 (error) or 0 (WM_QUIT) the
		// old loop kept spinning on a dead queue. Bail out instead.
		gmres = GetMessage(&msg,(HWND)-1,NULL,NULL);
		if(gmres==0 || gmres==-1){
			return 0;
		}
		switch(msg.message){
		case WM_USER+1:	// new PCM buffer queued by DecodeAudio
			wave = (LPWAVEHDR)msg.lParam;
			mmret = waveOutPrepareHeader(o->waveOut,wave,sizeof(WAVEHDR));
			if(MMSYSERR_NOERROR!=mmret){
				OutputDebugString(_T("waveOutPrepareHeader() failed\n"));
				free(wave->lpData);
				free(wave);
				break;
			}

			mmret = waveOutWrite(o->waveOut,wave,sizeof(WAVEHDR));
			if(MMSYSERR_NOERROR!=mmret){
				waveOutUnprepareHeader(o->waveOut,wave,sizeof(WAVEHDR));
				OutputDebugString(_T("waveOutWrite() failed\n"));
				free(wave->lpData);
				free(wave);
				break;
			}
			/*
			TCHAR debug_buf[128];
			_stprintf(debug_buf,_T("AudioThread: %d bytes to device\n",wave->dwBufferLength));
			OutputDebugString(debug_buf);
			*/
			break;
		case MM_WOM_OPEN:
			OutputDebugString(_T("WOM_OPEN\n"));
			PostMessage(hWindow,WM_TIMER,NULL,NULL);
			break;
		case MM_WOM_DONE :
			//OutputDebugString(L"WOM_DONE\n");
			wave = (LPWAVEHDR)msg.lParam;
			waveOutUnprepareHeader(o->waveOut,wave,sizeof(WAVEHDR));
			free(wave->lpData);
			free(wave);
			waitres = WaitForSingleObject(o->hAudioLock,1000);
			// Fewer than 20 buffers outstanding: wake DecodeAudio's producer.
			if(--o->waveBufferAllocated<20){
				SetEvent(o->hAudioSignal);
			}
			// FIX: only release the mutex when we actually acquired it —
			// releasing after a WAIT_TIMEOUT is an error (unowned mutex).
			if(waitres==WAIT_OBJECT_0){
				ReleaseMutex(o->hAudioLock);
			}
			break;
		case MM_WOM_CLOSE:
			OutputDebugString(_T("WOM_CLOSE\n"));
			return 0;
		}
	}
}


/**
 * Decode every audio frame contained in one packet and hand each resulting
 * PCM buffer to the audio thread (WM_USER+1) for playback.
 *
 * Ownership: each WAVEHDR and its lpData are malloc'd here and freed by
 * AudioThread (error paths or MM_WOM_DONE).
 *
 * @param o       player state (audio codec ctx, thread id, sync objects).
 * @param packet  compressed audio packet from av_read_frame; may hold
 *                several frames, hence the consume loop.
 */
void DecodeAudio(LPORANGE o,AVPacket* packet){
	int total_used=0;
	LPWAVEHDR wave=NULL;
	while(total_used<packet->size){
		int buflen = AVCODEC_MAX_AUDIO_FRAME_SIZE;
		// FIX: the read offset must be the RUNNING TOTAL of consumed bytes.
		// The old code passed only the previous call's return value, so from
		// the third frame onward it decoded from the wrong offset.
		int bytes_used = avcodec_decode_audio2(o->aCodecCtx,audioBuffer,&buflen,
				packet->data+total_used,packet->size-total_used);
		if(bytes_used<=0){
			return;		// decode error, or nothing left to consume
		}
		total_used += bytes_used;
		if(buflen<=0){
			// Bytes were consumed but no samples produced (codec buffering);
			// nothing to queue for this iteration.
			continue;
		}

		wave = (LPWAVEHDR)malloc(sizeof(WAVEHDR));
		if(!wave){
			return;		// FIX: unchecked malloc
		}
		memset(wave,0,sizeof(WAVEHDR));
		wave->dwBufferLength = buflen;
		wave->lpData = (char*)malloc(buflen);
		if(!wave->lpData){
			free(wave);	// FIX: unchecked malloc
			return;
		}
		memcpy(wave->lpData,audioBuffer,buflen);
		WaitForSingleObject(o->hAudioLock,1000);
		o->waveBufferAllocated++;
		ReleaseMutex(o->hAudioLock);
		PostThreadMessage(o->aThread,WM_USER+1,NULL,(LPARAM)wave);
	}
	return;
}

/**
 * Read packets until one complete video frame has been decoded, scaling it
 * into targetFrame via the pre-built sws context. Audio packets encountered
 * on the way are routed to DecodeAudio. At end of file the stream is rewound
 * so playback loops.
 *
 * @param o            player state (format ctx, codec ctxs, stream indices).
 * @param targetFrame  destination picture for sws_scale output.
 * @return S_OK once a frame is delivered; E_FAIL if the rewind seek fails.
 */
HRESULT AV_NextFrame(LPORANGE o,AVFrame* targetFrame){
	int frameFinished;
	AVPacket packet;
	for(;;){
		while(av_read_frame(o->pFormatCtx, &packet)>=0) {
			// Is this a packet from the video stream?
			if(packet.stream_index==o->videoStream) {
				// Decode video frame
				avcodec_decode_video2(o->vCodecCtx, o->pFrame, &frameFinished, &packet);

				// Did we get a video frame?
				if(frameFinished) {
					// Convert the image.
					// FIX: the source height came from streams[i] with i stuck
					// at 0 — wrong whenever the video stream isn't stream 0.
					// Use the video codec context's height instead.
					if(o->sws)
						sws_scale(o->sws,
							o->pFrame->data,o->pFrame->linesize,
							0,o->vCodecCtx->height,
							targetFrame->data, targetFrame->linesize);
					//FIXED memory_leak
					av_free_packet(&packet);
					return S_OK;
				}
			}else if(packet.stream_index==o->audioStream){
				DecodeAudio(o,&packet);
			}
			// Free the packet that was allocated by av_read_frame
			av_free_packet(&packet);
		}
		// End of stream: rewind and keep looping. FIX: if the seek fails
		// (unseekable input) the old goto spun forever — fail out instead.
		if(av_seek_frame(o->pFormatCtx,o->videoStream,0,0)<0)
			return E_FAIL;
	}
}

/**
 * Open a media file and fully initialise the ORANGE player state:
 * demuxer, audio+video codecs, waveOut device, audio thread, and the
 * sws scaling context.
 *
 * @param hWnd  owner window (currently unused here; AV_Init's window is used
 *              for notifications).
 * @param o     player state to populate.
 * @param file  path to the media file (TCHAR; converted to ANSI for FFmpeg).
 * @param w,h   target picture size; 0 means "use the source size".
 * @param fmt   target pixel format; PIX_FMT_NONE means "use the source format".
 * @return S_OK on success, E_FAIL on any open/codec/device failure.
 *         NOTE(review): handles/contexts created before a failure are not
 *         torn down here; callers should invoke AV_FreeOrange on failure.
 */
HRESULT AV_LoadOrange(HWND hWnd,LPORANGE o,TCHAR* file,int w,int h,enum PixelFormat fmt){
#ifdef UNICODE
	char _f[MAX_PATH];
	int _len=(int)wcslen(file);
	// FIX: clamp so a long path cannot overflow _f / the terminator index.
	if(_len>MAX_PATH-1)_len=MAX_PATH-1;
	WideCharToMultiByte(0,0,file,_len,_f,MAX_PATH,0,0);
	_f[_len]=0;
#else
	// FIX: was "char _f = file;" — truncated the pointer to a single char.
	char* _f = file;
#endif
	unsigned int i;
	o->pause=TRUE;
	o->hAudioLock = CreateMutex(NULL,FALSE,NULL);
	o->hAudioSignal = CreateEvent(NULL,FALSE,FALSE,NULL);
	o->hRenderSignal = CreateEvent(NULL,FALSE,FALSE,NULL);
	if(av_open_input_file(&o->pFormatCtx,_f, NULL, 0, NULL)!=0){
		REPORT_ERROR("Couldn't open file ");
		return E_FAIL; // Couldn't open file
	}

	o->audioStream=o->videoStream=-1;
	// Find the first video and first audio stream.
	for(i=0; i<o->pFormatCtx->nb_streams; i++){
		if(o->pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO && o->videoStream < 0) {
			o->videoStream=i;
		}
		if(o->pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO && o->audioStream < 0) {
			o->audioStream=i;
		}
	}
	if(o->videoStream==-1 || o->audioStream==-1)
		return E_FAIL; // Didn't find a audio/video stream

	// ---- audio codec ----
	o->aCodecCtx=o->pFormatCtx->streams[o->audioStream]->codec;
	o->aCodec = avcodec_find_decoder(o->aCodecCtx->codec_id);
	if(!o->aCodec) {
		REPORT_ERROR("Unsupported codec!");
		return E_FAIL;
	}
	if(avcodec_open(o->aCodecCtx, o->aCodec)<0)return E_FAIL; // Could not open codec

	// Build the waveOut PCM format from the codec's sample format.
	o->aFormat.nSamplesPerSec  = o->aCodecCtx->sample_rate;
	switch(o->aCodecCtx->sample_fmt){
		case SAMPLE_FMT_U8:
			o->aFormat.wBitsPerSample = 8;
			break;
		case SAMPLE_FMT_S16:
			o->aFormat.wBitsPerSample = 16;
			break;
		case SAMPLE_FMT_S32:
			o->aFormat.wBitsPerSample = 32;
			break;
		case SAMPLE_FMT_FLT:
			// FIX: SAMPLE_FMT_FLT is single-precision float (32 bits);
			// sizeof(double)*8 declared 64 bits and broke the block align.
			o->aFormat.wBitsPerSample = sizeof(float) * 8;
			break;
		default:
			o->aFormat.wBitsPerSample = 0;	// unsupported; waveOutOpen will reject it
			break;
	}
	o->aFormat.wFormatTag = WAVE_FORMAT_PCM;
	o->aFormat.cbSize = 0;
	o->aFormat.nChannels = o->aCodecCtx->channels;
	o->aFormat.nBlockAlign = (o->aFormat.wBitsPerSample * o->aFormat.nChannels) >> 3;
	o->aFormat.nAvgBytesPerSec = o->aFormat.nBlockAlign * o->aFormat.nSamplesPerSec;
	o->waveBufferAllocated=0;

	if(!(o->aThreadHandle=CreateThread(NULL,NULL,(LPTHREAD_START_ROUTINE)AudioThread,o,0,&o->aThread))){
		REPORT_ERROR("CreateAudioThread failed");
		return E_FAIL;
	}

	MMRESULT ret = waveOutOpen(&o->waveOut,WAVE_MAPPER,&o->aFormat,(DWORD_PTR)o->aThread,(DWORD_PTR)o,CALLBACK_THREAD);
	if(MMSYSERR_NOERROR!=ret){
		REPORT_ERROR("waveOutOpen failed (wave format unsupported?)");
		return E_FAIL;
	}

	// ---- video codec ----
	// FIX: the code below (and AV_FreeOrange) reads o->pCodecCtx, but only
	// o->vCodecCtx was ever assigned, leaving pCodecCtx uninitialised.
	// Keep both members pointing at the video codec context.
	o->vCodecCtx=o->pCodecCtx=o->pFormatCtx->streams[o->videoStream]->codec;
	float frame_time = 1.0f*o->pFormatCtx->streams[o->videoStream]->time_base.num/o->pFormatCtx->streams[o->videoStream]->time_base.den;
	o->fps=1/frame_time;

	o->pCodec=avcodec_find_decoder(o->pCodecCtx->codec_id);
	if(o->pCodec==NULL) {
		REPORT_ERROR("Unsupported codec!");
		return E_FAIL;
	}
	if(avcodec_open(o->pCodecCtx, o->pCodec)<0)return E_FAIL; // Could not open codec

	// Video frame. FIX: AV_NextFrame decodes into o->pFrame and AV_FreeOrange
	// frees o->pFrame, yet only o->vFrame was allocated — alias both members
	// to the single allocation so pFrame is valid (and freed exactly once).
	o->pFrame=o->vFrame=avcodec_alloc_frame();

	w=w?w:o->pCodecCtx->width;
	h=h?h:o->pCodecCtx->height;

	// Default unknown pixel formats to YUV420P / the source format.
	if(o->pCodecCtx->pix_fmt==PIX_FMT_NONE)
		o->pCodecCtx->pix_fmt=PIX_FMT_YUV420P;
	if(fmt==PIX_FMT_NONE)
		fmt=o->pCodecCtx->pix_fmt;

	o->sws = sws_getContext(o->pCodecCtx->width, o->pCodecCtx->height,o->pCodecCtx->pix_fmt,w,h,fmt,SWS_FAST_BILINEAR,NULL,NULL,NULL);
	if(NULL==o->sws)return E_FAIL;
	return S_OK;
}

/**
 * Tear down everything AV_LoadOrange created: scaler, frame, wave device,
 * audio thread, codec contexts, demuxer, and the kernel sync objects.
 *
 * @param o  player state to release; individual members may be NULL if
 *           AV_LoadOrange failed part-way.
 */
void AV_FreeOrange(LPORANGE o){
	if(o->sws)sws_freeContext(o->sws);
	if(o->pFrame)av_free(o->pFrame);
	// Closing the device posts MM_WOM_CLOSE, which makes AudioThread return.
	if(o->waveOut)waveOutClose(o->waveOut);
	if(o->aThreadHandle){
		// FIX: was Sleep(5) + unconditional TerminateThread. Wait for the
		// thread to exit on its own; kill it only as a last resort, and
		// close the handle either way (it was leaked before).
		if(WaitForSingleObject(o->aThreadHandle,1000)!=WAIT_OBJECT_0)
			TerminateThread(o->aThreadHandle,0);
		CloseHandle(o->aThreadHandle);
	}
	// FIX: the mutex/event handles created in AV_LoadOrange were leaked.
	if(o->hAudioLock)CloseHandle(o->hAudioLock);
	if(o->hAudioSignal)CloseHandle(o->hAudioSignal);
	if(o->hRenderSignal)CloseHandle(o->hRenderSignal);
	if(o->pCodecCtx)avcodec_close(o->pCodecCtx);
	if(o->aCodecCtx)avcodec_close(o->aCodecCtx);
	if(o->pFormatCtx)av_close_input_file(o->pFormatCtx);
}
