#include "stdafx.h"

#include <afxres.h>
#include "shlwapi.h"
//Link against the FFmpeg import libs (can also be configured in project settings); only these three libraries are needed to open a video
//#pragma comment(lib,"avformat.lib")
//#pragma comment(lib,"avutil.lib")
//#pragma comment(lib,"avcodec.lib")

#include <iostream>

#include "video.h"
#include "common.h"

#pragma comment(lib,"shlwapi.lib")

using namespace std;

int Open_codec_context(int * stream_idx, AVFormatContext * fmt_ctx, enum AVMediaType type)// locate the best stream of the given media type in the file
{
	// Ask FFmpeg for the best-matching stream of the requested type.
	const int found = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
	if (found < 0)
		return found;      // negative AVERROR: no such stream in this file

	*stream_idx = found;   // hand the stream index back to the caller
	return 0;
}

int ff_openfile_1(CString &filename)
{
    // Probe a media file: open it, scan the packets once, collect basic video
    // parameters, and open a decoder for every video stream.
    // Returns 0 on success, a negative value on failure.

    // Demuxer state for the opened input.
    AVFormatContext *ic = NULL;

    // Index of the (last) video stream found while scanning below.
    int videoStream = 0;

    // Open the input. The path may be a local file or an rtsp/http URL.
    // Format (arg 3) and options (arg 4) are left for FFmpeg to auto-detect.
    int re = avformat_open_input(&ic, filename.GetBuffer(0), 0, 0);
    if (re != 0)
    {
        // Fetch FFmpeg's error text (kept for debugging; only the box is shown).
        char errorbuf[1024] = {0};
        av_strerror(re, errorbuf, sizeof(errorbuf));
		MessageBox(0, _T("FFMPEG 打开文件失败 请确认文件类型和路径正确"), NULL, MB_OK );
        return -1;
    }

	// Probe the streams for codec parameters, duration, etc.
	if (avformat_find_stream_info(ic, NULL) < 0)
	{
		avformat_close_input(&ic); // FIX: context was leaked on this error path
		return -1;
	}

	// Walk every packet once (e.g. to spot key frames).
	AVPacket avpkt;
	av_init_packet(&avpkt);
	while (true) {
		int ret = av_read_frame(ic, &avpkt);
		if (ret < 0) {
			break;
		}

		if (avpkt.flags & AV_PKT_FLAG_KEY){
			int k = 0;
			k++;
		}
		// processing
		av_free_packet(&avpkt); // FIX: each packet read must be released, or all frame data leaks
	}
	av_free_packet(&avpkt); // harmless on an already-freed packet

	int m_video_stream_idx = -1;             // stream index of the video (-1 = none found)
	double m_dbFrameRate = 0;                // video frame rate
	double m_video_duration = 0;             // duration of a single frame, seconds
	int m_dwWidth = 0;                       // width
	int m_dwHeight = 0;                      // height

	if (Open_codec_context(&m_video_stream_idx, ic, AVMEDIA_TYPE_VIDEO) >= 0)
	{
		AVStream        *video_st = ic->streams[m_video_stream_idx];
		AVCodecContext * m_video_dec_ctx = video_st->codec;
		// Find the decoder for the video stream
		AVCodec * m_pvideo_codec = avcodec_find_decoder(m_video_dec_ctx->codec_id);
		if(m_pvideo_codec == NULL) 
		{
			avformat_close_input(&ic); // FIX: context was leaked on this error path
			return -1;
		}

		// Open the decoder.
		// FIX: avcodec_open() is long deprecated; use avcodec_open2() as the
		// rest of this file already does.
		if(avcodec_open2(m_video_dec_ctx, m_pvideo_codec, NULL) < 0)
		{
			avformat_close_input(&ic); // FIX: context was leaked on this error path
			return -1;
		}

		// Frame rate (frames per second) from the stream's real frame rate.
		m_dbFrameRate = av_q2d(video_st->r_frame_rate);

		// Probed diagnostics: dimensions, codec id, pixel format, extradata size.
		m_dwWidth = m_video_dec_ctx->width;
		m_dwHeight = m_video_dec_ctx->height;
		AVCodecID m_video_codecID = m_video_dec_ctx->codec_id;
		AVPixelFormat m_video_pixelfromat = m_video_dec_ctx->pix_fmt;
		int m_spspps_size = m_video_dec_ctx->extradata_size;

		char m_spspps[1000];                      // copy of extradata (SPS/PPS for H.264)
		// FIX: also require size > 0 -- memcpy from NULL extradata is UB.
		if(m_spspps_size > 0 && m_spspps_size < 1000)
		{
			memcpy(m_spspps,m_video_dec_ctx->extradata,m_spspps_size);
		}

		// Normalize the per-frame duration to seconds; mpeg-ts containers use
		// a 90 kHz clock. Extendable to other formats.
		if (m_dbFrameRate > 0) // FIX: avoid division by zero on raw streams
		{
			if (strstr(ic->iformat->name,"mpegts")!= NULL)
			{
				double time_base = 90 * 1000;
				m_video_duration = time_base / m_dbFrameRate /100000;
			}
			else
			{
				m_video_duration = 1 / m_dbFrameRate; 
			}
		}
	}

	// AVFormatContext::duration is in AV_TIME_BASE (microsecond) units.
	double m_file_duration = ic->duration / 1000.0 / 1000.0;
	// FIX: the index was read uninitialized when no video stream was found.
	if (m_video_stream_idx >= 0)
		av_seek_frame(ic, m_video_stream_idx, 100, AVSEEK_FLAG_ANY);

    // Scan all streams (audio/video/subtitle); only video is handled here.
    for (unsigned i = 0; i < ic->nb_streams; i++)
    {
        AVCodecContext *enc = ic->streams[i]->codec;
        if (enc->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            // Remember the video stream index.
            videoStream = i;

            // Look up the decoder (H.264, ...) registered inside FFmpeg.
            AVCodec *codec = avcodec_find_decoder(enc->codec_id);
            if (!codec)
            {
                printf("video code not find!\n");
                avformat_close_input(&ic); // FIX: context was leaked on this error path
                return -2;
            }

            // Open the decoder (same call is used for audio decoders).
            int err = avcodec_open2(enc, codec, NULL);
            if (err != 0)
            {
                char buf[1024] = { 0 };
                av_strerror(err, buf, sizeof(buf));
                printf("%s", buf); // FIX: never pass a non-literal as the printf format string
                avformat_close_input(&ic); // FIX: context was leaked on this error path
                return -3;
            }
        }
    }

    avformat_close_input(&ic); // FIX: context was leaked on the success path
    return 0;                  // FIX: control fell off the end of a non-void function (UB)
}


bool picture2Image(AVCodecContext *CodecContex, AVFrame *Picture, int width, int height, CImage &image)
{
	// Convert a decoded frame to RGB24, wrap it in an in-memory BMP, and load
	// it into `image` via LoadMemImage. Returns false on any failure.
	AVPicture pPictureRGB; // RGB destination picture

	// Converter from the decoder's pixel format to packed RGB24.
	struct SwsContext *img_convert_ctx = sws_getContext(width, height,	CodecContex->pix_fmt, width, height, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
	if (!img_convert_ctx) // FIX: sws_getContext can fail (unsupported pix_fmt / sizes)
		return false;

	// Allocate the RGB buffer and convert the frame into it.
	avpicture_alloc(&pPictureRGB, PIX_FMT_RGB24, width, height);
	sws_scale(img_convert_ctx, Picture->data, Picture->linesize, 0, height, pPictureRGB.data, pPictureRGB.linesize);

	int lineBytes = pPictureRGB.linesize[0];

	// BMP file header: 'BM' magic, pixel data at byte offset 54.
	BITMAPFILEHEADER btfileHeader;
	btfileHeader.bfType = MAKEWORD(66, 77); 
	btfileHeader.bfSize = lineBytes*height; // NOTE(review): spec says headers+data (54 + data); readers generally ignore this field
	btfileHeader.bfReserved1 = 0; 
	btfileHeader.bfReserved2 = 0; 
	btfileHeader.bfOffBits = 54;

	// 24-bit uncompressed DIB header.
	BITMAPINFOHEADER bitmapinfoheader;
	bitmapinfoheader.biSize = 40; 
	bitmapinfoheader.biWidth = width; 
	bitmapinfoheader.biHeight = height; 
	bitmapinfoheader.biPlanes = 1; 
	bitmapinfoheader.biBitCount = 24;
	bitmapinfoheader.biCompression = BI_RGB; 
	bitmapinfoheader.biSizeImage = lineBytes*height; 
	bitmapinfoheader.biXPelsPerMeter = 0; 
	bitmapinfoheader.biYPelsPerMeter = 0; 
	bitmapinfoheader.biClrUsed = 0; 
	bitmapinfoheader.biClrImportant = 0;

	char *buf = (char*)malloc(sizeof(btfileHeader) + sizeof(bitmapinfoheader) + height*lineBytes);
	if(!buf){
		sws_freeContext(img_convert_ctx); // FIX: SwsContext was leaked on this path
		avpicture_free(&pPictureRGB);
		return false;
	}

	memcpy(buf, &btfileHeader, sizeof(btfileHeader));
	memcpy(buf+sizeof(btfileHeader), &bitmapinfoheader, sizeof(bitmapinfoheader));

	// BMP stores rows bottom-up; flip the image while copying.
	for(int i=height-1; i>=0; i--){
		memcpy(buf + sizeof(btfileHeader) + sizeof(bitmapinfoheader) + ((height-1) - i)*lineBytes, pPictureRGB.data[0]+i*lineBytes, lineBytes);
	}

	sws_freeContext(img_convert_ctx);
	avpicture_free(&pPictureRGB);
	bool ok = LoadMemImage(buf, sizeof(btfileHeader) + sizeof(bitmapinfoheader) + height*lineBytes, image);
	free(buf); // FIX: `buf` was leaked on every call
	return ok;
}

void SaveBmp(AVCodecContext *CodecContex, AVFrame *Picture, int width, int height, int num)
{
	// Convert a decoded frame to RGB24 and write it as "<num>.bmp" in the
	// current working directory.
	AVPicture pPictureRGB; // RGB destination picture

	// Converter from the decoder's pixel format to packed RGB24.
	// FIX: dropped the pointless `static` -- the pointer was reassigned every call anyway.
	struct SwsContext *img_convert_ctx = sws_getContext(width, height,	CodecContex->pix_fmt, width, height,PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
	if (!img_convert_ctx) // FIX: sws_getContext can fail
		return;

	// Allocate the RGB buffer and convert the frame into it.
	avpicture_alloc(&pPictureRGB, PIX_FMT_RGB24, width, height);
	sws_scale(img_convert_ctx, Picture->data, Picture->linesize, 0, height, pPictureRGB.data, pPictureRGB.linesize);

	int lineBytes = pPictureRGB.linesize[0], i=0;

	CString fileName;
	fileName.Format(TEXT("%d.bmp"), num);

	FILE *pDestFile = fopen(fileName.GetBuffer(0), "wb");
	if (!pDestFile) { // FIX: an fopen failure crashed in fwrite/fclose below
		sws_freeContext(img_convert_ctx);
		avpicture_free(&pPictureRGB);
		return;
	}

	// BMP file header: 'BM' magic, pixel data at byte offset 54.
	BITMAPFILEHEADER btfileHeader;
	btfileHeader.bfType = MAKEWORD(66, 77); 
	btfileHeader.bfSize = lineBytes*height; // NOTE(review): spec says headers+data; readers generally ignore this field
	btfileHeader.bfReserved1 = 0; 
	btfileHeader.bfReserved2 = 0; 
	btfileHeader.bfOffBits = 54;

	// 24-bit uncompressed DIB header.
	BITMAPINFOHEADER bitmapinfoheader;
	bitmapinfoheader.biSize = 40; 
	bitmapinfoheader.biWidth = width; 
	bitmapinfoheader.biHeight = height; 
	bitmapinfoheader.biPlanes = 1; 
	bitmapinfoheader.biBitCount = 24;
	bitmapinfoheader.biCompression = BI_RGB; 
	bitmapinfoheader.biSizeImage = lineBytes*height; 
	bitmapinfoheader.biXPelsPerMeter = 0; 
	bitmapinfoheader.biYPelsPerMeter = 0; 
	bitmapinfoheader.biClrUsed = 0; 
	bitmapinfoheader.biClrImportant = 0;

	char *buf = (char*)malloc(sizeof(btfileHeader) + sizeof(bitmapinfoheader) + height*lineBytes);
	if(!buf){
		fclose(pDestFile);
		sws_freeContext(img_convert_ctx); // FIX: SwsContext was leaked on this path
		avpicture_free(&pPictureRGB);
		return;
	}

	memcpy(buf, &btfileHeader, sizeof(btfileHeader));
	memcpy(buf+sizeof(btfileHeader), &bitmapinfoheader, sizeof(bitmapinfoheader));

	// BMP stores rows bottom-up; flip the image while copying.
	for(i=height-1; i>=0; i--)
	{
		memcpy(buf + sizeof(btfileHeader) + sizeof(bitmapinfoheader) + ((height-1) - i)*lineBytes, pPictureRGB.data[0]+i*lineBytes, lineBytes);
	}

	fwrite(buf, sizeof(btfileHeader) + sizeof(bitmapinfoheader) + height*lineBytes, 1, pDestFile);
	fclose(pDestFile);
	free(buf);                        // FIX: buffer was leaked on every call
	sws_freeContext(img_convert_ctx); // FIX: SwsContext was leaked on every call
	avpicture_free(&pPictureRGB);
}

CStreamState::CStreamState(CString &filename)
{
	// Remember the source path; every FFmpeg handle starts out empty.
	m_filename = filename;
	m_pFormatCtx = 0;
	m_codecContext = 0;
	m_codec = 0;
	m_audio_st = 0;
	m_video_st = 0;

	// Audio decode buffer and packet bookkeeping.
	memset(m_audio_buf, 0, sizeof(m_audio_buf));
	m_audio_buf_size = 0;
	m_audio_buf_index = 0;
	m_audio_pkt_data = 0;
	m_audio_pkt_size = 0;

	// Picture-queue indices (ring buffer read/write positions).
	m_pictq_size = 0;
	m_pictq_rindex = 0;
	m_pictq_windex = 0;

	// Playback clocks and seek request state.
	m_audio_clock = 0;
	m_video_clock = 0;
	m_seek_time = 0;
	m_seek_req = 0;
	m_seek_flags = 0;
	m_seek_pos = 0;
}

CStreamState::~CStreamState()
{
	// Close the decoder first (it belongs to a stream inside the format
	// context), then tear down the demuxer.
	if(m_codecContext)
		avcodec_close(m_codecContext); 
	// FIX: the context came from avformat_open_input(); plain av_free() leaked
	// all the streams/buffers it allocated. avformat_close_input() releases
	// everything and nulls the pointer.
	if(m_pFormatCtx)
		avformat_close_input(&m_pFormatCtx);
}

bool CStreamState::open()
{
	// Allocate a fresh demuxer context for this file.
	m_pFormatCtx = avformat_alloc_context();

	// Open the container.
	int err = avformat_open_input(&m_pFormatCtx, m_filename.GetBuffer(0), NULL, NULL);
	if (err != 0) {
		cout<<" can't open file "<<endl;
		return false;
	}

	// Read stream headers / codec parameters.
	if (avformat_find_stream_info(m_pFormatCtx, NULL) < 0) {
		cout<<" can't find suitable codec parameters"<<endl;
		return false;
	}

	// Only video is handled: pick the first video stream encountered.
	int videoIndex = -1;
	for (int i = 0; i < m_pFormatCtx->nb_streams && videoIndex < 0; i++) {
		if (m_pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
			videoIndex = i;
	}

	if (videoIndex == -1) {
		cout<<" can't find video stream !"<<endl;
		return false;
	}

	// Borrow the stream's codec context and look up its decoder.
	m_codecContext = m_pFormatCtx->streams[videoIndex]->codec;
	m_codec = avcodec_find_decoder(m_codecContext->codec_id);
	if (m_codec == NULL) {
		cout<<"can't decode "<<endl;
		return false;
	}

	// Tell the decoder we can feed truncated bitstreams: frame boundaries
	// need not line up with packet boundaries, since frames vary in size.
	if (m_codec->capabilities & CODEC_CAP_TRUNCATED)
		m_codecContext->flags |= CODEC_FLAG_TRUNCATED;

	// Open the decoder.
	if (avcodec_open2(m_codecContext, m_codec, NULL) != 0) {
		cout<<"decode error"<<endl;
		return false;
	}

	return true;
}

bool CStreamState::loadImage(CImage &image)
{
	// Decode packets until the first complete key frame, then convert it into
	// `image`. Returns false when no key frame could be decoded.
	int videoHeight = m_codecContext->height;
	int videoWidth = m_codecContext->width; 

	AVPacket InPack = {0};
	AVFrame OutFrame = {0};
	int nComplete = 0;

	while (av_read_frame(m_pFormatCtx, &InPack) >= 0) {
		avcodec_decode_video2(m_codecContext, &OutFrame, &nComplete, &InPack);

		// Stop at the first fully decoded key frame.
		if (nComplete > 0 && OutFrame.key_frame) {
			bool ret = picture2Image(m_codecContext, &OutFrame, videoWidth, videoHeight, image);
			av_free_packet(&InPack); // FIX: packet data was leaked on this return path
			return ret;
		}

		av_free_packet(&InPack); // FIX: each packet from av_read_frame must be freed
	}

	return false;
}

unsigned CStreamState::height()
{
	// Frame height in pixels, as reported by the decoder context.
	const int h = m_codecContext->height;
	return h;
}

unsigned CStreamState::width()
{
	// Frame width in pixels, as reported by the decoder context.
	const int w = m_codecContext->width;
	return w;
}

double CStreamState::time()
{
	// AVFormatContext::duration is expressed in AV_TIME_BASE units
	// (microseconds, not nanoseconds); two divisions by 1000 yield seconds.
	const double seconds = m_pFormatCtx->duration / 1000.0 / 1000.0;
	return seconds;
}


bool CStreamState::seek(double ratio)
{
	// Seek the stream to the position given by `ratio`. Returns false when no
	// video stream can be located.
	int video_stream_idx;
	if (Open_codec_context(&video_stream_idx, m_pFormatCtx, AVMEDIA_TYPE_VIDEO) < 0)
		return false;

	// NOTE(review): pkt_timebase.den is used as a frames-per-second proxy;
	// that only holds when the packet timebase is 1/fps -- confirm for
	// TS/MP4 inputs.
	int64_t fps = m_codecContext->pkt_timebase.den;
	int64_t timestamp = fps * ratio;

	// FIX: removed the unused `nb_frames` local and its dead getFrames() call.
	av_seek_frame(m_pFormatCtx, video_stream_idx, timestamp, AVSEEK_FLAG_ANY);
	return true;
}

bool CStreamState::getImage(double ratio, CImage &image)
{
	int video_stream_idx;
	if (Open_codec_context(&video_stream_idx, m_pFormatCtx, AVMEDIA_TYPE_VIDEO) < 0)
		return false;

	int64_t fps = m_codecContext->pkt_timebase.den;
	int64_t duration = m_pFormatCtx->duration;
	
	int64_t time = m_pFormatCtx->duration * av_q2d(m_codecContext->time_base); //视频时长，毫秒
	int64_t timestamp = time * ratio;
	double m_dbFrameRate = av_q2d(m_pFormatCtx->streams[video_stream_idx]->r_frame_rate);
	
	int64_t nb_frames = getFrames();

#if 0
	int err = av_seek_frame(m_pFormatCtx, video_stream_idx, timestamp, AVSEEK_FLAG_FRAME);
#else
	int err = av_seek_frame(m_pFormatCtx, video_stream_idx, timestamp, AVSEEK_FLAG_BACKWARD);
#endif

	avcodec_flush_buffers(m_pFormatCtx->streams[video_stream_idx]->codec);
	//avcodec_flush_buffers(m_codecContext);
    //avcodec_flush_buffers(m_pFormatCtx->streams[audio_stream]->codec);
	
	int len = 0;
	int nComplete=0;
	AVPacket InPack = {0};
	AVFrame OutFrame = {0};

	int videoHeight = m_codecContext->height;
	int videoWidth = m_codecContext->width; 

	av_init_packet(&InPack);

	int ret;
	while(((ret = av_read_frame(m_pFormatCtx, &InPack)) >= 0)){
		//int64_t timestamp = pts * av_q2d(st->time_base);

		if(InPack.stream_index!=video_stream_idx){
			av_init_packet(&InPack);
			continue;
		}

		len = avcodec_decode_video2(m_codecContext, &OutFrame, &nComplete, &InPack);  
		if(0 >= len)
			return false;

		//判断是否是关键帧
		if(nComplete > 0 && OutFrame.key_frame)
		{  
			if(!image.IsNull())
				image.Destroy();

			bool ret = picture2Image(m_codecContext, &OutFrame, videoWidth, videoHeight, image);
			return ret;
		}

        av_init_packet(&InPack);
	}

	return false;
}

int CStreamState::savePic(CString &path)
{
	// Decode every frame of the stream and dump each complete one as
	// "<n>.bmp" inside `path`. Returns the number of frames written.
	TCHAR curDirectory[MAX_PATH];
	GetCurrentDirectory(MAX_PATH, curDirectory);

	// Switch into the target directory (SaveBmp writes relative file names).
	createCdPath(path);

	int videoHeight = m_codecContext->height;
	int videoWidth = m_codecContext->width; 

	AVPacket InPack = {0};
	AVFrame OutFrame = {0};
	int nComplete = 0;
	int nFrame = 0;

	while (av_read_frame(m_pFormatCtx, &InPack) >= 0) {
		avcodec_decode_video2(m_codecContext, &OutFrame, &nComplete, &InPack);

		// Save every fully decoded frame (deliberately not just key frames).
		if (nComplete > 0)
			SaveBmp(m_codecContext, &OutFrame, videoWidth, videoHeight, nFrame++);

		av_free_packet(&InPack); // FIX: packets were never released -> leaked the whole file
	}

	// Restore the caller's working directory.
	SetCurrentDirectory(curDirectory);
	return nFrame;
}

unsigned CStreamState::getFrameTime()
{
	// Per-frame display time in milliseconds, derived from the stream's
	// frame rate. Falls back to 50 ms (20 fps) when the rate is unknown.
	unsigned ret = 50;

	int video_stream_idx;
	// FIX: was `return false` -- a 0 ms frame time from an `unsigned`
	// function would make callers spin; return the 50 ms default instead.
	if (Open_codec_context(&video_stream_idx, m_pFormatCtx, AVMEDIA_TYPE_VIDEO) < 0)
		return ret;

	AVStream *video_st = m_pFormatCtx->streams[video_stream_idx];
	double dbFrameRate = av_q2d(video_st->r_frame_rate);
	// Guard against a zero/denormal rate before dividing.
	if(0.00001 < dbFrameRate)
		ret = 1000/dbFrameRate;

	return ret;
}

int64_t CStreamState::getFrames()
{
	// Total number of frames in the video stream as recorded by the container
	// (nb_frames may be 0 when the container does not store a frame count).
	int video_stream_idx;
	// FIX: was `return false` from an int64_t function -- same value (0),
	// but spelled as the integer it is.
	if (Open_codec_context(&video_stream_idx, m_pFormatCtx, AVMEDIA_TYPE_VIDEO) < 0)
		return 0;

	AVStream *video_st = m_pFormatCtx->streams[video_stream_idx];
	return video_st->nb_frames;
}