package com.avsdk;



import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;



import android.R.integer;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;

@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public class MediaVideoEncoderHW {

	private final String LOG_TAG = "MediaVideoEncoderHW";

	private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
	private static final int FRAME_RATE = 30;               // 30 fps
	private static final int IFRAME_INTERVAL = 1;           // 1 second between I-frames

	// Buffer arrays fetched once after start() (pre-API-21 MediaCodec usage style).
	private ByteBuffer[] mInputBuffers  = null;
	private ByteBuffer[] mOutputBuffers = null;

	private MediaCodec  mVideoEncoder = null;
	private MediaFormat mOutputFormat = null;

	// SPS/PPS configuration NAL units exactly as emitted by the codec
	// (Annex-B byte stream, including start codes).
	private byte[] mSpsPpsHeadData = null;
	private int mSpsPpsHeadDataDataLen = 0;

	// Scratch buffer used to prepend SPS/PPS to each key frame in getOutput().
	private byte[] mIframe = new byte[8 * 1024 * 1024];

	// FLV/RTMP-style AVC packets assembled by parseConfigData()/parseAvcRawData().
	private byte[] mAvcConfigData = new byte[256];
	private int mAvcConfigDataLen = 0;
	private byte[] mAvcRawData = new byte[8 * 1024 * 1024];
	private int mAvcRawDataLen = 0;

	// Last output presentation timestamp in ms (kept for delta computations).
	private long mLastTimeStampMs = 0;
	// Set by parseAvcRawData(): true when the last parsed frame was an IDR frame.
	private boolean m_bKeyFrame = false;

	private int mCurrentAPIVersion = Build.VERSION.SDK_INT;

	private static final int kVideoControlRateConstant = 2; // Bitrate mode
	private static final int kCodecBufferDequeueTimeout = 0; // 0 = non-blocking dequeue

	/**
	 * Creates and configures (but does not start) an H.264 hardware encoder.
	 *
	 * @param width       frame width in pixels
	 * @param height      frame height in pixels
	 * @param colorFormat a MediaCodecInfo.CodecCapabilities.COLOR_Format* value
	 *                    supported by the device encoder
	 * @param Bitrate     target bitrate; multiplied by 8 before being handed to
	 *                    the codec, so presumably bytes/s — TODO confirm with callers
	 * @throws IOException if the encoder cannot be created
	 */
	public MediaVideoEncoderHW(int width, int height, int colorFormat, int Bitrate) throws IOException {
		init(width, height, colorFormat, Bitrate);
	}

	/**
	 * Creates the codec and applies the video format. Must be followed by
	 * {@link #start()} before any data is queued.
	 *
	 * @throws IOException if the encoder cannot be created for {@link #MIME_TYPE}
	 */
	public void init(int width, int height, int colorFormat, int Bitrate) throws IOException {
		Log.i(LOG_TAG, "[init]: Enter init");

		try {
			mVideoEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
		} catch (Exception e) {
			// BUGFIX: the exception was previously swallowed, leaving mVideoEncoder
			// null and guaranteeing an NPE at configure() below. Propagate instead;
			// the method already declares IOException.
			throw new IOException("Failed to create encoder for " + MIME_TYPE, e);
		}

		MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
		// KEY_BIT_RATE is in bits/s; input appears to be bytes/s, hence * 8.
		mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, Bitrate * 8);
		mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
		// Color format comes from the caller: some MTK SoCs reject
		// COLOR_FormatYUV420SemiPlanar, so the device-appropriate value is
		// selected outside this class.
		mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
		mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
		// One full YUV420 frame: width * height * 1.5 bytes.
		int requirmentLength = width * height * 3 / 2;
		mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, requirmentLength);

		mVideoEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	}

	/**
	 * Starts the codec and caches its input/output buffer arrays
	 * (pre-API-21 buffer access model).
	 *
	 * @return always true; MediaCodec.start() throws on failure
	 */
	public boolean start() {
		Log.i(LOG_TAG, "[start]: Enter start");

		mVideoEncoder.start();

		// For API level <= 20, input/output buffer arrays are fetched once here.
		// See http://developer.android.com/reference/android/media/MediaCodec.html
		mInputBuffers  = mVideoEncoder.getInputBuffers();
		mOutputBuffers = mVideoEncoder.getOutputBuffers();

		Log.i(LOG_TAG, "[start]: start video encoder OK");

		return true;
	}

	/**
	 * Stops and releases the codec. The instance cannot be reused afterwards.
	 *
	 * @return always true; MediaCodec.stop() throws on failure
	 */
	public boolean stop() {
		Log.i(LOG_TAG, "[stop] Enter stop");

		mVideoEncoder.stop();
		mVideoEncoder.release();

		return true;
	}

	/**
	 * Queues one raw YUV frame for encoding.
	 *
	 * @param bufUnit raw frame data, length, and pts (in ms)
	 * @return the number of bytes queued, or -1 if no input buffer was available
	 */
	public int setInput(BufferUnit bufUnit) {
		int inputBufferIndex = mVideoEncoder.dequeueInputBuffer(kCodecBufferDequeueTimeout);
		if (inputBufferIndex < 0) {
			Log.d(LOG_TAG, "[setInput] MediaCodec.dequeueInputBuffer inputBufferIndex =" + inputBufferIndex);
			return -1;
		}

		Log.d(LOG_TAG, "[setInput] dequeueInputBuffer return inputBufferIndex = " + inputBufferIndex);

		ByteBuffer inputBuffer = mInputBuffers[inputBufferIndex];
		inputBuffer.clear();
		inputBuffer.put(bufUnit.getData(), 0, bufUnit.getLength());

		Log.d(LOG_TAG, "[setInput]: get video raw sample pts = " + bufUnit.getPts() + " inputBuffer position = " + inputBuffer.position() + " capacity = " + inputBuffer.capacity());

		// MediaCodec pts is in us; BufferUnit pts is in ms, so convert.
		mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, bufUnit.getLength(), bufUnit.getPts() * 1000, 0);

		return bufUnit.getLength();
	}

	/**
	 * Drains one encoded buffer from the codec.
	 *
	 * Key frames are returned with SPS/PPS prepended so each is independently
	 * decodable. Codec-config buffers are captured internally and not returned.
	 *
	 * @param bufUnit receives the bare encoded frame, its length, pts (ms) and flags;
	 *                bufUnit.getData() must be large enough for one encoded frame
	 * @param out     receives the output bitstream (SPS/PPS + frame for key frames)
	 * @return number of bytes written to {@code out}; 0 for codec-config or
	 *         format-changed events (no frame data); -1 when no output is ready
	 * @throws IllegalStateException on an unexpected dequeueOutputBuffer result
	 */
	public int getOutput(BufferUnit bufUnit, byte[] out) {
		MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
		int outputBufferIndex = mVideoEncoder.dequeueOutputBuffer(bufferInfo, kCodecBufferDequeueTimeout);

		if (outputBufferIndex >= 0) {
			ByteBuffer outputBuffer = mOutputBuffers[outputBufferIndex];
			outputBuffer.position(bufferInfo.offset);
			outputBuffer.limit(bufferInfo.offset + bufferInfo.size);

			// BUGFIX: flags is a bit field; test the bit instead of comparing with ==.
			if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
				// Codec configuration data: SPS/PPS with Annex-B start codes.
				// BUGFIX: the buffer was previously read twice with a clear() in
				// between; a single read is sufficient and position-safe.
				mSpsPpsHeadData = new byte[bufferInfo.size];
				outputBuffer.get(mSpsPpsHeadData, 0, bufferInfo.size);
				mSpsPpsHeadDataDataLen = bufferInfo.size;
				parseConfigData(mSpsPpsHeadData, bufferInfo.size);

				mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false);
				return 0;
			}

			// Regular encoded frame: copy into the caller-supplied BufferUnit.
			// NOTE(review): assumes bufUnit.getData() holds >= bufferInfo.size bytes.
			byte[] outData = bufUnit.getData();
			outputBuffer.get(outData, 0, bufferInfo.size);

			bufUnit.setLength(bufferInfo.size);
			bufUnit.setPts(bufferInfo.presentationTimeUs / 1000); // us -> ms
			bufUnit.setFlags(bufferInfo.flags);

			mLastTimeStampMs = bufferInfo.presentationTimeUs / 1000;

			// Classifies the frame (sets m_bKeyFrame) and builds the FLV-style
			// packet in mAvcRawData as a side effect.
			parseAvcRawData(outData, bufferInfo.size);

			int outsize;
			if (m_bKeyFrame) {
				// Prepend SPS/PPS so the key frame is self-contained.
				Log.i(LOG_TAG, " WriteBuffer SPS " + mSpsPpsHeadDataDataLen);
				System.arraycopy(mSpsPpsHeadData, 0, mIframe, 0, mSpsPpsHeadDataDataLen);
				System.arraycopy(outData, 0, mIframe, mSpsPpsHeadDataDataLen, bufferInfo.size);

				outsize = mSpsPpsHeadDataDataLen + bufferInfo.size;
				System.arraycopy(mIframe, 0, out, 0, outsize);
			} else {
				// BUGFIX: log tag was the literal string "LOG_TAG" (typo).
				Log.i(LOG_TAG, " WriteBuffer " + bufferInfo.size);
				System.arraycopy(outData, 0, out, 0, bufferInfo.size);
				outsize = bufferInfo.size;
			}
			mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false);
			return outsize;

		} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
			// Output buffers were reallocated; refresh our cached array.
			mOutputBuffers = mVideoEncoder.getOutputBuffers();
			return -1;
		} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
			mOutputFormat = mVideoEncoder.getOutputFormat();
			return 0;
		} else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
			return -1;
		} else {
			// Should not happen with current MediaCodec return codes.
			throw new IllegalStateException("[getOutput] MediaCodec.dequeueOutputBuffer has encountered an error!");
		}
	}

	/**
	 * Splits an Annex-B codec-config buffer into SPS and PPS and assembles an
	 * FLV AVC sequence header (AVCDecoderConfigurationRecord) in mAvcConfigData.
	 *
	 * Only 4-byte (00 00 00 01) start codes are recognized, which matches what
	 * this encoder emits for config buffers.
	 */
	private void parseConfigData(byte[] configData, int configDataLen) {
		if (configData == null || configDataLen <= 0) {
			return;
		}
		int iPos = 0;
		int iEnd = configDataLen;

		byte[] spsData = new byte[64];
		byte[] ppsData = new byte[64];
		short spsDataLen = 0;
		short ppsDataLen = 0;
		int iSpsStart = 0;
		int iSpsEnd = 0;
		int iPpsStart = 0;
		int iPpsEnd = iEnd;
		int iPrefixSize = 0;

		// BUGFIX: bound the scan so configData[iPos + 3] never reads past the
		// array (mSpsPpsHeadData is allocated at exactly configDataLen bytes).
		while (iPos + 3 < iEnd) {
			if (configData[iPos] == 0x00 && configData[iPos + 1] == 0x00
					&& configData[iPos + 2] == 0x00 && configData[iPos + 3] == 0x01) {
				iPos += 4;
				iPrefixSize = 4;
			} else {
				iPos++;
				continue;
			}
			if (iPos >= iEnd) {
				break; // start code at the very end of the buffer; no NAL payload
			}
			int iType = configData[iPos] & 0x1f;
			switch (iType) {
			case 0x07: // SPS
				iSpsStart = iPos;
				break;
			case 0x08: // PPS: SPS runs up to the start code that precedes the PPS
				iSpsEnd = iPos - iPrefixSize;
				iPpsStart = iPos;
				break;
			default:
				break;
			}
		}
		System.arraycopy(configData, iSpsStart, spsData, 0, iSpsEnd - iSpsStart);
		spsDataLen = (short) (iSpsEnd - iSpsStart);
		System.arraycopy(configData, iPpsStart, ppsData, 0, iPpsEnd - iPpsStart);
		ppsDataLen = (short) (iPpsEnd - iPpsStart);

		// FLV VIDEODATA header: key frame + AVC codec, packet type 0 (sequence header).
		mAvcConfigData[0] = 0x17;
		mAvcConfigData[1] = 0x00;
		mAvcConfigData[2] = 0x00;
		mAvcConfigData[3] = 0x00;
		mAvcConfigData[4] = 0x00;
		// AVCDecoderConfigurationRecord (ISO/IEC 14496-15).
		mAvcConfigData[5] = 0x01;           // configurationVersion
		mAvcConfigData[6] = spsData[1];     // AVCProfileIndication
		mAvcConfigData[7] = spsData[2];     // profile_compatibility
		mAvcConfigData[8] = spsData[3];     // AVCLevelIndication
		mAvcConfigData[9] = (byte) 0xff;    // lengthSizeMinusOne (4-byte NAL lengths)
		mAvcConfigData[10] = (byte) 0xe1;   // numOfSequenceParameterSets (1)
		mAvcConfigData[11] = (byte) ((spsDataLen >> 8) & 0xff); // sequenceParameterSetLength
		mAvcConfigData[12] = (byte) (spsDataLen & 0xff);
		System.arraycopy(spsData, 0, mAvcConfigData, 13, spsDataLen);
		mAvcConfigData[13 + spsDataLen] = 0x01;                              // numOfPictureParameterSets
		mAvcConfigData[14 + spsDataLen] = (byte) ((ppsDataLen >> 8) & 0xff); // pictureParameterSetLength
		mAvcConfigData[15 + spsDataLen] = (byte) (ppsDataLen & 0xff);
		System.arraycopy(ppsData, 0, mAvcConfigData, 16 + spsDataLen, ppsDataLen);
		mAvcConfigDataLen = 11 + spsDataLen + ppsDataLen + 5;
	}

	/**
	 * Classifies an encoded Annex-B frame (sets m_bKeyFrame for IDR NALs) and
	 * repackages it as an FLV AVC NALU packet in mAvcRawData: 5-byte FLV video
	 * header, 4-byte big-endian NAL length, then the NAL payload without its
	 * start code.
	 */
	private void parseAvcRawData(byte[] avcRawData, int avcRawDataLen) {
		if (avcRawData == null || avcRawDataLen <= 0) {
			return;
		}

		int iPos = 0;
		int iPrefixSize = 0;
		byte keybyte = 0;

		// Skip a leading 3- or 4-byte Annex-B start code, if present.
		if (avcRawData[iPos] == 0x00 && avcRawData[iPos + 1] == 0x00
				&& avcRawData[iPos + 2] == 0x01) {
			iPos += 3;
			iPrefixSize = 3;
		} else if (avcRawData[iPos] == 0x00 && avcRawData[iPos + 1] == 0x00
				&& avcRawData[iPos + 2] == 0x00 && avcRawData[iPos + 3] == 0x01) {
			iPos += 4;
			iPrefixSize = 4;
		}
		int iType = avcRawData[iPos] & 0x1f;
		if (iType == 0x05) { // IDR slice
			m_bKeyFrame = true;
			keybyte = 0x17; // FLV: key frame + AVC
		} else {
			m_bKeyFrame = false;
			keybyte = 0x27; // FLV: inter frame + AVC
		}
		mAvcRawData[0] = keybyte;
		mAvcRawData[1] = 0x01; // AVCPacketType: NALU
		mAvcRawData[2] = 0x00; // CompositionTime (3 bytes) = 0
		mAvcRawData[3] = 0x00;
		mAvcRawData[4] = 0x00;

		int iDataSize = avcRawDataLen - iPrefixSize;
		mAvcRawData[5] = (byte) ((iDataSize >> 24) & 0xff);
		mAvcRawData[6] = (byte) ((iDataSize >> 16) & 0xff);
		mAvcRawData[7] = (byte) ((iDataSize >> 8) & 0xff);
		mAvcRawData[8] = (byte) (iDataSize & 0xff);
		System.arraycopy(avcRawData, iPos, mAvcRawData, 9, iDataSize);
		mAvcRawDataLen = iDataSize + 4 + 5;
	}
}
