package com.cloudptt.wp.ui.message.sub.record;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

import com.cloudptt.api.product.log.Log;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

/**
 * Created By Chengjunsen on 2018/9/8
 */
/**
 * Worker thread that pulls raw NV21 camera frames off a queue, converts them to
 * NV12 (YUV420 semi-planar), encodes them to H.264 with {@link MediaCodec}, and
 * hands the encoded samples to a {@link MediaMuxerThread} for muxing.
 *
 * Threading: {@link #frame(byte[])} is called from the camera/producer thread;
 * {@link #begin()} / {@link #end()} from the controlling thread; the encode loop
 * runs on this thread. The frame queue and the recording flag are therefore
 * thread-safe ({@code ConcurrentLinkedQueue} / {@code volatile}).
 */
public class VideoRecordThread extends Thread implements Runnable {
	public static final String TAG = "VideoRecordThread";
	// Timeout for MediaCodec dequeue calls. The API takes MICROseconds, so the
	// old name TIMEOUT_S (seconds) was misleading: this is 10 ms.
	private static final int TIMEOUT_US = 10000;
	private int mFrameRate = 30;
	private int mBitRate;
	private int mIFrameInterval = 10;
	// Counts frames submitted to the encoder; reset by begin().
	private long generateIndex = 0;
	// Pending raw NV21 frames. Concurrent implementation because the producer
	// (frame()) and the consumer (run()) are different threads.
	public Queue<byte[]> dataQueue;
	// volatile: written by begin()/end() on another thread, read by the encode
	// loop — without it the loop might never observe the stop request.
	private volatile boolean isRecording;
	private MediaCodec mMediaCodec;
	private int width, height;
	// Weak reference so this worker never keeps the muxer (and its owner) alive.
	private WeakReference<MediaMuxerThread> mMutex;
	// Reusable NV21 -> NV12 conversion buffer, sized width * height * 3 / 2.
	byte[] yuv420sp;

	/**
	 * @param mMutex muxer that receives the encoded samples (held weakly)
	 * @param width  frame width in pixels
	 * @param height frame height in pixels
	 */
	public VideoRecordThread(MediaMuxerThread mMutex, int width, int height) {
		Log.d(TAG, "VideoRecordThread ");
		this.mMutex = new WeakReference<MediaMuxerThread>(mMutex);
		this.width = width;
		this.height = height;
		this.dataQueue = new ConcurrentLinkedQueue<byte[]>();
		this.isRecording = false;
		// Empirical bitrate heuristic: raw YUV bits/s scaled down by 256.
		this.mBitRate = height * width * 3 * 8 * mFrameRate / 256;
		this.yuv420sp = new byte[width * height * 3 / 2];
	}

	/**
	 * Creates, configures and starts the H.264 encoder.
	 *
	 * @return {@code true} on success, {@code false} if codec creation failed
	 */
	private boolean initMediaCodec(int width, int height) {
		Log.d(TAG, "VideoRecordThread initMediaCodec ");
		try {
			MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
			// Input color format matches what encode() feeds in (NV12 / semi-planar).
			mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
			mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
			mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
			mMediaCodec.start();
		} catch (IOException e) {
			e.printStackTrace();
			return false;
		}
		return true;
	}

	/**
	 * Queues one raw NV21 camera frame for encoding. Called from the camera
	 * thread; frames arriving while not recording are dropped.
	 */
	public void frame(byte[] data) {
		Log.d(TAG, "VideoRecordThread frame ");
		if (isRecording) {
			dataQueue.offer(data);
		}
	}

	/** Initializes the encoder. Must be called before {@link #begin()}. */
	public void prepare() {
		Log.d(TAG, "VideoRecordThread prapare ");
		initMediaCodec(width, height);
	}

	/** @deprecated Misspelled name kept for existing callers — use {@link #prepare()}. */
	@Deprecated
	public void prapare() {
		prepare();
	}

	/** Starts the encode loop. Call {@link #prepare()} (or prapare()) first. */
	public void begin() {
		Log.d(TAG, "VideoRecordThread begin ");
		dataQueue.clear();
		isRecording = true;
		generateIndex = 0;
		start();
	}

	/** Asks the encode loop to stop; the codec is released when the loop exits. */
	public void end() {
		isRecording = false;
	}

	@Override
	public void run() {
		Log.d(TAG, "VideoRecordThread run isRecording = " + isRecording);
		while (isRecording) {
			byte[] data = dataQueue.poll();
			if (data == null) {
				// Queue empty: sleep briefly instead of busy-spinning a core.
				try {
					Thread.sleep(10);
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
					break;
				}
				continue;
			}
			try {
				NV21toI420SemiPlanar(data, yuv420sp, width, height);
				encode(yuv420sp);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
		release();
	}

	/** Stops the encoder and frees its resources. Safe even if init never ran. */
	private void release() {
		Log.d(TAG, "VideoRecordThread release ");
		if (mMediaCodec == null) {
			return;
		}
		try {
			mMediaCodec.stop();
			mMediaCodec.release();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			mMediaCodec = null;
		}
	}

	/**
	 * Feeds one NV12 frame to the encoder, then drains all available encoded
	 * output, forwarding it to the muxer.
	 */
	private void encode(byte[] input) {
		Log.e(TAG, "VideoRecordThread encode");
		if (input != null) {
			try {
				int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_US);
				if (inputBufferIndex >= 0) {
					long pts = getPts();
					ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputBufferIndex);
					inputBuffer.clear();
					inputBuffer.put(input);
					mMediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
					generateIndex += 1;
				}

				MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
				int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
				Log.e(TAG, "VideoRecordThread encode outputBufferIndex = " + outputBufferIndex);
				if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
					Log.e(TAG, "vedio run: INFO_OUTPUT_FORMAT_CHANGED");
					MediaMuxerThread mediaMutex = mMutex.get();
					// Null-check BEFORE any dereference: the weak ref may be cleared.
					if (mediaMutex != null && !mediaMutex.isVideoTrackExist()) {
						mediaMutex.addVedioTrack(mMediaCodec.getOutputFormat());
					}
				}

				while (outputBufferIndex >= 0) {
					ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(outputBufferIndex);
					// flags is a bit field: test with a mask, not equality, so a
					// config buffer carrying extra flags is still skipped.
					if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
						Log.e(TAG, "vedio run: BUFFER_FLAG_CODEC_CONFIG");
						// Codec config (SPS/PPS) travels via the track format; drop it here.
						bufferInfo.size = 0;
					}

					if (bufferInfo.size > 0) {
						MediaMuxerThread mediaMuxer = this.mMutex.get();
						if (mediaMuxer != null) {
							// Position/limit MUST be set before reading, otherwise the
							// copy starts at position 0 instead of bufferInfo.offset.
							outputBuffer.position(bufferInfo.offset);
							outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
							byte[] outData = new byte[bufferInfo.size];
							outputBuffer.get(outData);
							Log.e(TAG, "video presentationTimeUs : " + bufferInfo.presentationTimeUs);
							bufferInfo.presentationTimeUs = getPts();
							mediaMuxer.addMutexData(new MutexBean(true, outData, bufferInfo));
						}
					}
					mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
					bufferInfo = new MediaCodec.BufferInfo();
					outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
				}

			} catch (Throwable t) {
				t.printStackTrace();
				Log.e(TAG, "encode: " + t.toString());
			}
		}
	}

	/** @return current wall-clock time in microseconds, used as the frame PTS. */
	private long getPts() {
		return System.nanoTime() / 1000L;
	}

	/**
	 * Converts NV21 (Y plane + interleaved VU) to NV12/semi-planar (Y plane +
	 * interleaved UV) by copying the Y plane and swapping each chroma pair.
	 */
	private static void NV21toI420SemiPlanar(byte[] nv21bytes, byte[] i420bytes, int width, int height) {
		System.arraycopy(nv21bytes, 0, i420bytes, 0, width * height);
		for (int i = width * height; i < nv21bytes.length; i += 2) {
			i420bytes[i] = nv21bytes[i + 1];
			i420bytes[i + 1] = nv21bytes[i];
		}
	}
}
