package com.pb.camera.h264;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import android.os.AsyncTask;
import android.util.Log;

import com.tutk.IOTC.AVAPIs;

/**
 * Background thread that pulls H.264 video frames from a TUTK AV channel
 * ({@link AVAPIs#avRecvFrameData2}), splits the byte stream into NAL units on
 * the 0x00000001 start code, decodes each unit through the native "H264"
 * library, and pushes decoded pixel buffers to a {@link VedioPlayView}.
 *
 * <p>Thread-safety: all parsing state (mTrans, NalBuf, ...) is confined to this
 * thread; {@code isGettingData} and {@code mIsStopTrans} are flipped from other
 * threads without synchronization — NOTE(review): they are not volatile, so
 * visibility of those writes to this thread is not guaranteed; confirm.
 */
public class VideoBufferThread extends Thread {

    static {
        // Native H.264 decoder backing InitDecoder/UninitDecoder/DecoderNal.
        System.loadLibrary("H264");
    }

    /** Maximum size of one received video frame, in bytes. */
    static final int VIDEO_BUF_SIZE = 100000;
    /** Size of the per-frame info structure filled in by the AV API. */
    static final int FRAME_INFO_SIZE = 16;

    // Rolling 4-byte window over the incoming stream; equals 1 exactly when the
    // last four bytes seen were the H.264 start code 0x00 0x00 0x00 0x01.
    int mTrans = 0x0F0F0F0F;

    int iTemp = 0;  // last return value of DecoderNal(); >0 means a frame was produced
    int nalLen;     // bytes consumed by the most recent MergeBuffer() call

    boolean bFirst = true;    // true until the first start code has been consumed
    boolean bFindPPS = true;  // true until a NAL of type 7 (SPS) has been seen

    int bytesRead = 0;
    int NalBufUsed = 0;
    int SockBufUsed = 0;

    byte[] NalBuf = new byte[409800]; // ~400 KB NAL reassembly buffer
    byte[] SockBuf = new byte[2048];  // staging buffer for chunked reads of a frame

    private int avIndex;
    private InputStream byteArrayInputStream;
    private VedioPlayView mVedioPlayView;
    public boolean isGettingData = true;

    private byte[] mPixel;  // decoded pixel buffer, mWidth * mHeight * 2 bytes
    private int mWidth;
    private int mHeight;
    private int selectWidth = 352;   // fallback width when the view is not laid out yet
    private int selectHeight = 288;  // fallback height when the view is not laid out yet
    private ByteBuffer mBuffer;      // wraps mPixel; handed to the view for each frame
    private boolean mIsStopTrans = false;

    /**
     * @param avIndex TUTK AV channel index to receive frames from
     * @param view    target view that renders each decoded frame
     */
    public VideoBufferThread(int avIndex, VedioPlayView view) {
        this.avIndex = avIndex;
        mVedioPlayView = view;
        init();
    }

    public native int InitDecoder(int width, int height);
    public native int UninitDecoder();
    public native int DecoderNal(byte[] in, int insize, byte[] out);

    /** Sizes the pixel buffer from the view (or the configured fallback size). */
    private void init() {
        mWidth = mVedioPlayView.getWidth() <= 0 ? selectWidth : mVedioPlayView.getWidth();
        mHeight = mVedioPlayView.getHeight() <= 0 ? selectHeight : mVedioPlayView.getHeight();
        // Two bytes per pixel — presumably RGB565 output from the native decoder;
        // TODO confirm against the JNI implementation.
        // (Java byte arrays are zero-initialized, so no explicit fill is needed.)
        mPixel = new byte[mWidth * mHeight * 2];
        mBuffer = ByteBuffer.wrap(mPixel);
    }

    /** Sets the fallback frame size used when the view has no measured size yet. */
    public void setSelcectWidthAndHeight(int width, int height) {
        this.selectWidth = width;
        this.selectHeight = height;
    }

    /**
     * Copies bytes from SockBuf into NalBuf until either SockRemain bytes have
     * been copied or a 0x00000001 start code is completed (tracked in the
     * rolling window {@code mTrans}, which this method updates as a side effect).
     * The parameters intentionally shadow the same-named fields.
     *
     * @return number of bytes copied (includes the final start-code byte, if found)
     */
    int MergeBuffer(byte[] NalBuf, int NalBufUsed, byte[] SockBuf, int SockBufUsed, int SockRemain) {
        int i = 0;
        byte Temp;

        for (i = 0; i < SockRemain; i++) {
            Temp = SockBuf[i + SockBufUsed];
            NalBuf[i + NalBufUsed] = Temp;

            mTrans <<= 8;
            mTrans |= Temp;

            if (mTrans == 1) { // found a start code
                i++;
                break;
            }
        }
        return i;
    }

    /**
     * Receive loop: pulls frames from the AV channel until {@code isGettingData}
     * is cleared or the session ends, parses each frame into NAL units, decodes
     * them, and forwards decoded frames to the view.
     */
    @Override
    public void run() {
        Log.e("解码时的的宽和高", mWidth + "--================--" + mHeight);
        InitDecoder(mWidth, mHeight);

        AVAPIs av = new AVAPIs();
        byte[] frameInfo = new byte[FRAME_INFO_SIZE];
        byte[] videoBuffer = new byte[VIDEO_BUF_SIZE];
        while (isGettingData) {
            int[] frameNumber = new int[1];

            int[] outBufSize = new int[1];
            int[] outFrmSize = new int[1];
            int[] outFrmInfoBufSize = new int[1];

            int ret = av.avRecvFrameData2(avIndex, videoBuffer, VIDEO_BUF_SIZE, outBufSize,
                    outFrmSize, frameInfo, FRAME_INFO_SIZE, outFrmInfoBufSize, frameNumber);

            if (ret == AVAPIs.AV_ER_DATA_NOREADY) {
                try {
                    // No frame available yet; back off briefly and poll again.
                    Thread.sleep(30);
                    continue;
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    System.out.println(e.getMessage());
                    break;
                }
            } else if (ret == AVAPIs.AV_ER_LOSED_THIS_FRAME) {
                System.out.printf("[%s] Lost video frame number[%d]\n",
                        Thread.currentThread().getName(), frameNumber[0]);
                continue;
            } else if (ret == AVAPIs.AV_ER_INCOMPLETE_FRAME) {
                System.out.printf("[%s] Incomplete video frame number[%d]\n",
                        Thread.currentThread().getName(), frameNumber[0]);
                continue;
            } else if (ret == AVAPIs.AV_ER_SESSION_CLOSE_BY_REMOTE) {
                System.out.printf("[%s] AV_ER_SESSION_CLOSE_BY_REMOTE\n",
                        Thread.currentThread().getName());
                break;
            } else if (ret == AVAPIs.AV_ER_REMOTE_TIMEOUT_DISCONNECT) {
                System.out.printf("[%s] AV_ER_REMOTE_TIMEOUT_DISCONNECT\n",
                        Thread.currentThread().getName());
                break;
            } else if (ret == AVAPIs.AV_ER_INVALID_SID) {
                System.out.printf("[%s] Session cant be used anymore\n",
                        Thread.currentThread().getName());
                break;
            }

            // The current frame occupies videoBuffer[0 .. ret-1]. videoBuffer is
            // reused across iterations, so the stream MUST be limited to ret
            // bytes — wrapping the whole array would feed stale bytes from a
            // previous, larger frame into the NAL parser.
            byteArrayInputStream = new ByteArrayInputStream(videoBuffer, 0, ret);

            while (!Thread.currentThread().isInterrupted()) {
                try {
                    bytesRead = byteArrayInputStream.read(SockBuf, 0, 2048);
                } catch (IOException e) {
                    // ByteArrayInputStream never actually throws; required by the
                    // InputStream signature.
                    e.printStackTrace();
                }

                if (bytesRead <= 0)
                    break;

                SockBufUsed = 0;

                while (bytesRead - SockBufUsed > 0) {
                    nalLen = MergeBuffer(NalBuf, NalBufUsed, SockBuf, SockBufUsed,
                            bytesRead - SockBufUsed);

                    NalBufUsed += nalLen;
                    SockBufUsed += nalLen;

                    // mTrans == 1 means MergeBuffer just completed a start code,
                    // i.e. NalBuf now holds one full NAL unit plus the 4-byte
                    // start code of the next one.
                    while (mTrans == 1) {
                        mTrans = 0xFFFFFFFF;

                        if (bFirst == true) { // the very first start code: nothing to decode yet
                            bFirst = false;
                        } else { // a complete NAL unit, including its trailing 0x00000001
                            if (bFindPPS == true) {
                                // Discard everything until the first SPS (NAL type 7)
                                // so the decoder is initialized with parameter sets.
                                if ((NalBuf[4] & 0x1F) == 7) {
                                    bFindPPS = false;
                                } else {
                                    NalBuf[0] = 0;
                                    NalBuf[1] = 0;
                                    NalBuf[2] = 0;
                                    NalBuf[3] = 1;
                                    NalBufUsed = 4;

                                    break;
                                }
                            }
                            // Decode the NAL unit (length excludes the trailing start code).
                            iTemp = DecoderNal(NalBuf, NalBufUsed - 4, mPixel);

                            if (iTemp > 0) {
                                mVedioPlayView.onVideoBufferUpdate(mBuffer);
                                if (mIsStopTrans)
                                    break;
                            }
                        }

                        // Re-seed NalBuf with the start code that terminated the
                        // previous unit; it prefixes the next one.
                        NalBuf[0] = 0;
                        NalBuf[1] = 0;
                        NalBuf[2] = 0;
                        NalBuf[3] = 1;

                        NalBufUsed = 4;
                    }
                }
            }
            try {
                if (byteArrayInputStream != null)
                    byteArrayInputStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        System.out.printf("[%s] Exit\n",
                Thread.currentThread().getName());
        UninitDecoder();
    }

    public int getAvIndex() {
        return avIndex;
    }

    /**
     * Switches the channel index for subsequent receives.
     * NOTE(review): flipping mIsStopTrans true then immediately false is a
     * window the run() thread can easily miss entirely (and the flag is not
     * volatile) — presumably meant to interrupt the current frame; confirm
     * the intended semantics before relying on it.
     */
    public void restartTransVideo(int avIndex) {
        this.avIndex = avIndex;
        mIsStopTrans = true;
        mIsStopTrans = false;
    }

    public void setAvIndex(int avIndex) {
        this.avIndex = avIndex;
    }
}
