package com.tutk.sample.AVAPI;

import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.SystemClock;
import android.support.annotation.RequiresApi;
import android.util.Log;

import com.example.xgakj.mylibrary.mvp.utils.TimeUtil;
import com.example.xgakj.newterminal.base.Parameters;
import com.socks.library.KLog;
import com.tutk.IOTC.AVAPIs;
import com.tutk.IOTC.IOTCAPIs;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import static android.content.ContentValues.TAG;


public class Client {
	// Handler owned by the UI layer; receives the status codes below
	// (IOTC_ERRO ... AV_OK). Nulled out when start() finishes.
	public Handler mHandlerc;
    // Master run flag: all three worker threads loop while this is true.
    public boolean running;
    // True while remote audio should be received and played (AudioThread).
    public boolean audioRunning;
    // True while microphone audio should be captured and sent (AudioRecordThread).
    public boolean recordRunning;
    // Mute switch: while set, startAudio()/startRecord() are no-ops.
    public boolean silence;
    public AudioDecoder audioDecoder;
    public AudioEncoder encoder;
    // H.264 decoder supplied by the caller; fed in VideoThread.run().
    public   MediaCodec mCodec;
    // IOTC session id, free channel number and AV channel index of this connection.
    public int sid;
    public int channel;
    public int avIndex;
    // When true, VideoThread.saveVideo() dumps the raw H.264 stream to disk.
    public  boolean recodingVideo;
    // Status codes posted to mHandlerc.
    public static final int IOTC_ERRO=0;
    public static final int IOTC_SESSION_ERRO=1;
    public static final int AV_FAIL=2;
    public  static final int AUDIO_Record_Failure=3;
    public static final int AV_OK=4;
    // When playback (re)starts, AudioThread flushes the stale audio buffer once.
    public boolean beginClean=true;
    // Remaining avClientStart reconnect attempts (see start()).
    private int retryTimes=3;
    private String uidPassword;
    private String uid;

    /**
     * Creates a streaming client for one device.
     *
     * @param uid         device UID used for the IOTC connection
     * @param uidPassword password passed to avClientStart (account "admin")
     * @param mHandlerc   handler notified with the status codes declared above
     * @param mCodec      configured H.264 MediaCodec fed by the video thread
     */
    public Client(String uid, String uidPassword,Handler mHandlerc, MediaCodec mCodec) {
        this.mHandlerc = mHandlerc;
        this.mCodec = mCodec;
        this.uid=uid;
        this.uidPassword=uidPassword;
    }

    /**
     * Connects to the device and, on success, spawns the video, audio-playback
     * and (on KitKat+) audio-record worker threads, then blocks until they all
     * exit. Posts {@link #AV_OK} to {@link #mHandlerc} once streaming starts.
     *
     * <p>If avClientStart fails, the connection is retried up to
     * {@code retryTimes} more times with a 2s pause between attempts; once the
     * attempts are exhausted, the session is torn down and the method returns.
     */
    public void start() {
        running = true;

        KLog.d("shurun", uidPassword + "*******************" + uid);
        System.out.println("StreamClient start...");

        int ret = IOTCAPIs.IOTC_Initialize2(0);
        System.out.printf("IOTC_Initialize() ret = %d\n", ret);
        if (ret != IOTCAPIs.IOTC_ER_NoERROR) {
            System.out.printf("IOTCAPIs_Device exit...!!\n");
            // Fix: previously fell through and kept connecting on an
            // uninitialised IOTC stack; bail out as the log message implies.
            return;
        }

        // alloc 3 sessions for video and two-way audio
        AVAPIs.avInitialize(3);
        sid = IOTCAPIs.IOTC_Get_SessionID();
        if (sid < 0) {
            System.out.printf("IOTC_Get_SessionID error code [%d]\n", sid);
            return;
        }

        ret = IOTCAPIs.IOTC_Connect_ByUID_Parallel(uid, sid);
        System.out.printf("Step 2: call IOTC_Connect_ByUID_Parallel(%s).......\n", uid);

        System.out.printf("Step 2: call avClientStart1(%d).......\n", sid);
        int[] srvType = new int[1];
        KLog.d("shurun", uidPassword);
        avIndex = AVAPIs.avClientStart(sid, "admin", uidPassword, 20000, srvType, 0);

        System.out.printf("Step 2: call avClientStart(%d).......\n", avIndex);

        if (avIndex < 0) {
            if (retryTimes >= 1) {
                retryTimes--;
                KLog.e(retryTimes + "次重连");
                SystemClock.sleep(2000);
                start();
                // Fix: the recursive retry owns the session from here on.
                // Without this return, every unwinding frame went on to call
                // avClientStop/IOTC_Session_Close/avDeInitialize and destroyed
                // whatever the retry had just set up.
                return;
            }
            System.out.printf("avClientStart failed[%d]\n", avIndex);
            AVAPIs.avClientStop(avIndex);
            System.out.printf("avClientStop OK\n");
            IOTCAPIs.IOTC_Session_Close(sid);
            System.out.printf("IOTC_Session_Close OK\n");
            AVAPIs.avDeInitialize();
            IOTCAPIs.IOTC_DeInitialize();
            return;
        }
        channel = IOTCAPIs.IOTC_Session_Get_Free_Channel(sid);
        // NOTE(review): when startIpcamStream() fails we simply return without
        // tearing the session down or notifying mHandlerc — kept as-is, but
        // AV_FAIL exists and looks intended for this path; confirm with the UI.
        if (startIpcamStream(avIndex)) {

            Thread videoThread = new Thread(new VideoThread(avIndex),
                    "Video Thread");
            videoThread.start();

            Thread audioThread = new Thread(new AudioThread(avIndex),
                    "Audio Thread");
            audioThread.start();
            Thread audiorecordThread = null;
            // AudioRecordThread uses APIs gated on KitKat (see @RequiresApi).
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
                audiorecordThread = new Thread(new AudioRecordThread(),
                        "Audiorecord Thread");
                audiorecordThread.start();
            }
            Log.i("shurun","recordaudiostart");

            mHandlerc.sendEmptyMessage(AV_OK);
            try {
                // Block here until stopClient() (or a session error) makes the
                // worker threads exit.
                videoThread.join();
                audioThread.join();
                if (audiorecordThread != null) {
                    audiorecordThread.join();
                }
            }
            catch (InterruptedException e) {
                System.out.println(e.getMessage());
                return;
            }
            AVAPIs.avClientStop(avIndex);
            System.out.printf("avClientStop OK\n");
            IOTCAPIs.IOTC_Session_Close(sid);
            System.out.printf("IOTC_Session_Close OK\n");
            AVAPIs.avDeInitialize();
            IOTCAPIs.IOTC_DeInitialize();
            System.out.printf("StreamClient exit...\n");
            // Drop references so the Activity/codec can be collected.
            mHandlerc = null;
            mCodec = null;
            KLog.d("client","client完全退出");
        }
    }
    /**
     * Signals every worker thread (video, audio playback, microphone upload)
     * to leave its loop; start() then performs the session teardown.
     */
    public void stopClient() {
        this.running = false;
        this.recordRunning = false;
        this.audioRunning = false;
    }
    /**
     * Mutes the client: stops audio playback and microphone upload, and keeps
     * them off until startTalk() clears the silence flag again.
     */
    public void keepSilence() {
        this.silence = true;
        this.recordRunning = false;
        this.audioRunning = false;
    }
    /** Resumes remote-audio playback — a no-op while the client is silenced. */
    public void startAudio() {
        if (silence) {
            return;
        }
        audioRunning = true;
    }
    /**
     * Pauses remote-audio playback; AudioThread falls back to idle polling.
     * Logs the flag value as seen before the change.
     */
    public void stopAudio() {
        // Fix: the log label used to say "startAudio", making traces misleading.
        Log.i("shurun", "stopAudio" + audioRunning);

        audioRunning = false;
    }
    /** Clears the mute flag and turns audio playback back on for two-way talk. */
    public void startTalk() {
        this.silence = false;
        this.audioRunning = true;
    }
    /** Starts pushing microphone audio to the device — a no-op while silenced. */
    public void startRecord() {
        if (silence) {
            return;
        }
        recordRunning = true;
    }
    /**
     * Stops sending microphone audio.
     * (Method name typo is kept — it is part of the public interface.)
     */
    public void stoptRecord() {
        this.recordRunning = false;
    }
    /**
     * Sends the four IOCTRL commands that ask the device to begin streaming:
     * data-delay probe, video start, audio start, and speaker start (two-way
     * audio, addressed to our free channel).
     *
     * @param avIndex AV channel index returned by avClientStart
     * @return true if all four commands were accepted; false on the first failure
     */
    public boolean startIpcamStream(int avIndex) {
        // Fix: removed an unused `new AVAPIs()` instance — every call is static.
        int ret = AVAPIs.avSendIOCtrl(avIndex, AVAPIs.IOTYPE_INNER_SND_DATA_DELAY,
                new byte[2], 2);
        if (ret < 0) {
            System.out.printf("start_ipcam_stream failed[%d]\n", ret);
            return false;
        }

        // These IOTYPE constants and their corresponding data structures are
        // defined in Sample/Linux/Sample_AVAPIs/AVIOCTRLDEFs.h
        final int IOTYPE_USER_IPCAM_START = 0x1FF;
        ret = AVAPIs.avSendIOCtrl(avIndex, IOTYPE_USER_IPCAM_START,
                new byte[8], 8);
        if (ret < 0) {
            System.out.printf("start_ipcam_stream failed[%d]\n", ret);
            return false;
        }

        final int IOTYPE_USER_IPCAM_AUDIOSTART = 0x300;
        ret = AVAPIs.avSendIOCtrl(avIndex, IOTYPE_USER_IPCAM_AUDIOSTART,
                new byte[8], 8);
        if (ret < 0) {
            System.out.printf("start_ipcam_stream failed[%d]\n", ret);
            return false;
        }

        // Speaker start carries the channel number the device should use for
        // the audio we upload (see AudioRecordThread / avServStart).
        final int IOTYPE_USER_IPCAM_SPEAKERSTART = 0x0350;
        byte[] tt = new byte[8];
        tt[0] = (byte) channel;
        ret = AVAPIs.avSendIOCtrl(avIndex, IOTYPE_USER_IPCAM_SPEAKERSTART,
                tt, 8);
        if (ret < 0) {
            System.out.printf("start_ipcam_stream failed[%d]\n", ret);
            return false;
        }

        return true;
    }

    /**
     * Pulls raw H.264 frames from the AV channel, optionally dumps them to a
     * timestamped .h264 file, and feeds them into the caller-supplied
     * MediaCodec for decode/render. Runs until {@link #running} goes false or
     * the session is closed remotely.
     */
    public class VideoThread implements Runnable {
        static final int VIDEO_BUF_SIZE = 100000;
        static final int FRAME_INFO_SIZE = 20;

        private int avIndex;
        // Decoding is held off until the first SPS NAL arrives so the codec
        // never sees a stream that starts mid-GOP.
        private boolean firstsps = false;

        // Lazily opened by saveVideo() on the first recorded frame.
        private FileOutputStream outputStream = null;
        private String fileName;
        private String videopath = Parameters.VIDEO_PATH;

        public VideoThread(int avIndex) {
            this.avIndex = avIndex;
            fileName = TimeUtil.getCurrDateTime() + ".h264";
        }

        @Override
        public void run() {
            System.out.printf("[%s] Start\n",
                    Thread.currentThread().getName());
            Log.i("shurun", String.format("[%s] Start\n",
                    Thread.currentThread().getName()));
            byte[] frameInfo = new byte[FRAME_INFO_SIZE];
            byte[] videoBuffer = new byte[VIDEO_BUF_SIZE];
            int[] outBufSize = new int[1];
            int[] outFrameSize = new int[1];
            int[] outFrmInfoBufSize = new int[1];

            while (running) {
                int[] frameNumber = new int[1];

                int ret = AVAPIs.avRecvFrameData2(avIndex, videoBuffer,
                        VIDEO_BUF_SIZE, outBufSize, outFrameSize,
                        frameInfo, FRAME_INFO_SIZE,
                        outFrmInfoBufSize, frameNumber);

                if (ret == AVAPIs.AV_ER_DATA_NOREADY) {
                    // No frame yet — back off briefly.
                    try {
                        Thread.sleep(30);
                        continue;
                    }
                    catch (InterruptedException e) {
                        System.out.println(e.getMessage());
                        break;
                    }
                }
                else if (ret == AVAPIs.AV_ER_LOSED_THIS_FRAME) {
                    System.out.printf("[%s] Lost video frame number[%d]\n",
                            Thread.currentThread().getName(), frameNumber[0]);
                    continue;
                }
                else if (ret == AVAPIs.AV_ER_INCOMPLETE_FRAME) {
                    System.out.printf("[%s] Incomplete video frame number[%d]\n",
                            Thread.currentThread().getName(), frameNumber[0]);
                    continue;
                }
                else if (ret == AVAPIs.AV_ER_SESSION_CLOSE_BY_REMOTE) {
                    System.out.printf("[%s] AV_ER_SESSION_CLOSE_BY_REMOTE\n",
                            Thread.currentThread().getName());
                    break;
                }
                else if (ret == AVAPIs.AV_ER_REMOTE_TIMEOUT_DISCONNECT) {
                    System.out.printf("[%s] AV_ER_REMOTE_TIMEOUT_DISCONNECT\n",
                            Thread.currentThread().getName());
                    break;
                }
                else if (ret == AVAPIs.AV_ER_INVALID_SID) {
                    System.out.printf("[%s] Session cant be used anymore\n",
                            Thread.currentThread().getName());
                    break;
                }

                // Data is now ready in videoBuffer[0 ... ret - 1].

                // Wait for the first SPS NAL: start code 00 00 00 01 followed
                // by NAL type 0x67 (103).
                if (!firstsps) {
                    if (videoBuffer[0] == 0 && videoBuffer[1] == 0 && videoBuffer[2] == 0
                            && videoBuffer[3] == 1 && videoBuffer[4] == 103) {
                        firstsps = true;
                    } else {
                        continue;
                    }
                }
                if (recodingVideo) {
                    saveVideo(videoBuffer, ret);
                }

                // Feed the access unit into the decoder.
                // NOTE(review): currentThreadTimeMillis() is this thread's CPU
                // time, not a presentation timestamp — looks wrong, but kept
                // because the render path appears to ignore it; confirm before
                // changing.
                ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
                int inputBufferIndex = mCodec.dequeueInputBuffer(1000);
                if (inputBufferIndex >= 0) {
                    ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                    inputBuffer.put(videoBuffer, 0, ret);
                    mCodec.queueInputBuffer(inputBufferIndex, 0, ret,
                            SystemClock.currentThreadTimeMillis(), 0);
                }

                // Drain and render every output buffer that is ready.
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100);
                while (outputBufferIndex >= 0) {
                    mCodec.releaseOutputBuffer(outputBufferIndex, true);
                    outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
                }
            }

            // Close the dump file if recording was ever started.
            if (outputStream != null) {
                try {
                    outputStream.flush();
                    outputStream.close();
                    outputStream = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            System.out.printf("[%s] Exit\n",
                    Thread.currentThread().getName());
        }

        /**
         * Appends one received frame to the .h264 dump file, creating the
         * target directory and the output stream lazily on first use.
         */
        private void saveVideo(byte[] videoBuffer, int ret) {
            try {
                File fileFolder = new File(videopath);
                if (!fileFolder.exists()) {
                    fileFolder.mkdirs();
                }
                // Fix: the stream used to be opened only when the file did NOT
                // already exist, so a pre-existing file of the same name caused
                // an NPE on the write below. Open whenever we have no stream;
                // FileOutputStream creates the file itself.
                if (outputStream == null) {
                    File file = new File(videopath, fileName);
                    Log.d(TAG, "file create success ");
                    outputStream = new FileOutputStream(file);
                }
                outputStream.write(videoBuffer, 0, ret);
            } catch (Exception e) {
                Log.e(TAG, e.toString());
                e.printStackTrace();
            }
        }
    }
   
    /**
     * Receives audio frames from the device and hands them to AudioDecoder
     * for playback while {@link #audioRunning} is set; otherwise it idles.
     * Loops until {@link #running} goes false or the session dies.
     */
    public class AudioThread implements Runnable {
        static final int AUDIO_BUF_SIZE = 1024;
        static final int FRAME_INFO_SIZE = 16;
        private int avIndex;
        private long time1;
        private int num;

        public AudioThread(int avIndex) {
        	
            this.avIndex = avIndex;

            // AudioDecoder is only created/started on API 18+ (JELLY_BEAN_MR2);
            // the decode call in run() is gated on the same check.
            if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.JELLY_BEAN_MR2){
                audioDecoder=new AudioDecoder();
                audioDecoder.start();
            }



        }

        @Override
        public void run() {

            System.out.printf("[%s] Start\n",
                    Thread.currentThread().getName());
            
            AVAPIs av = new AVAPIs();


           KLog.d("shurun","音频播放AVcliet启动");
            byte[] frameInfo = new byte[FRAME_INFO_SIZE];
            byte[] audioBuffer = new byte[AUDIO_BUF_SIZE];
            while (running) {
                if (audioRunning){
                    // When playback (re)starts, flush whatever stale audio
                    // accumulated while we were paused so playback is not
                    // lagged. beginClean is re-armed in the paused branch below.
                    if (beginClean){
                        AVAPIs.avClientCleanAudioBuf(avIndex);
//                        av.avClientCleanLocalBuf(avIndex);
                        beginClean=false;
                    }
                    int ret = AVAPIs.avCheckAudioBuf(avIndex);

                    if (ret < 0) {
                        // Same error codes as below
                        System.out.printf("[%s] avCheckAudioBuf() failed: %d\n",
                                Thread.currentThread().getName(), ret);
                        break;
                    }
                    // Fewer than 3 frames buffered: wait for more before
                    // pulling, to avoid playback underrun.
                    else if (ret < 3) {
                        try {
                            Thread.sleep(120);
                            continue;
                        }
                        catch (InterruptedException e) {
                            System.out.println(e.getMessage());
                            break;
                        }
                    }

                    int[] frameNumber = new int[1];
                    ret = AVAPIs.avRecvAudioData(avIndex, audioBuffer,
                            AUDIO_BUF_SIZE, frameInfo, FRAME_INFO_SIZE,
                            frameNumber);

                    if (ret == AVAPIs.AV_ER_SESSION_CLOSE_BY_REMOTE) {
                        System.out.printf("[%s] AV_ER_SESSION_CLOSE_BY_REMOTE\n",
                                Thread.currentThread().getName());
                        break;
                    }
                    else if (ret == AVAPIs.AV_ER_REMOTE_TIMEOUT_DISCONNECT) {
                        System.out.printf("[%s] AV_ER_REMOTE_TIMEOUT_DISCONNECT\n",
                                Thread.currentThread().getName());
                        break;
                    }
                    else if (ret == AVAPIs.AV_ER_INVALID_SID) {
                        System.out.printf("[%s] Session cant be used anymore\n",
                                Thread.currentThread().getName());
                        break;
                    }
                    else if (ret == AVAPIs.AV_ER_LOSED_THIS_FRAME) {
                        //System.out.printf("[%s] Audio frame losed\n",
                        //        Thread.currentThread().getName());
                        continue;
                    }

                    // Now the data is ready in audioBuffer[0 ... ret - 1]
                    // Hand the raw frame to the decoder (API 18+ path only —
                    // on older devices received audio is silently dropped).

//                   KLog.d("shurun",ret+"音频");
                    if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.JELLY_BEAN_MR2){
                        audioDecoder.decode(audioBuffer, 0, ret);
                    }

                }else {
                    // Playback paused: idle, and re-arm the one-shot buffer
                    // flush for the next resume.
                    SystemClock.sleep(200);
                    if (beginClean==false){
                        beginClean=true;
                    }
                }


              
            }
            // Final flush on exit so a later session starts from a clean buffer.
            AVAPIs.avClientCleanAudioBuf(avIndex);
            System.out.printf("[%s] Exit\n",
                    Thread.currentThread().getName());

            if (audioDecoder!=null) {
        		audioDecoder.stop();
            	audioDecoder=null;
    		}
        }
    }
    /**
     * Captures microphone audio via AudioEncoder and uploads it to the device
     * over a server-side AV channel (avServStart on {@link #channel}) while
     * {@link #recordRunning} is set. Loops until {@link #running} goes false.
     */
    public class AudioRecordThread implements Runnable{
    	  FrameInfo frameInfo;
          byte[] info;
 		 short[] sh=new short[320];
 		 int size=320;
    	  @RequiresApi(api = Build.VERSION_CODES.KITKAT)
          public AudioRecordThread() {
            frameInfo=new FrameInfo();
            // NOTE(review): 0x8A is presumably a TUTK audio codec id matching
            // what AudioEncoder produces — confirm against AVFrame/AVIOCTRLDEFs.
            frameInfo.codec_id=0x8A;
            // Audio format flags: 8 kHz sample rate, 16-bit samples, mono.
            frameInfo.flags=(AVFrame.AUDIO_SAMPLE_8K<< 2)|(AVFrame.AUDIO_DATABITS_16<< 1)|AVFrame.AUDIO_CHANNEL_MONO;
            // Pre-built 16-byte frame header sent with every audio packet.
           info=frameInfo.parseContent(frameInfo.codec_id, frameInfo.flags);
          }
    	byte[] decodebyte=new byte[320];
		@Override
		public void run() {
			encoder=new AudioEncoder();
			encoder.prepare();
			 AVAPIs av = new AVAPIs();

			// Open a server-side AV channel on the channel number we advertised
			// to the device via IOTYPE_USER_IPCAM_SPEAKERSTART.
	    	 int index2 = AVAPIs.avServStart(sid, null, null, 20000, 0, channel);
            if (index2<0){
                System.out.printf("avServStart failed[%d]\n", index2);
                mHandlerc.sendEmptyMessage(AUDIO_Record_Failure);
                return;
            }
            System.out.printf("avServStart[%d]\n", index2);
			
				while (running) {
                    if (recordRunning){
                        decodebyte=	encoder.testRecord();
                        // NOTE(review): the send result is ignored, so upload
                        // errors go undetected — consider checking ret.
                        int ret= AVAPIs.avSendAudioData(index2, decodebyte, size, info, info.length);
                    }else {
                        SystemClock.sleep(100);
                        // NOTE(review): avClientCleanAudioBuf is called on a
                        // SERVER channel index here — looks suspicious; verify
                        // against the TUTK AV API docs.
                        AVAPIs.avClientCleanAudioBuf(index2);
                    }




				}
            // Tear down the upload channel once the client stops.
            AVAPIs.avServStop(index2);
            AVAPIs.avServExit(sid,channel);



           KLog.d("client","音频发送退出");
			encoder.stopRecord();
           KLog.d("client","音频记录退出");


			System.out.printf("[%s] Exit\n",
                    Thread.currentThread().getName());
		}
    	
    }
    /**
     * Java mirror of the 16-byte TUTK frame-info header that accompanies each
     * audio packet (codec id, format flags, camera index, reserved fields).
     */
    public static class FrameInfo {
        public short codec_id;
        public byte flags;
        public byte cam_index;
        public byte onlineNum;
        public byte[] reserve1 = new byte[3];
        public int reserve2;
        public int timestamp;

        /**
         * Serialises the codec id (little-endian, bytes 0-1) and the flag byte
         * (byte 2) into a 16-byte header; the remaining bytes stay zeroed.
         */
        public byte[] parseContent(short _codec_id, byte _flags) {
            byte[] header = new byte[16];
            byte[] codecLE = Packet.shortToByteArray_Little(_codec_id);
            header[0] = codecLE[0];
            header[1] = codecLE[1];
            header[2] = _flags;
            return header;
        }
    }
}

