package p2psvideo;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;

import org.json.JSONException;
import org.json.JSONObject;

import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;

import p2pproxy.P2PSClient.DataItem;

@SuppressLint("NewApi")
class AudioTrackPlayer extends Thread implements IMediaTrackPlayer, SoundPlayer.OnAudioFeed {

	private IAVPlayerCtl _ctl;
	private boolean _srcEof;                 // upstream signalled end-of-stream via pushFrame(null)
	private boolean _eof;                    // everything decoded AND played out
	private boolean _eofSent;                // EOS flag has been queued to the decoder
	private volatile boolean _pause;
	private volatile boolean _quit;
	private boolean _headerSent;             // codec-config header delivered to the current decoder instance
	private MediaCodec _dec;
	private MediaFormat _format;             // input format built from a type-1 config frame
	private MediaFormat _outFormat;
	private DataItem _header;                // cached codec-config frame (type 3)
	private int _avlOutputBuf = -1;          // decoder output buffer index awaiting hand-off; -1 = none
	private List<DataItem> _frames = new LinkedList<DataItem>();
	private int _sampleRate;
	private int _channels;
	private volatile boolean _inputDrain;    // decoder input queue full; skip enqueue until output drains
	private BufferInfo _audioInfo = new BufferInfo();
	private SoundPlayer _audPlayer;          // created lazily in start(); may be null before then
	private int _latency;

	/**
	 * @param ctl     owner used for frame recycling and A/V-sync notifications
	 * @param latency target playback latency handed to the SoundPlayer
	 */
	public AudioTrackPlayer(IAVPlayerCtl ctl, int latency) {
		_ctl = ctl;
		_latency = latency;
	}

	/** Deep-compares two frames (type, size and the first {@code size} payload bytes). */
	private static boolean sameDataItem(DataItem it1, DataItem it2)
	{
		if (it1 == it2) return true;
		if (it1 == null) return false;
		if (it2 == null) return false;
		if (it1.type != it2.type) return false;
		if (it1.size != it2.size) return false;
		// data[] may be larger than size, so compare only the valid region.
		for(int i=0; i<it1.size; ++i) {
			if (it1.data[i] != it2.data[i]) return false;
		}
		return true;
	}

	/** Stops and releases the decoder and resets all per-decoder bookkeeping. Safe to call repeatedly. */
	private void releaseDec()
	{
		if (_dec != null) {
			try {
				_dec.stop();
			} catch(Throwable exp) {
				exp.printStackTrace();
			}
			try {
				_dec.release();
			} catch(Throwable exp) {
				exp.printStackTrace();
			}
			_dec = null;
		}
		_avlOutputBuf = -1;
		_headerSent = false;
		_inputDrain = false;
		// FIX: guard against close() being called before start() created the player.
		if (_audPlayer != null) {
			_audPlayer.setAudioEnd();
		}
	}

	/**
	 * Tries to queue an end-of-stream flag into the decoder.
	 *
	 * @return true when the EOS was queued (or the decoder is gone, so there is
	 *         nothing to flag); false when no input buffer was available — retry later.
	 */
	private boolean sendEofToDec()
	{
		if (_dec == null) return true;
		int inputBufIdx = -1;
		try {
			inputBufIdx = _dec.dequeueInputBuffer(0);
		} catch(Throwable e) {
			e.printStackTrace();
			releaseDec();
			return true;
		}
		// FIX: test >= 0 explicitly; a negative value is INFO_TRY_AGAIN_LATER, not an index.
		if (inputBufIdx >= 0) {
			try {
				_dec.queueInputBuffer(inputBufIdx, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
				return true;
			} catch(Throwable e) {
				e.printStackTrace();
				releaseDec();
				return true;
			}
		}
		return false;
	}

	/** Latches {@code _eof} once the source, decoder and audio sink are all fully drained. */
	private void checkEof()
	{
		if (_eof) return;
		if (!_srcEof) return;
		if (!_frames.isEmpty()) return;
		if (!_eofSent) return;
		if (_avlOutputBuf != -1) return;
		if (_audPlayer != null) {
			if (!_audPlayer.empty()) return;
		}
		_eof = true;
	}

	/**
	 * Sends EOS to the decoder and drains its remaining output frames.
	 *
	 * @return true when the decoder is fully drained and released;
	 *         false when output frames are still pending — the caller must retry later.
	 */
	private boolean flushDec()
	{
		if (_dec == null) return true;
		sendEofToDec();
		dequeueFrame();
		if (_avlOutputBuf == -1) { // no output frames left
			releaseDec();
			return true;
		} else { // still output frames pending; upstream must call again later
			return false;
		}
	}

	/**
	 * Copies one frame into a decoder input buffer.
	 *
	 * @param it frame to deliver; type 2/3 = codec config, type 4/5 = media data
	 * @return true when the frame was queued; false when the decoder had no free
	 *         input buffer (sets {@code _inputDrain}) or failed fatally.
	 */
	private boolean deliverFrame(DataItem it)
	{
		if (_dec == null) return false;
		int bufferIdx = -1;
		try {
			bufferIdx = _dec.dequeueInputBuffer(0);
		} catch(Throwable e) {
			e.printStackTrace();
			releaseDec();
			return false;
		}
		if (bufferIdx < 0) {
			_inputDrain = true;
			return false;
		} else {
			_inputDrain = false;
		}
		try {
			ByteBuffer buf;
			if (android.os.Build.VERSION.SDK_INT >= 21) {
				buf = _dec.getInputBuffer(bufferIdx);
			} else {
				buf = _dec.getInputBuffers()[bufferIdx];
			}
			buf.put(it.data, 0, it.size);
			if (it.type == 4 || it.type == 5) {
				// Presumably it.pts is in milliseconds; MediaCodec wants microseconds — TODO confirm.
				long pts = ((long)it.pts)*1000;
				_dec.queueInputBuffer(bufferIdx, 0, it.size, pts, 0);
			} else if (it.type == 2 || it.type == 3) {
				_dec.queueInputBuffer(bufferIdx, 0, it.size, 0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
			} else {
				// FIX: an unexpected frame type used to leak the dequeued input buffer,
				// eventually starving the decoder. Return it empty instead.
				_dec.queueInputBuffer(bufferIdx, 0, 0, 0, 0);
			}
		} catch(Throwable e) {
			e.printStackTrace();
			releaseDec();
			return false;
		}
		return true;
	}

	/**
	 * Pulls one decoded buffer from the decoder and hands it to the SoundPlayer.
	 *
	 * @return true when a buffer was delivered; false when nothing was available
	 *         or the sink is full (the pending index is kept in {@code _avlOutputBuf}).
	 */
	private boolean dequeueFrame()
	{
		if (_dec == null) return false;
		if (_avlOutputBuf == -1) { // no pending output frame yet
			int r = -1;
			try {
				r = _dec.dequeueOutputBuffer(_audioInfo, 0);
				if (r == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
					_outFormat = _dec.getOutputFormat();
					_sampleRate = _outFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
					_channels = _outFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
					_audPlayer.rebuild(_outFormat);
					_ctl.notifyAudioInfo(_sampleRate, _channels);
				}
			} catch(Throwable e) {
				e.printStackTrace();
				releaseDec();
			}
			_inputDrain = false;
			if (r >= 0) {
				_avlOutputBuf = r;
			}
		}
		if (_avlOutputBuf == -1) return false;
		if (_audPlayer.full()) {
			return false;
		} else {
			try {
				ByteBuffer buf;
				if (android.os.Build.VERSION.SDK_INT >= 21) {
					// FIX: use the API-21 accessor, matching the input path above.
					// getOutputBuffer() already positions the buffer at the valid data.
					buf = _dec.getOutputBuffer(_avlOutputBuf);
				} else {
					buf = _dec.getOutputBuffers()[_avlOutputBuf];
					// FIX: legacy buffers are not positioned — honor BufferInfo.offset
					// instead of reading from position 0.
					buf.position(_audioInfo.offset);
					buf.limit(_audioInfo.offset + _audioInfo.size);
				}
				byte[] arr = new byte[_audioInfo.size];
				buf.get(arr, 0, _audioInfo.size);
				_audPlayer.postBuffer(new SoundPlayerBuffer(arr, _audioInfo.presentationTimeUs));
				// FIX: render=false — this is an audio decoder configured without a Surface.
				_dec.releaseOutputBuffer(_avlOutputBuf, false);
			} catch(Throwable e) {
				releaseDec();
			} finally {
				_avlOutputBuf = -1;
			}
			return true;
		}
	}

	/**
	 * Consumes the head of the frame queue: type 1 rebuilds the input format,
	 * type 3 caches/refreshes the codec-config header, type 5 (lazily creating
	 * the decoder) feeds media data. Also drives EOS once the source is done.
	 *
	 * @return true when progress was made and the caller should loop again immediately.
	 */
	private boolean enqueueFrame()
	{
		if (_frames.isEmpty()) {
			if (_eof) return false;
			if (_srcEof && !_eofSent) { // EOS not yet delivered to the decoder
				if (_dec != null) {
					if (sendEofToDec()) {
						_eofSent = true;
					}
				}
				if (_dec == null) {
					_eofSent = true;
				}
			}
			if (_srcEof) {
				return dequeueFrame();
			}
			return false;
		}
		DataItem it = _frames.get(0);

		switch(it.type) {
		case 1:  // format frame: reopen the decoder, flushing its buffered data first
			if (!flushDec()) {
				return false;
			}
			String cfg = new String(it.data, 0, it.size);
			JSONObject mimeInfo;
			String mime="";
			int sr=0;
			int chnls = 0;
			try {
				mimeInfo = new JSONObject(cfg);
				mime = mimeInfo.getString("mime");
				sr = mimeInfo.getInt("sampleRate");
				chnls = mimeInfo.getInt("channels");
			} catch (JSONException e1) {
				e1.printStackTrace(); // fall through to the defaults below
			}
			if (sr == 0) {
				sr = 48000; // default sample rate when the config omits it
			}
			if (chnls == 0) {
				chnls = 2;  // default to stereo
			}
			// Map container mime names to the Android codec mime types.
			if (mime.equals("audio/aac")) {
				mime = "audio/mp4a-latm";
			} else if (mime.equals("audio/mp3")) {
				mime = "audio/mpeg";
			}
			try {
				_format = new MediaFormat();
				_format.setString(MediaFormat.KEY_MIME, mime);
				_format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, chnls*sr*4*8);
				_format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, chnls);
				_format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sr);
			} catch(Throwable e) {
				e.printStackTrace();
				releaseDec();
			}
			_frames.remove(0);
			_ctl.freeFrame(it);
			return true;
		case 3: // codec-config frame
			if (_format == null) { // no format frame seen yet — drop it
				_frames.remove(0);
				_ctl.freeFrame(it);
				return false;
			}
			boolean needRebuild = true;
			if (sameDataItem(_header, it)) needRebuild = false;
			if (needRebuild) { // header changed: flush the decoder and adopt the new header
				if (!flushDec()) return false;
				if (_header != null) {
					_ctl.freeFrame(_header);
				}
				_frames.remove(0);
				_header = it;
				return true;
			} else { // identical header — nothing to rebuild
				_ctl.freeFrame(it);
				_frames.remove(0);
				return true;
			}
		case 5: // ordinary media frame
			if (_format == null) { // no format frame seen yet — drop it
				_frames.remove(0);
				_ctl.freeFrame(it);
				return false;
			}
			if (_dec == null) { // lazily create the decoder
				try {
					_dec = MediaCodec.createDecoderByType(_format.getString(MediaFormat.KEY_MIME));
					if (android.os.Build.VERSION.SDK_INT >= 18) {
						Log.v("P2PSMediaPlayer", "Using audio decoder: " + _dec.getName());
					}
					_dec.configure(_format, null, null, 0);
					_dec.start();
				} catch (Throwable e) {
					e.printStackTrace();
					releaseDec();
				}
			}
			if (_dec == null) { // decoder creation failed — drop the frame
				_ctl.freeFrame(it);
				_frames.remove(0);
				return false;
			}
			// Deliver the cached codec-config header (if any) exactly once per decoder instance.
			if (!_headerSent) {
				if (_header == null) {
					_headerSent = true;
				}
			}
			if (!_headerSent) {
				if (!deliverFrame(_header)) {
					return false;
				}
				_headerSent = true;
			}
			if (!deliverFrame(it)) {
				return false;
			}
			_ctl.freeFrame(it);
			_frames.remove(0);
			return true;
		default: // unknown frame type — discard
			_ctl.freeFrame(it);
			_frames.remove(0);
			return true;
		}
	}

	SoundPlayer getSoundPlayer()
	{
		return _audPlayer;
	}

	/** Worker loop: pump frames into the decoder and decoded audio out, until close(). */
	@Override
	public void run() {
		while(true) {
			while (_pause && !_quit) {
				try {
					Thread.sleep(5);
				} catch (InterruptedException e) {
					e.printStackTrace(); // interrupt comes from close(); the _quit check below exits
				}
			}
			if (_quit) break;
			boolean r=false;
			synchronized(this) {
				if (!_inputDrain) r = enqueueFrame();
				r |= dequeueFrame();
				checkEof();
			}
			if (!r) { // no progress — back off briefly instead of spinning
				try {
					Thread.sleep(5);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
			}
		}
		// FIX: removed the pointless super.run() call (no-op for a subclassed Thread).
	}

	/** Returns the buffered duration: queued-frame span plus the sink's pending audio. */
	@Override
	public int getBufferLength() {
		synchronized(this) {
			// FIX: guard against a call before start() created the player.
			if (_audPlayer == null) return 0;
			int dur = _audPlayer.getPendingDuration();
			if (_frames.size() < 1) return dur;
			int delta = _frames.get(_frames.size()-1).dts - _frames.get(0).dts;
			if (delta < 0) return dur; // dts wrapped or stream restarted — ignore the span
			else return delta + dur;
		}
	}

	/** Audio track has no surface; intentionally a no-op. */
	@Override
	public void setSurface(Surface sf) {
		return;
	}

	@Override
	public void pause(boolean pauseOrResume) {
		_pause = pauseOrResume;
		synchronized(this) {
			if (_quit) return;
			// FIX: guard against pause() before start() created the player.
			if (_audPlayer == null) return;
			if (_pause) {
				_audPlayer.pause();
			} else {
				_audPlayer.resume();
			}
		}
	}

	/**
	 * Queues a frame for decoding. A null item marks source EOF; frames pushed
	 * after EOF (or after close) are recycled immediately.
	 */
	@Override
	public void pushFrame(DataItem it) {
		synchronized(this) {
			if (_quit || _eof) {
				if (it != null) {
					_ctl.freeFrame(it);
				}
				return;
			}
			if (_srcEof && it != null) {
				_ctl.freeFrame(it);
				return;
			}
			if (it == null) {
				_srcEof = true;
				return;
			}
			_frames.add(it);
		}
	}

	@Override
	public boolean eof() {
		return _eof;
	}

	@Override
	public void start() {
		_audPlayer = new SoundPlayer(this, _latency);
		super.start();
	}

	/** Stops the worker thread, recycles all queued frames and releases the decoder and sink. */
	@Override
	public void close() {
		synchronized(this) {
			if (_quit) return;
			_quit = true;
		}
		this.interrupt(); // wake any Thread.sleep() in run()
		try {
			join();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		synchronized(this) {
			if (_header != null) {
				_ctl.freeFrame(_header);
				_header = null;
			}
			while(!_frames.isEmpty()) {
				_ctl.freeFrame(_frames.get(0));
				_frames.remove(0);
			}
			releaseDec();
			// FIX: guard against close() before start() created the player.
			if (_audPlayer != null) {
				_audPlayer.release();
			}
			_ctl = null;
		}
	}

	/** Audio is the sync master; nothing to do when the sync time changes. */
	@Override
	public void notifySyncTimeChanged() {
		return;
	}

	/** SoundPlayer callback: decoded audio was consumed, so the sync clock advanced. */
	@Override
	public void onAudioFeed() {
		_ctl.notifySyncTimeChanged();
	}

	/** @return current playback timestamp from the sink, or -1 before start(). */
	@Override
	public long getTime() {
		synchronized(this) {
			if (_audPlayer == null) return -1;
			else return _audPlayer.getFrameTimeStamp();
		}
	}

}
