/*
 * Copyright (C) 2011-2013 GUIGUI Simon, fyhertz@gmail.com
 *
 * This file is part of Spydroid (http://code.google.com/p/spydroid-ipcamera/)
 *
 * Spydroid is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This source code is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this source code; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

package net.majorkernelpanic.streaming.video;

import android.annotation.SuppressLint;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
import android.util.Size;

import net.majorkernelpanic.streaming.MediaStream;
import net.majorkernelpanic.streaming.rtp.MediaCodecInputStream;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;

import camera.Camera;

/**
 * Abstract base class for video streams. Don't use this class directly.
 */
public abstract class VideoStream extends MediaStream {

    protected final static String TAG = "VideoStream";

    // Current encoding quality (resolution, framerate, bitrate, orientation).
    protected VideoQuality mQuality = VideoQuality.DEFAULT_VIDEO_QUALITY.clone();
    //protected SurfaceHolder.Callback mSurfaceHolderCallback = null;
    //protected SurfaceHolder mSurfaceHolder = null;
    // Selected encoder id and camera facing (CameraCharacteristics.LENS_FACING_*).
    protected int mVideoEncoder, mCameraFacing = CameraCharacteristics.LENS_FACING_FRONT;
    // True when the preview was started explicitly via startPreview(), not just for streaming.
    protected boolean mCameraOpenedManually = true;
    // True while the torch/LED is on.
    protected boolean mFlashState = false;
    // NOTE(review): only ever written from the commented-out surface callback below — confirm still needed.
    protected boolean mSurfaceReady = false;
    // True while the camera is unlocked for use by a MediaRecorder (see lockCamera/unlockCamera).
    protected boolean mUnlocked = false;
    // True while the camera preview is running.
    protected boolean mPreviewStarted = false;

    /**
     * Don't use this class directly.
     * Selects the back facing camera by default.
     */
    public VideoStream() {
        this(CameraCharacteristics.LENS_FACING_BACK);
    }

    /**
     * Don't use this class directly.
     *
     * @param cameraFacing Either CameraCharacteristics.LENS_FACING_BACK or CameraCharacteristics.LENS_FACING_FRONT
     */
    public VideoStream(int cameraFacing) {
        super();
        // TODO: Remove this when encoding with the MediaCodec API is ready
        setMode(MODE_MEDIARECORDER_API);
        setCamera(cameraFacing);
    }

    /**
     * Switches between the front facing and the back facing camera of the phone.
     * If {@link #startPreview()} has been called, the preview will be briefly interrupted.
     * If {@link #start()} has been called, the stream will be briefly interrupted.
     * You should not call this method from the main thread if you are already streaming.
     *
     * @throws IOException      if the preview or the stream cannot be restarted
     * @throws RuntimeException if the new camera cannot be opened
     **/
    public void switchCamera() throws RuntimeException, IOException {
        Log.d(TAG, "switchCamera");

        if (Camera.getInstance().getNumbersOfCameras() < 2) {
            throw new IllegalStateException("Phone only has one camera !");
        }

        // Remember the current state so it can be restored with the new camera.
        boolean streaming = mStreaming;
        boolean previewing = mCameraOpenedManually;

        mCameraFacing = mCameraFacing == CameraCharacteristics.LENS_FACING_BACK ?
                CameraCharacteristics.LENS_FACING_FRONT : CameraCharacteristics.LENS_FACING_BACK;
        stopPreview();
        // The original code redundantly called setCamera() both before and after
        // stopPreview(); selecting the new facing once after teardown is sufficient.
        setCamera(mCameraFacing);
        if (previewing) startPreview();
        if (streaming) start();
    }

    /**
     * Returns the camera facing currently selected
     * (CameraCharacteristics.LENS_FACING_BACK or LENS_FACING_FRONT).
     */
    public int getCamera() {
        Log.d(TAG, "getCamera");

        return mCameraFacing;
    }

    /**
     * Sets the camera that will be used to capture video.
     * You can call this method at any time and changes will take effect next time you start the stream.
     *
     * @param facing_to_select Either CameraCharacteristics.LENS_FACING_BACK or CameraCharacteristics.LENS_FACING_FRONT
     */
    public void setCamera(int facing_to_select) {
        Log.d(TAG, "setCamera");

        mCameraFacing = facing_to_select;
    }

    /**
     * Sets a Surface to show a preview of recorded media (video).
     * You can call this method at any time and changes will take effect next time you call {@link #start()}.
     */
	/*
	public synchronized void setPreviewDisplay(SurfaceHolder surfaceHolder) {
		if (mSurfaceHolderCallback != null && mSurfaceHolder != null) {
			//mSurfaceHolder.removeCallback(mSurfaceHolderCallback);
		}
		if (surfaceHolder != null) {
			mSurfaceHolderCallback = new Callback() {
				@Override
				public void surfaceDestroyed(SurfaceHolder holder) {
					mSurfaceReady = false;
					stopPreview();
					Log.d(TAG,"Surface destroyed !");
				}
				@Override
				public void surfaceCreated(SurfaceHolder holder) {
					mSurfaceReady = true;
				}
				@Override
				public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
					Log.d(TAG,"Surface Changed !");
				}
			};
			mSurfaceHolder = surfaceHolder;
			//mSurfaceHolder.addCallback(mSurfaceHolderCallback);
			mSurfaceReady = true;
		}
	}
        */

    /**
     * Toggles the LED of the phone if it has one.
     */
    public void toggleFlash() {
        Log.d(TAG, "toggleFlash");

        boolean newState = !mFlashState;
        setFlashState(newState);
    }

    /**
     * Returns whether the torch/LED is currently requested to be on.
     */
    public boolean getFlashState() {
        Log.d(TAG, "getFlashState");

        return mFlashState;
    }

    /**
     * Turns the LED on or off if the phone has one.
     *
     * @param state true to turn the torch on, false to turn it off
     */
    public synchronized void setFlashState(boolean state) {
        Log.d(TAG, "setFlashState");

        // FIXME: Is it possible to toggle the flash while streaming on android 2.3 ?
        // FIXME: It works on android 4.2 and 4.3
        mFlashState = state;
        Camera.getInstance().setFlashMode(state);
    }

    /**
     * Modifies the resolution of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #start()}.
     * {@link #setVideoQuality(VideoQuality)} may be more convenient.
     *
     * @param width  Width of the stream
     * @param height Height of the stream
     */
    public void setVideoSize(int width, int height) {
        Log.d(TAG, "setVideoSize");

        // Nothing to do when the requested size is already selected.
        if (mQuality.resX == width && mQuality.resY == height) {
            return;
        }
        mQuality.resX = width;
        mQuality.resY = height;
    }

    /**
     * Modifies the framerate of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #start()}.
     * {@link #setVideoQuality(VideoQuality)} may be more convenient.
     *
     * @param rate Framerate of the stream
     */
    public void setVideoFramerate(int rate) {
        // Fixed: this used to log "setVideoParameter", unlike every other method
        // in this class which logs its own name.
        Log.d(TAG, "setVideoFramerate");

        if (mQuality.framerate != rate) {
            mQuality.framerate = rate;
        }
    }

    /**
     * Modifies the bitrate of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #start()}.
     * {@link #setVideoQuality(VideoQuality)} may be more convenient.
     *
     * @param bitrate Bitrate of the stream in bits per second
     */
    public void setVideoEncodingBitrate(int bitrate) {
        Log.d(TAG, "setVideoEncodingBitRate");

        // Nothing to do when the requested bitrate is already selected.
        if (mQuality.bitrate == bitrate) {
            return;
        }
        mQuality.bitrate = bitrate;
    }

    /**
     * Returns the quality of the stream.
     * Note that the returned object is the live internal instance, not a copy.
     */
    public VideoQuality getVideoQuality() {
        Log.d(TAG, "getVideoQuality");

        return mQuality;
    }

    /**
     * Modifies the quality of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #start()}.
     *
     * @param videoQuality Quality of the stream
     */
    public void setVideoQuality(VideoQuality videoQuality) {
        // Fixed typo in the log message ("setVideoQualty").
        Log.d(TAG, "setVideoQuality");

        if (!mQuality.equals(videoQuality)) {
            mQuality = videoQuality;
        }
    }

    /**
     * Modifies the videoEncoder of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #start()}.
     *
     * @param videoEncoder Encoder of the stream
     */
    protected void setVideoEncoder(int videoEncoder) {
        Log.d(TAG, "setVideoEncoder");
        mVideoEncoder = videoEncoder;
    }

    /**
     * Starts the stream.
     * This will also open the camera and display the preview
     * if {@link #startPreview()} has not already been called.
     */
    public synchronized void start() throws IllegalStateException, IOException {
        Log.d(TAG, "start" + mPreviewStarted);

        // If the preview is not running, the camera is being opened for streaming
        // only, so mark it as not manually opened.
        if (!mPreviewStarted) mCameraOpenedManually = false;
        super.start();
    }

    /**
     * Stops the stream, then attempts to restart the camera preview.
     */
    public synchronized void stop() {

        super.stop();
        Log.d(TAG, "stop" + mPreviewStarted);

        try {
            startPreview();
        } catch (RuntimeException | IOException e) {
            // A preview restart failure must not prevent the stop from completing;
            // log it (with the cause) instead of dumping to stderr.
            Log.e(TAG, "Could not restart the preview after stopping the stream", e);
        }
    }

    /**
     * Opens the camera (if needed) and starts the preview.
     * Marks the camera as manually opened so that it is not torn down when streaming stops.
     *
     * @throws IOException      if the camera cannot be configured
     * @throws RuntimeException if the underlying camera fails to start the preview
     */
    public synchronized void startPreview() throws RuntimeException, IOException {
        Log.d(TAG, "startPreview" + mPreviewStarted);

        if (!mPreviewStarted) {
            createCamera();
            try {
                Camera.getInstance().startPreview();
                mPreviewStarted = true;
                mCameraOpenedManually = true;
            } catch (RuntimeException e) {
                // The preview could not be started: release the camera before rethrowing.
                destroyCamera();
                throw e;
            }
        }
    }

    /**
     * Stops the preview.
     * NOTE(review): this delegates to {@link #stop()}, which itself attempts to
     * restart the preview via startPreview() — confirm this interplay is intentional.
     */
    public synchronized void stopPreview() {
        Log.d(TAG, "stopPreview");

        mCameraOpenedManually = false;
        stop();
    }

    /**
     * Encoding of the audio/video is done by a MediaRecorder.
     * The camera output is written to a local pipe whose read end is handed to
     * the packetizer, which wraps it in RTP and sends it over the network.
     *
     * @throws IOException if the local pipes cannot be set up or the packetizer fails to start
     */
    protected void encodeWithMediaRecorder() throws IOException {
        Log.d(TAG, "encodeWithMediaRecorder");

        // We need a local pipe to forward data output by the camera to the packetizer
        createPipes();

        // Opens the camera if needed
        createCamera();

        // Stops the preview if needed
        lockCamera();
        try {
            Camera.getInstance().stopPreview();
        } catch (Exception ignored) {
            // Best effort: the preview may already be stopped.
        }
        mPreviewStarted = false;

        // Unlock the camera so the recorder can take ownership of it
        unlockCamera();

        // We write the output of the camera in a local pipe instead of a file !
        // This one little trick makes streaming feasible quite simply: data from the camera
        // can then be manipulated at the other end of the pipe
        Camera.getInstance().setFD(pipeSender.getFileDescriptor());
        Camera.getInstance().startRecordingVideo();

        try {
            // pipeIS contains the data from the camera;
            // the mPacketizer encapsulates this stream in an RTP stream and sends it over the network
            mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
            mPacketizer.setInputStream(pipeIS);
            mPacketizer.start();
            mStreaming = true;
        } catch (IOException e) {
            stop();
            // Preserve the original failure as the cause instead of discarding it.
            throw new IOException("Something happened with the local sockets :/ Start failed !", e);
        }

    }

    /**
     * Encoding of the audio/video is done by a MediaCodec.
     * Preview frames are pushed into an H.264 ("video/avc") encoder and the
     * encoder output is handed to the packetizer as an RTP stream.
     *
     * @throws IOException      if the packetizer cannot be started
     * @throws RuntimeException if the camera or the encoder fails
     */
    @SuppressLint({"InlinedApi", "NewApi"})
    protected void encodeWithMediaCodec() throws RuntimeException, IOException {
        Log.d(TAG, "encodeWithMediaCodec");
        // Opens the camera if needed
        createCamera();

        // Starts the preview if needed
        if (!mPreviewStarted) {
            try {
                Camera.getInstance().startPreview();
                mPreviewStarted = true;
            } catch (RuntimeException e) {
                destroyCamera();
                throw e;
            }
        }

        // Configure the H.264 encoder with the current quality settings.
        mMediaCodec = MediaCodec.createEncoderByType("video/avc");
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 4);
        mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();

        // Feed every preview frame into the encoder's input queue.
        Camera.getInstance().setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                // Wait at most one frame duration (microseconds) for a free input buffer.
                // (Removed an unused "now" local that was computed here.)
                long timeout = 1000000 / mQuality.framerate;
                int bufferIndex = mMediaCodec.dequeueInputBuffer(timeout);

                if (bufferIndex >= 0) {
                    final ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(bufferIndex);
                    inputBuffer.clear();
                    inputBuffer.put(data, 0, data.length);
                    mMediaCodec.queueInputBuffer(bufferIndex, 0, data.length, System.nanoTime() / 1000, 0);
                } else {
                    // The encoder is falling behind; this frame is dropped.
                    Log.e(TAG, "No buffer available !");
                }

            }
        });

        try {
            // The mPacketizer encapsulates the encoder output in an RTP stream and sends it over the network
            mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
            mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
            mPacketizer.start();
            mStreaming = true;
        } catch (IOException e) {
            stop();
            // Preserve the original failure as the cause instead of discarding it.
            throw new IOException("Something happened with the local sockets :/ Start failed !", e);
        }

    }

    /**
     * Returns a session description for this stream.
     * NOTE(review): implemented by subclasses; presumably SDP — confirm the exact format there.
     */
    public abstract String generateSessionDescription() throws IllegalStateException, IOException;

    /**
     * Opens and configures the camera for the selected facing.
     * Installs an error callback that stops the stream if the media server dies,
     * applies the preview format/size/framerate when using the MediaCodec mode,
     * and turns the torch on when requested. On a configuration failure the
     * camera is released before the exception is rethrown.
     *
     * @throws IOException      declared for callers; the visible body only throws runtime exceptions
     * @throws RuntimeException if opening or configuring the camera fails
     */
    protected synchronized void createCamera() throws RuntimeException, IOException {
        //if (mSurfaceHolder == null || mSurfaceHolder.getSurface() == null || !mSurfaceReady)
        //	throw new IllegalStateException("Invalid surface holder !");

        Log.d(TAG, "createCamera");

        Camera.getInstance().open(mCameraFacing);
        // A freshly opened camera is locked to this process.
        mUnlocked = false;
        Camera.getInstance().setErrorCallback(new Camera.ErrorCallback() {
            @Override
            public void onError(int error, Camera camera) {
                // On some phones when trying to use the camera facing front the media server will die
                // Whether or not this callback may be called really depends on the phone
                if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
                    // In this case the application must release the camera and instantiate a new one
                    Log.e(TAG, "Media server died !");
                    // We don't know in what thread we are so stop needs to be synchronized
                    mCameraOpenedManually = false;
                    stop();
                } else {
                    Log.e(TAG, "Error unknown with the camera: " + error);
                }
            }
        });

        Camera.Parameters parameters = Camera.getInstance().getParameters();

        // The MediaCodec path reads raw preview frames, so the preview must match the stream quality.
        if (mMode == MODE_MEDIACODEC_API) {
            getClosestSupportedQuality(parameters);
            parameters.setPreviewFormat(ImageFormat.YV12);
            parameters.setPreviewSize(mQuality.resX, mQuality.resY);
            parameters.setPreviewFrameRate(mQuality.framerate);
        }

        if (mFlashState) {
            if (parameters.getFlashMode() == null) {
                // The phone has no flash or the choosen camera can not toggle the flash
                throw new IllegalStateException("Can't turn the flash on !");
            } else {
                parameters.setFlashMode(mFlashState ?
                        Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
            }
        }

        try {
            Camera.getInstance().setParameters(parameters);
            Camera.getInstance().setDisplayOrientation(mQuality.orientation);
            //mCamera.setPreviewDisplay(mSurfaceHolder);
        } catch (RuntimeException e) {
            // Configuration failed: release the camera so it is not left open.
            destroyCamera();
            throw e;
            //} catch (IOException e) {
            //	destroyCamera();
            //	throw e;
        }

    }


    /**
     * Stops streaming (if needed), stops the preview and releases the camera.
     * Resets the lock and preview flags so the camera can be reopened later.
     */
    protected synchronized void destroyCamera() {
        Log.d(TAG, "destroyCamera" + mStreaming);
        if (mStreaming) super.stop();
        // Reacquire the lock before touching the camera (a MediaRecorder may hold it).
        lockCamera();
        Camera.getInstance().stopPreview();
        try {
            Camera.getInstance().release();
        } catch (Exception e) {
            // Some exceptions carry no message; guard against a null argument to Log.e().
            Log.e(TAG, e.getMessage() != null ? e.getMessage() : "unknown error");
        }

        mUnlocked = false;
        mPreviewStarted = false;

    }


    /**
     * Verifies if streaming using the MediaCodec API is feasible by logging the
     * color formats of every H.264 encoder available on the device.
     */
    @SuppressLint("NewApi")
    private void checkMediaCodecAPI() {
        Log.d(TAG, "checkMediaCodecAPI");
        MediaCodecInfo[] lst = (new MediaCodecList(MediaCodecList.ALL_CODECS)).getCodecInfos();
        for (int j = lst.length - 1; j >= 0; j--) {
            MediaCodecInfo codecInfo = lst[j];
            if (!codecInfo.isEncoder()) continue;
            // getCapabilitiesForType() throws IllegalArgumentException for codecs that
            // do not handle the requested MIME type, so filter on the supported types first.
            boolean supportsAvc = false;
            for (String type : codecInfo.getSupportedTypes()) {
                if (type.equalsIgnoreCase("video/avc")) {
                    supportsAvc = true;
                    break;
                }
            }
            if (!supportsAvc) continue;
            MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
            // The original code logged this loop twice; once is enough.
            for (int i = 0; i < capabilities.colorFormats.length; i++) {
                int format = capabilities.colorFormats[i];
                Log.e(TAG, codecInfo.getName() + " with color format " + format);
            }
        }
    }

    /**
     * Checks if the resolution and the framerate selected are supported by the camera.
     * If the framerate is unsupported, the closest supported rate is selected instead.
     * Resolutions are only logged; the selected resolution is not adjusted here.
     * FIXME: Not reliable, more or less useless :(
     **/
    private void getClosestSupportedQuality(Camera.Parameters parameters) {
        Log.d(TAG, "getClosestSupportedQuality");

        // Resolutions (use a StringBuilder instead of String concatenation in a loop).
        StringBuilder supportedSizesStr = new StringBuilder("Supported resolutions: ");
        List<Size> supportedSizes = parameters.getSupportedPreviewSizes();
        for (Iterator<Size> it = supportedSizes.iterator(); it.hasNext(); ) {
            Size size = it.next();
            supportedSizesStr.append(size.getWidth()).append("x").append(size.getHeight());
            if (it.hasNext()) supportedSizesStr.append(", ");
        }
        Log.v(TAG, supportedSizesStr.toString());

        // Frame rates
        StringBuilder supportedFrameRatesStr = new StringBuilder("Supported frame rates: ");
        List<Integer> supportedFrameRates = parameters.getSupportedPreviewFrameRates();
        for (Iterator<Integer> it = supportedFrameRates.iterator(); it.hasNext(); ) {
            supportedFrameRatesStr.append(it.next()).append("fps");
            if (it.hasNext()) supportedFrameRatesStr.append(", ");
        }
        Log.v(TAG, supportedFrameRatesStr.toString());

        // If the requested rate is unsupported, pick the supported rate closest to it.
        if (!supportedFrameRates.contains(mQuality.framerate)) {
            int minDist = Integer.MAX_VALUE, newFps = mQuality.framerate;
            for (int fps : supportedFrameRates) {
                int dist = Math.abs(fps - mQuality.framerate);
                if (dist < minDist) {
                    minDist = dist;
                    newFps = fps;
                }
            }
            Log.v(TAG, "Frame rate modified: " + mQuality.framerate + "->" + newFps);
            mQuality.framerate = newFps;
        }

    }

    /**
     * Re-locks the camera to this process after a MediaRecorder used it.
     * No-op when the camera is not currently unlocked.
     */
    protected void lockCamera() {
        Log.d(TAG, "Locking camera" + mUnlocked);
        if (mUnlocked) {
            try {
                Camera.getInstance().reconnect();
            } catch (Exception e) {
                // Some exceptions carry no message and Log.e() rejects a null message,
                // so fall back to a placeholder (same guard as destroyCamera()).
                Log.e(TAG, e.getMessage() != null ? e.getMessage() : "unknown error");
            }
            mUnlocked = false;
        }
    }

    /**
     * Unlocks the camera so a MediaRecorder can take ownership of it.
     * No-op when the camera is already unlocked.
     */
    protected void unlockCamera() {
        Log.d(TAG, "Unlocking camera" + mUnlocked);
        if (!mUnlocked) {
            try {
                Camera.getInstance().unlock();
            } catch (Exception e) {
                // Some exceptions carry no message and Log.e() rejects a null message,
                // so fall back to a placeholder (same guard as destroyCamera()).
                Log.e(TAG, e.getMessage() != null ? e.getMessage() : "unknown error");
            }
            mUnlocked = true;
        }
    }
}
