/*
 * Copyright (C) 2011-2014 GUIGUI Simon, fyhertz@gmail.com
 *
 * This file is part of libstreaming (https://github.com/fyhertz/libstreaming)
 *
 * Spydroid is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This source code is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this source code; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

package net.majorkernelpanic.streaming;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.CountDownLatch;

import net.majorkernelpanic.spydroid.SpydroidApplication;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.audio.AudioStream;
import net.majorkernelpanic.streaming.exceptions.CameraInUseException;
import net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException;
import net.majorkernelpanic.streaming.exceptions.InvalidSurfaceException;
import net.majorkernelpanic.streaming.exceptions.StorageUnavailableException;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.rtsp.RtspClient;
import net.majorkernelpanic.streaming.video.VideoQuality;
import net.majorkernelpanic.streaming.video.VideoStream;

import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Log;

import static net.majorkernelpanic.streaming.MediaStream.RTP_OVER_UDP;

/**
 * You should instantiate this class with the {@link SessionBuilder}.<br />
 * This is the class you will want to use to stream audio and or video to some peer using RTP.<br />
 * <p>
 * It holds a {@link VideoStream} and a {@link AudioStream} together and provides
 * synchronous and asynchronous functions to start and stop those steams.
 * You should implement a callback interface {@link Callback} to receive notifications and error reports.<br />
 * <p>
 * If you want to stream to a RTSP server, you will need an instance of this class and hand it to a
 * {@link RtspClient}.
 * <p>
 * If you don't use the RTSP protocol, you will still need to send a session description to the receiver
 * for him to be able to decode your audio/video streams. You can obtain this session description by calling
 * {@link #configure()} or {@link #syncConfigure()} to configure the session with its parameters
 * (audio samplingRate, video resolution) and then {@link Session#getSessionDescription()}.<br />
 * <p>
 * See the example 2 here: https://github.com/fyhertz/libstreaming-examples to
 * see an example of how to get a SDP.<br />
 * <p>
 * See the example 3 here: https://github.com/fyhertz/libstreaming-examples to
 * see an example of how to stream to a RTSP server.<br />
 * <p>
 * SpyDroid's streaming pipeline consists of three stages:
 * 1. read raw data from the local camera;
 * 2. encode that data and push it to the RtspServer;
 * 3. the client pulls the stream from the RtspServer.
 * <p>
 * Note that stages 1 and 2 both happen inside SpyDroid itself.
 * <p>
 * {@link Session} is the core of the whole streaming pipeline: every operation
 * revolves around it. The Session controls the encoding of the streams as well
 * as the destination they are pushed to.
 * <p>
 * {@link net.majorkernelpanic.streaming.rtsp.RtspServer} interacts directly
 * with the {@link Session}, not with the raw media streams.
 * <p>
 * The Session acts as an intermediate coordinator that holds the encoding
 * state of the streams.
 * <p>
 * This separation of responsibilities is more in line with OOP design.
 * <p>
 * It also mirrors the RTSP protocol itself, in which each exchange is likewise
 * scoped to a session.
 */
public class Session {

    public final static String TAG = "Session";

    public final static int STREAM_VIDEO = 0x01;

    public final static int STREAM_AUDIO = 0x00;

    /**
     * Some app is already using a camera (Camera.open() has failed).
     */
    public final static int ERROR_CAMERA_ALREADY_IN_USE = 0x00;

    /**
     * The phone may not support some streaming parameters that you are using (bit rate, frame rate...s).
     */
    public final static int ERROR_CONFIGURATION_NOT_SUPPORTED = 0x01;

    /**
     * The internal storage of the phone is not ready.
     * Libstreaming tried to store a test file on the sdcard but couldn't.
     * See H264Stream and AACStream to find out why libstreaming would want to something like that.
     */
    public final static int ERROR_STORAGE_NOT_READY = 0x02;

    /**
     * The phone has no flash.
     */
    public final static int ERROR_CAMERA_HAS_NO_FLASH = 0x03;

    /**
     * The supplied SurfaceView is not a valid surface, or has not been created yet.
     */
    public final static int ERROR_INVALID_SURFACE = 0x04;

    /**
     * The destination set with {@link Session#setDestination(String)} could not be resolved.
     * May mean that the phone has no access to the internet, or that the DNS server could not
     * resolve the host name.
     */
    public final static int ERROR_UNKNOWN_HOST = 0x05;

    /**
     * Some other error occurred !
     */
    public final static int ERROR_OTHER = 0x06;

    // Origin address advertised in the SDP "o=" line.
    private String mOrigin;
    // Destination address for all streams; advertised in the SDP "c=" line.
    private String mDestination;
    // TTL applied to outgoing RTP packets (multicast scope).
    private int mTimeToLive = 64;
    // 64-bit NTP-style timestamp used as the SDP session id/version.
    private long mTimestamp;

    private AudioStream mAudioStream = null;
    private VideoStream mVideoStream = null;

    private Callback mCallback;
    // Callbacks are always dispatched on the main (UI) thread through this handler.
    private Handler mMainHandler;

    private static CountDownLatch sSignal;
    // Worker handler shared by ALL Session instances; see release() for a caveat.
    private static Handler sHandler;

    static {
        // Creates the Thread that will be used when asynchronous methods of a Session are called
        sSignal = new CountDownLatch(1);
        new HandlerThread("net.majorkernelpanic.streaming.Session") {
            @Override
            protected void onLooperPrepared() {
                sHandler = new Handler();
                sSignal.countDown();
            }
        }.start();
    }

    /**
     * Creates a streaming session that can be customized by adding tracks.
     */
    public Session() {
        long uptime = System.currentTimeMillis();
        mMainHandler = new Handler(Looper.getMainLooper());
        // 64-bit NTP timestamp: whole seconds in the high 32 bits, the
        // fractional part (msec scaled by 2^32/1000) in the low 32 bits.
        // BUGFIX: the previous expression combined the halves with '&' and
        // shifted the sub-second remainder RIGHT by 32 bits (always 0 for a
        // value < 1000), so the whole expression always evaluated to 0.
        mTimestamp = ((uptime / 1000) << 32) | (((uptime % 1000) << 32) / 1000);
        mOrigin = "127.0.0.1";

        // We make sure that we won't send Runnables to a non existing thread
        try {
            sSignal.await();
        } catch (InterruptedException e) {
            Log.e(TAG, "InterruptedException happened", e);
        }
    }

    /**
     * The callback interface you need to implement to get some feedback
     * Those will be called from the UI thread.
     */
    public interface Callback {

        /**
         * Called periodically to inform you on the bandwidth
         * consumption of the streams when streaming.
         */
        void onBitrateUpdate(long bitrate);

        /**
         * Called when some error occurs.
         */
        void onSessionError(int reason, int streamType, Exception e);

        /**
         * Called when the preview of the {@link VideoStream}
         * has correctly been started.
         * If an error occurs while starting the preview,
         * {@link Callback#onSessionError(int, int, Exception)} will be
         * called instead of {@link Callback#onPreviewStarted()}.
         */
        void onPreviewStarted();

        /**
         * Called when the session has correctly been configured
         * after calling {@link Session#configure()}.
         * If an error occurs while configuring the {@link Session},
         * {@link Callback#onSessionError(int, int, Exception)} will be
         * called instead of  {@link Callback#onSessionConfigured()}.
         */
        void onSessionConfigured();

        /**
         * Called when the streams of the session have correctly been started.
         * If an error occurs while starting the {@link Session},
         * {@link Callback#onSessionError(int, int, Exception)} will be
         * called instead of  {@link Callback#onSessionStarted()}.
         */
        void onSessionStarted();

        /**
         * Called when the stream of the session have been stopped.
         */
        void onSessionStopped();
    }

    /**
     * You probably don't need to use that directly, use the {@link SessionBuilder}.
     */
    void addAudioTrack(AudioStream track) {
        removeAudioTrack();
        mAudioStream = track;
    }

    /**
     * You probably don't need to use that directly, use the {@link SessionBuilder}.
     */
    void addVideoTrack(VideoStream track) {
        removeVideoTrack();
        mVideoStream = track;
    }

    /**
     * You probably don't need to use that directly, use the {@link SessionBuilder}.
     */
    void removeAudioTrack() {
        if (mAudioStream != null) {
            mAudioStream.stop();
            mAudioStream = null;
        }
    }

    /**
     * You probably don't need to use that directly, use the {@link SessionBuilder}.
     */
    void removeVideoTrack() {
        if (mVideoStream != null) {
            mVideoStream.stopPreview();
            mVideoStream = null;
        }
    }

    /**
     * Returns the underlying {@link AudioStream} used by the {@link Session}.
     */
    public AudioStream getAudioTrack() {
        return mAudioStream;
    }

    /**
     * Returns the underlying {@link VideoStream} used by the {@link Session}.
     */
    public VideoStream getVideoTrack() {
        return mVideoStream;
    }

    /**
     * Sets the callback interface that will be called by the {@link Session}.
     *
     * @param callback The implementation of the {@link Callback} interface
     */
    public void setCallback(Callback callback) {
        mCallback = callback;
    }

    /**
     * The origin address of the session.
     * It appears in the session description.
     *
     * @param origin The origin address
     */
    public void setOrigin(String origin) {
        mOrigin = origin;
    }

    /**
     * The destination address for all the streams of the session.
     * Changes will be taken into account the next time you start the session.
     *
     * @param destination The destination address
     */
    public void setDestination(String destination) {
        mDestination = destination;
    }

    /**
     * Set the TTL of all packets sent during the session.
     * Changes will be taken into account the next time you start the session.
     *
     * @param ttl The Time To Live
     */
    public void setTimeToLive(int ttl) {
        mTimeToLive = ttl;
    }

    /**
     * Sets the configuration of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #configure()}.
     *
     * @param quality Quality of the stream
     */
    public void setVideoQuality(VideoQuality quality) {
        if (mVideoStream != null) {
            mVideoStream.setVideoQuality(quality);
        }
    }

    /**
     * Sets a Surface to show a preview of recorded media (video).
     * You can call this method at any time and changes will take effect next time you call {@link #start()} or {@link #startPreview()}.
     */
    public void setSurfaceView(final SurfaceView view) {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mVideoStream != null) {
                    // When the shared-buffer path is enabled, frames do not come
                    // from the local camera, so no preview surface is attached.
                    if (!SpydroidApplication.USE_SHARE_BUFFER_DATA) {
                        mVideoStream.setSurfaceView(view);
                    }
                }
            }
        });
    }

    /**
     * Sets the orientation of the preview. You can call this method at any time
     * and changes will take effect next time you call {@link #configure()}.
     *
     * @param orientation The orientation of the preview
     */
    public void setPreviewOrientation(int orientation) {
        if (mVideoStream != null) {
            mVideoStream.setPreviewOrientation(orientation);
        }
    }

    /**
     * Sets the configuration of the stream. You can call this method at any time
     * and changes will take effect next time you call {@link #configure()}.
     *
     * @param quality Quality of the stream
     */
    public void setAudioQuality(AudioQuality quality) {
        if (mAudioStream != null) {
            mAudioStream.setAudioQuality(quality);
        }
    }

    /**
     * Returns the {@link Callback} interface that was set with
     * {@link #setCallback(Callback)} or null if none was set.
     */
    public Callback getCallback() {
        return mCallback;
    }

    /**
     * Returns a Session Description that can be stored in a file or sent to a client with RTSP.
     *
     * @return The Session Description.
     * @throws IllegalStateException Thrown when {@link #setDestination(String)} has never been called.
     */
    public String getSessionDescription() {
        StringBuilder sessionDescription = new StringBuilder();
        if (mDestination == null) {
            throw new IllegalStateException("setDestination() has not been called !");
        }
        sessionDescription.append("v=0\r\n");
        // TODO: Add IPV6 support
        sessionDescription.append("o=- " + mTimestamp + " " + mTimestamp + " IN IP4 " + mOrigin + "\r\n");
        sessionDescription.append("s=Unnamed\r\n");
        sessionDescription.append("i=N/A\r\n");
        sessionDescription.append("c=IN IP4 " + mDestination + "\r\n");
        // t=0 0 means the session is permanent (we don't know when it will stop)
        sessionDescription.append("t=0 0\r\n");
        sessionDescription.append("a=recvonly\r\n");
        // Prevents two different sessions from using the same peripheral at the same time
        if (mAudioStream != null) {
            sessionDescription.append(mAudioStream.getSessionDescription());
            sessionDescription.append("a=control:trackID=" + 0 + "\r\n");
        }
        if (mVideoStream != null) {
            sessionDescription.append(mVideoStream.getSessionDescription());
            sessionDescription.append("a=control:trackID=" + 1 + "\r\n");
        }
        return sessionDescription.toString();
    }

    /**
     * Returns the destination set with {@link #setDestination(String)}.
     */
    public String getDestination() {
        return mDestination;
    }

    /**
     * Returns an approximation of the bandwidth consumed by the session in bit per seconds.
     */
    public long getBitrate() {
        long sum = 0;
        if (mAudioStream != null) sum += mAudioStream.getBitrate();
        if (mVideoStream != null) sum += mVideoStream.getBitrate();
        return sum;
    }

    /**
     * Indicates if a track is currently running.
     */
    public boolean isStreaming() {
        return (mAudioStream != null && mAudioStream.isStreaming())
                || (mVideoStream != null && mVideoStream.isStreaming());
    }

    /**
     * Configures all streams of the session.
     **/
    public void configure() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                try {
                    syncConfigure();
                } catch (final Exception e) {
                    // syncConfigure() has already reported the error through
                    // postError(); here we only log it.
                    Log.e(TAG, "Exception happened while sync the configuration", e);
                }
            }
        });
    }

    /**
     * Does the same thing as {@link #configure()}, but in a synchronous manner.
     * Throws exceptions in addition to calling a callback
     * {@link Callback#onSessionError(int, int, Exception)} when
     * an error occurs.
     **/
    public void syncConfigure() throws RuntimeException, IOException {
        // id 0 is the audio stream, id 1 the video stream.
        for (int id = 0; id < 2; id++) {
            Stream stream = id == 0 ? mAudioStream : mVideoStream;
            if (stream != null && !stream.isStreaming()) {
                try {
                    stream.configure();
                } catch (CameraInUseException e) {
                    postError(ERROR_CAMERA_ALREADY_IN_USE, id, e);
                    throw e;
                } catch (StorageUnavailableException e) {
                    postError(ERROR_STORAGE_NOT_READY, id, e);
                    throw e;
                } catch (ConfNotSupportedException e) {
                    postError(ERROR_CONFIGURATION_NOT_SUPPORTED, id, e);
                    throw e;
                } catch (InvalidSurfaceException e) {
                    postError(ERROR_INVALID_SURFACE, id, e);
                    throw e;
                } catch (IOException e) {
                    postError(ERROR_OTHER, id, e);
                    throw e;
                } catch (RuntimeException e) {
                    postError(ERROR_OTHER, id, e);
                    throw e;
                }
            }
        }
        postSessionConfigured();
    }

    /**
     * Asynchronously starts all streams of the session.
     * This method is only called from one place: {@link RtspClient#startStream()}.
     **/
    public void start() {
        Log.v(TAG, "starts all streams asynchronously");
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(TAG, "start all streams");
                    syncStart();
                } catch (Exception e) {
                    Log.e(TAG, "Exception happened while start the stream", e);
                }
            }
        });
    }

    /**
     * Returns a human readable name for a stream id (0 = audio, anything else = video).
     * Used for logging only.
     */
    private static String getStreamName(int streamId) {
        if (streamId == 0) {
            return "audioStream";
        }
        return "videoStream";
    }

    /**
     * Starts a stream in a synchronous manner.
     * Throws exceptions in addition to calling a callback.
     *
     * @param id   The id of the stream to start (distinguishes the audio stream
     *             from the video stream):
     *             0 is the audio stream,
     *             1 is the video stream.
     * @param mode Whether the stream is transported over a TCP or a UDP channel.
     **/
    public void syncStart(int id, int mode)
            throws CameraInUseException,
            ConfNotSupportedException,
            InvalidSurfaceException,
            IOException {
        Log.d(TAG, "start stream with id of " + getStreamName(id));
        Stream stream = id == 0 ? mAudioStream : mVideoStream;
        if (stream != null && !stream.isStreaming()) {
            stream.setTransferChannel(mode);
            try {
                InetAddress destination = InetAddress.getByName(mDestination);
                stream.setTimeToLive(mTimeToLive);
                stream.setDestinationAddress(destination);
                stream.start();
                // Notify "session started" only once both tracks are running
                // (or when the other track does not exist).
                if (getTrack(1 - id) == null || getTrack(1 - id).isStreaming()) {
                    postSessionStarted();
                }
                // Kick off the periodic bitrate reporter from the first track
                // that starts (or when the other track does not exist).
                if (getTrack(1 - id) == null || !getTrack(1 - id).isStreaming()) {
                    sHandler.post(mUpdateBitrate);
                }
            } catch (UnknownHostException e) {
                postError(ERROR_UNKNOWN_HOST, id, e);
                throw e;
            } catch (CameraInUseException e) {
                postError(ERROR_CAMERA_ALREADY_IN_USE, id, e);
                throw e;
            } catch (StorageUnavailableException e) {
                postError(ERROR_STORAGE_NOT_READY, id, e);
                throw e;
            } catch (ConfNotSupportedException e) {
                postError(ERROR_CONFIGURATION_NOT_SUPPORTED, id, e);
                throw e;
            } catch (InvalidSurfaceException e) {
                postError(ERROR_INVALID_SURFACE, id, e);
                throw e;
            } catch (IOException e) {
                postError(ERROR_OTHER, id, e);
                throw e;
            } catch (RuntimeException e) {
                postError(ERROR_OTHER, id, e);
                throw e;
            }
        } else {
            Log.e(TAG, "streaming abnormal");
        }
    }

    /**
     * syncStart() is only invoked from one place, {@link #start()}, and
     * {@link #start()} itself is only invoked by {@link RtspClient}, i.e.
     * it is mainly used for testing purposes. Therefore this method defaults
     * to UDP mode (the test client does not go through SETUP but starts
     * pulling the stream directly).
     * <p>
     * Does the same thing as {@link #start()}, but in a synchronous manner.
     * Throws exceptions in addition to calling a callback.
     **/
    public void syncStart() throws CameraInUseException, ConfNotSupportedException,
            InvalidSurfaceException, IOException {
        Log.v(TAG, "Session --> sync start");
        syncStart(1, RTP_OVER_UDP);
        // Only transmit the AudioStream when the shared-buffer path is disabled.
        if (!SpydroidApplication.USE_SHARE_BUFFER_DATA) {
            Log.d(TAG, "start the AudioStream");
            try {
                syncStart(0, RTP_OVER_UDP);
            } catch (final Exception e) {
                Log.e(TAG, "RuntimeException happened while start audio stream, so stop the video stream either");
                syncStop(1);
                throw e;
            }
        }
    }

    /**
     * Stops all existing streams.
     */
    public void stop() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                syncStop();
            }
        });
    }

    /**
     * Stops one stream in a synchronous manner.
     *
     * @param id The id of the stream to stop
     **/
    private void syncStop(final int id) {
        Log.d(TAG, "stop stream with TRACK id of " + id);
        Stream stream = id == 0 ? mAudioStream : mVideoStream;
        if (stream != null) {
            stream.stop();
        }
    }

    /**
     * Stops all existing streams in a synchronous manner.
     */
    public void syncStop() {
        Log.d(TAG, "sync stop session ");
        syncStop(0);
        syncStop(1);
        postSessionStopped();
    }

    /**
     * Asynchronously configures the video stream and starts the camera preview.
     * Reports success through {@link Callback#onPreviewStarted()} and failures
     * through {@link Callback#onSessionError(int, int, Exception)}.
     */
    public void startPreview() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mVideoStream != null) {
                    try {
                        mVideoStream.configure();
                        mVideoStream.startPreview();
                        postPreviewStarted();
                    } catch (CameraInUseException e) {
                        postError(ERROR_CAMERA_ALREADY_IN_USE, STREAM_VIDEO, e);
                    } catch (ConfNotSupportedException e) {
                        postError(ERROR_CONFIGURATION_NOT_SUPPORTED, STREAM_VIDEO, e);
                    } catch (InvalidSurfaceException e) {
                        postError(ERROR_INVALID_SURFACE, STREAM_VIDEO, e);
                    } catch (RuntimeException e) {
                        postError(ERROR_OTHER, STREAM_VIDEO, e);
                    } catch (StorageUnavailableException e) {
                        postError(ERROR_STORAGE_NOT_READY, STREAM_VIDEO, e);
                    } catch (IOException e) {
                        postError(ERROR_OTHER, STREAM_VIDEO, e);
                    }
                }
            }
        });
    }

    /**
     * Asynchronously stops the camera preview of the video stream, if any.
     */
    public void stopPreview() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mVideoStream != null) {
                    mVideoStream.stopPreview();
                }
            }
        });
    }

    /**
     * Asynchronously switches between the front and the back camera.
     * Reports success through {@link Callback#onPreviewStarted()} and failures
     * through {@link Callback#onSessionError(int, int, Exception)}.
     */
    public void switchCamera() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mVideoStream != null) {
                    try {
                        mVideoStream.switchCamera();
                        postPreviewStarted();
                    } catch (CameraInUseException e) {
                        postError(ERROR_CAMERA_ALREADY_IN_USE, STREAM_VIDEO, e);
                    } catch (ConfNotSupportedException e) {
                        postError(ERROR_CONFIGURATION_NOT_SUPPORTED, STREAM_VIDEO, e);
                    } catch (InvalidSurfaceException e) {
                        postError(ERROR_INVALID_SURFACE, STREAM_VIDEO, e);
                    } catch (IOException e) {
                        postError(ERROR_OTHER, STREAM_VIDEO, e);
                    } catch (RuntimeException e) {
                        postError(ERROR_OTHER, STREAM_VIDEO, e);
                    }
                }
            }
        });
    }

    /**
     * Returns the id of the camera currently used by the video stream,
     * or 0 if there is no video stream.
     */
    public int getCamera() {
        return mVideoStream != null ? mVideoStream.getCamera() : 0;

    }

    /**
     * Asynchronously toggles the camera flash of the video stream.
     * A failure is reported as {@link #ERROR_CAMERA_HAS_NO_FLASH}.
     */
    public void toggleFlash() {
        sHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mVideoStream != null) {
                    try {
                        mVideoStream.toggleFlash();
                    } catch (RuntimeException e) {
                        postError(ERROR_CAMERA_HAS_NO_FLASH, STREAM_VIDEO, e);
                    }
                }
            }
        });
    }

    /**
     * Deletes all existing tracks & release associated resources.
     * <p>
     * NOTE(review): sHandler is static and shared by every Session instance;
     * quitting its Looper here makes all asynchronous methods of any Session
     * created afterwards unusable — confirm this is only called on app shutdown.
     */
    public void release() {
        removeAudioTrack();
        removeVideoTrack();
        sHandler.getLooper().quit();
    }

    // Dispatches Callback.onPreviewStarted() on the UI thread.
    private void postPreviewStarted() {
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    mCallback.onPreviewStarted();
                }
            }
        });
    }

    // Dispatches Callback.onSessionConfigured() on the UI thread.
    private void postSessionConfigured() {
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    mCallback.onSessionConfigured();
                }
            }
        });
    }

    // Dispatches Callback.onSessionStarted() on the UI thread.
    private void postSessionStarted() {
        Log.d(TAG, "session started");
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    Log.d(TAG, "notify the callback that the session has been started");
                    mCallback.onSessionStarted();
                }
            }
        });
    }

    // Dispatches Callback.onSessionStopped() on the UI thread.
    private void postSessionStopped() {
        Log.d(TAG, "session stopped");
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    mCallback.onSessionStopped();
                }
            }
        });
    }

    // Dispatches Callback.onSessionError(...) on the UI thread (and logs it).
    private void postError(final int reason, final int streamType, final Exception e) {
        Log.e(TAG, "error happened with reason of " + reason + ", with stream type of : " + getStreamName(streamType), e);
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    mCallback.onSessionError(reason, streamType, e);
                }
            }
        });
    }

    // Dispatches Callback.onBitrateUpdate(...) on the UI thread.
    private void postBitRate(final long bitrate) {
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mCallback != null) {
                    mCallback.onBitrateUpdate(bitrate);
                }
            }
        });
    }

    // Periodic task reporting the session bitrate every 500 ms while streaming;
    // reschedules itself on sHandler and stops (after reporting 0) once no
    // track is streaming anymore.
    private Runnable mUpdateBitrate = new Runnable() {
        @Override
        public void run() {
            if (isStreaming()) {
                postBitRate(getBitrate());
                sHandler.postDelayed(mUpdateBitrate, 500);
            } else {
                postBitRate(0);
            }
        }
    };

    /**
     * Indicates whether the track with the given id exists
     * (0 = audio, anything else = video).
     */
    public boolean trackExists(int id) {
        if (id == 0) {
            return mAudioStream != null;
        } else {
            return mVideoStream != null;
        }
    }

    /**
     * Returns the track with the given id (0 = audio, anything else = video),
     * or null if it does not exist.
     */
    public Stream getTrack(int id) {
        if (id == 0) {
            return mAudioStream;
        } else {
            return mVideoStream;
        }
    }

}
