﻿using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.UI;

/// <summary>
/// Unity player component that drives a native <see cref="StreamCapture"/> pipeline:
/// opens a URL/camera stream, pulls decoded video frames on the main thread
/// (<see cref="Update"/>) and decoded audio frames on the audio callback thread,
/// and keeps them in sync via software clocks (audio is master when present).
/// </summary>
public class SCPlayer : MonoBehaviour
{
    [Header("Renderer")]
    public RawImage[] rendererRawimage;   // UI targets that receive the video material on first frame
    public MeshRenderer[] rendererMesh;   // scene-mesh targets that receive the video material on first frame
    [Space]
    [Header("Setting")]
    public OpenModel openModel = OpenModel.URL;
    public int openTimeout = 10000;       // ms, forwarded to capture OpenTimeout option
    public int readTimeout = 10000;       // ms, forwarded to capture ReadTimeout option
    public bool enableVideo = true;
    public bool enableAudio = true;
    public int cameraWidth = 640;
    public int cameraHeight = 480;
    public int cameraFrameRate = 30;
    public CIFP cameraInputPriority = CIFP.AUTO;
    public bool nobuffer = false;
    public bool codecForceLowDelay = false;
    public OutPixFmt outpixfmt = OutPixFmt.PIX_FMT_AUTO;
    public RtspTransportType rtspTransportType = RtspTransportType.UDP;

    public string url = "F:/HTTPServer/Faded.mp4";
    private StreamCapture capture;
    /// <summary>Direct access to the underlying capture object.</summary>
    public StreamCapture Control
    {
        get { return capture; }
    }

    private Material rendererMat;         // filled in by SCCanvas.InitCanvas, applied on first video frame
    //private Texture2D[] texs = new Texture2D[3];

    private bool isClosed = true;
    private SCClock videoClock;
    private SCClock audioClock;
    private SCClock masterClock;          // audioClock when an audio stream exists, else videoClock
    private long totalMs;
    /// <summary>Total media duration in milliseconds (0 when closed).</summary>
    public long TotalMs { get { return totalMs; } }
    private long frame_timer;             // wall-clock time (ms) the current frame was presented at
    private bool isPause = false;
    public bool IsPause { get { return isPause; } }

    public bool isLoop = false;
    public bool nosync = false;           // when true, frames are shown as fast as they arrive
    public bool openAutoPlay = true;
    private bool firstFramePause = false; // when openAutoPlay is off: render one frame, then pause

    [Range(0.0f, 1.0f)]
    public float volume = 0.5f;

    private bool isStep = false;
    private SCCanvas canvas;
    private SCAudioPlay audioPlay;
    private SCResampler resampler;
    private int bytes_per_sec;            // output rate: samplerate * 2 bytes (16-bit) * channels
    private int audio_hw_buf_size;        // bytes queued in the hardware buffer, used for clock correction
    private List<byte> pcm = new List<byte>();   // resampled PCM staging buffer, consumed by AudioCallback
    private AudioStream srcas;            // source audio parameters, needed to open the resampler
    private bool prepareVideoOk = false;
    private bool prepareAudioOk = false;
    private bool refreshVideo = false;    // gate: UpdateVideo only runs once video is prepared
    private bool isUpdateFirstVideoFrame = false;
    public event Action<StreamsParameters> OnOpenResultEvent;
    public event Action<InterruptCode> OnInterruptEvent;
    public event Action<string> OnCreateCanvasFailedEvent;

    private void Awake()
    {
        capture = new StreamCapture();
        audioPlay = new SCAudioPlay();
        resampler = new SCResampler();
    }

    // Use this for initialization
    void Start()
    {

    }

    // Update is called once per frame
    void Update()
    {
        UpdateVideo();
    }

    /// <summary>
    /// Pulls the next decoded video frame, applies A/V sync delay, and renders it.
    /// Runs on the Unity main thread; no-op until <see cref="refreshVideo"/> is set.
    /// </summary>
    private void UpdateVideo()
    {
        if (!refreshVideo || isPause) return;

        Frame frame, lastFrame;
        double last_duration;
        double delay;
        long time;
        bool forcedraw = false;
        bool ignoreFrame = false;
        //seekMux.lock () ;
        frame = capture.TryGrabFrame(FrameType.VIDEO);
        //seekMux.unlock();
        if (frame == null)
        {
            // NOTE(review): sleeping on the main thread stalls the whole frame;
            // presumably acceptable here since no frame means nothing to draw — confirm.
            Thread.Sleep(10);
        }
        else
        {

            lastFrame = capture.TryGrabLastFrame(FrameType.VIDEO);
            if (frame.flag == FrameFlag.EOF)
            {
                // Only the sync master restarts playback, so looping triggers exactly once.
                if (isLoop && masterClock == videoClock)
                    Seek(0);
                return;
            }

            last_duration = LastDuration(lastFrame, frame);

            delay = ComputeTargetDelay(last_duration, ref ignoreFrame);

            time = IStreamCapture.GetTimestampUTC() / 1000;
            // Not yet time to show this frame (unless we are dropping it to catch up).
            if ((time < frame_timer + delay) && !ignoreFrame)
                return;


            if (!nosync && !ignoreFrame)
            {
                frame_timer += (int)delay;
                // Resync the frame timer if we have drifted too far behind real time.
                if (delay > 0 && time - frame_timer > AV_SYNC_THRESHOLD_MAX)
                    frame_timer = time;
            }

            videoClock.SetClock(frame.pts);
            forcedraw = true;

            if (firstFramePause)
            {
                isPause = true;
                firstFramePause = false;
            }

            if (isStep)
            {
                isStep = false;
                isPause = true;
            }
        }

        if (forcedraw)
        {
            if (canvas != null)
            {
                if (!isUpdateFirstVideoFrame)
                {
                    OnFirstFrame();
                    isUpdateFirstVideoFrame = true;
                }

                canvas.Renderer(frame);


            }
            // When the frame was flagged for dropping, two frames are removed:
            // the one just shown plus one extra to catch up with the master clock.
            // NOTE(review): the double removal looks intentional frame-dropping — confirm.
            if (ignoreFrame)
                capture.RemoveFrame(FrameType.VIDEO);
            capture.RemoveFrame(FrameType.VIDEO);
        }
    }

    /// <summary>Assigns the video material to all configured render targets (first frame only).</summary>
    private void OnFirstFrame()
    {
        for (int i = 0; i < rendererRawimage.Length; i++)
            rendererRawimage[i].material = rendererMat;

        for (int i = 0; i < rendererMesh.Length; i++)
            rendererMesh[i].material = rendererMat;

    }

    /// <summary>
    /// Closes any current stream, pushes all Inspector settings into the capture
    /// object, and opens <see cref="url"/> asynchronously. The result arrives via
    /// <see cref="OnOpenResultEvent"/>.
    /// </summary>
    public void Open()
    {
        Close();
        isPause = false;
        isUpdateFirstVideoFrame = false;
        firstFramePause = !openAutoPlay;  // render one frame then pause when auto-play is off
        capture.url = url;
        capture.SetOption(OptionType.OpenTimeout, openTimeout);
        capture.SetOption(OptionType.ReadTimeout, readTimeout);
        capture.SetOption(OptionType.EnableVideo, enableVideo ? 1 : 0);
        capture.SetOption(OptionType.EnableAudio, enableAudio ? 1 : 0);
        capture.SetOption(OptionType.OpenModel, (int)openModel);
        capture.SetOption(OptionType.CameraWidth, cameraWidth);
        capture.SetOption(OptionType.CameraHeight, cameraHeight);
        capture.SetOption(OptionType.CameraFrameRate, cameraFrameRate);
        capture.SetOption(OptionType.CameraInputFormatPriority, (int)cameraInputPriority);
        capture.SetOption(OptionType.NoBuffer, nobuffer ? 1 : 0);
        capture.SetOption(OptionType.CodecForceLowDelay, codecForceLowDelay ? 1 : 0);
        capture.SetOption(OptionType.OutputPixelformat, (int)outpixfmt);
        capture.SetOption(OptionType.RTSPTransport, (int)rtspTransportType);
        capture.RegisterInterruptCallback(OnInterrupt);
        capture.Open(OpenCallback);
    }

    /// <summary>
    /// Async completion of <see cref="Open"/>: validates the result, creates the
    /// video canvas and clocks, and kicks off audio initialization when present.
    /// </summary>
    private void OpenCallback(StreamsParameters sp)
    {
        if (sp.retcode != RetcodeType.Success)
        {
            if (sp.retcode == RetcodeType.Alloc_Context_Failed)
                Debug.LogWarning("Alloc Context Failed");
            else if (sp.retcode == RetcodeType.FFmpeg_Log)
                Debug.LogWarning(sp.errLog);
            else if (sp.retcode == RetcodeType.OpenTimeout)
                Debug.Log("Open Timeout");
            Close();
            if (OnOpenResultEvent != null) OnOpenResultEvent(sp);
            return;
        }
        Debug.Log(sp.describe);
        if (sp.videoStream == null && sp.audioStream == null)
        {
            Debug.LogWarning("Does not contain any audio or video streams");
            Close();
            if (OnOpenResultEvent != null) OnOpenResultEvent(sp);
            return;
        }

        isClosed = false;
        videoClock = new SCClock();
        audioClock = new SCClock();
        prepareVideoOk = false;
        prepareAudioOk = false;
        if (sp.videoStream != null)
        {
            if (!CreateCanvas(sp.videoStream))
                return;
            if (!Enum.IsDefined(typeof(OutPixFmt), sp.videoStream.pixelfmt))
                Debug.LogFormat("pixel format:{0}(See FFmpeg AVPixelFormat for more information)", sp.videoStream.pixelfmt);
            else
                Debug.LogFormat("pixel format:{0}", (OutPixFmt)sp.videoStream.pixelfmt);
            prepareVideoOk = true;
            masterClock = videoClock;
        }
        if (sp.audioStream != null)
        {
            srcas = sp.audioStream;
            audioPlay.OpenAsync(srcas, OnOpenAudioCb, AudioCallback);
            // Audio wins as master clock when both streams exist (assigned after video above).
            masterClock = audioClock;
        }
        else
        {
            PrepareFinish();
        }

        totalMs = capture.GetDuration();

        if (OnOpenResultEvent != null) OnOpenResultEvent(sp);
    }

    /// <summary>Native interrupt (timeout/error): tear down and forward to subscribers.</summary>
    private void OnInterrupt(InterruptCode code)
    {
        Close();
        if (OnInterruptEvent != null) OnInterruptEvent(code);
    }

    /// <summary>
    /// Async completion of <see cref="SCAudioPlay.OpenAsync"/>: caches output
    /// format numbers, opens the resampler, and marks audio prepared.
    /// </summary>
    private void OnOpenAudioCb(AudioPlayParams app)
    {
        if (app == null)
        {
            Debug.LogError("Open AudioPlay failed!");
            Close();
            return;
        }
        // Output is 16-bit PCM: samplerate * 2 bytes * channels.
        bytes_per_sec = app.desas.samplerate * (16 / 8) * app.desas.channels;
        audio_hw_buf_size = app.hwSize;

        resampler.Close();
        if (!resampler.Open(srcas, app.desas))
        {
            Debug.LogError("Open Resampler failed!");
            Close();
            return;
        }

        // Fix: previously audioPlay.Start() was called directly here and
        // prepareAudioOk was never set, leaving PrepareFinish's audio branch dead.
        // Setting the flag and delegating to PrepareFinish starts audio exactly once.
        prepareAudioOk = true;
        PrepareFinish();
    }

    /// <summary>Enables the prepared pipelines: video refresh loop and/or audio playback.</summary>
    private void PrepareFinish()
    {
        if (prepareVideoOk) refreshVideo = true;
        if (prepareAudioOk) audioPlay.Start();
    }

    /// <summary>
    /// Stops playback and releases per-stream state. Safe to call repeatedly;
    /// the component can be reopened afterwards with <see cref="Open"/>.
    /// </summary>
    public void Close()
    {
        totalMs = 0;
        for (int i = 0; i < rendererRawimage.Length; i++)
            rendererRawimage[i].material = null;
        // Fix: also clear mesh renderers — OnFirstFrame assigns the material to
        // both target arrays, so teardown must mirror that.
        for (int i = 0; i < rendererMesh.Length; i++)
            rendererMesh[i].material = null;
        refreshVideo = false;
        isClosed = true;
        if (capture != null)
            capture.Close();
        if (audioPlay != null)
            audioPlay.Close();
        if (resampler != null)
            resampler.Close();
        pcm.Clear();
    }

    /// <summary>Resumes playback (clears pause).</summary>
    public void Play()
    {
        isPause = false;
    }

    /// <summary>Pauses playback; audio output is silenced by <see cref="AudioCallback"/>.</summary>
    public void Pause()
    {
        isPause = true;
    }

    private void OnDestroy()
    {
        Close();
        // Null guards kept consistent across all three native wrappers.
        if (capture != null)
            capture.Release();
        if (audioPlay != null)
            audioPlay.Release();
        if (resampler != null)
            resampler.Release();
        capture = null;
        audioPlay = null;
        resampler = null;
    }

    /// <summary>
    /// Creates the rendering canvas for the stream's pixel format and initializes
    /// <see cref="rendererMat"/>. Returns false (after closing) when the format is unsupported.
    /// </summary>
    private bool CreateCanvas(VideoStream vs)
    {
        int fmt = outpixfmt == OutPixFmt.PIX_FMT_AUTO ? vs.pixelfmt : (int)outpixfmt;
        canvas = SCCanvas.CreateCanvas(fmt);
        if (canvas == null)
        {
            string msg = "The target pixel format is not supported in the current version\n The target format:" + fmt;
            msg += "\n Please refer to FFmpeg AVPixelFormat for format type\n";
            msg += "You can also set the output pixel format to play in Inspector panel";
            Debug.LogWarning("Create canvas failed!\n" + msg);
            if (OnCreateCanvasFailedEvent != null) OnCreateCanvasFailedEvent(msg);
            // NOTE(review): clearing all OnInterruptEvent subscribers here is surprising;
            // presumably it suppresses interrupt notifications during the Close below — confirm.
            OnInterruptEvent = null;
            Close();
            return false;
        }
        canvas.InitCanvas(ref rendererMat, vs);
        return true;
    }

    /// <summary>Seeks to a position expressed as a fraction/percentage (forwarded to capture).</summary>
    public void Seek(double per)
    {
        capture.Seek(per);
    }

    /// <summary>
    /// Duration (ms) of the previous frame, derived from consecutive PTS values;
    /// falls back to the frame's own duration when the diff is unusable.
    /// </summary>
    double LastDuration(Frame lastFrame, Frame frame)
    {
        // Defensive: no previous frame yet — use the current frame's nominal duration.
        if (lastFrame == null)
            return frame.duration;
        double duration = frame.pts - lastFrame.pts;
        if (double.IsNaN(duration) || duration <= 0 || duration > 10000)
            return lastFrame.duration;
        else
            return duration;
    }

    private const int AV_SYNC_THRESHOLD_MIN = 40;       // ms, minimum sync threshold
    private const int AV_SYNC_THRESHOLD_MAX = 100;      // ms, maximum sync threshold
    private const int AV_SYNC_FRAMEDUP_THRESHOLD = 100; // ms, above this a frame may be repeated

    /// <summary>
    /// ffplay-style target delay: stretches or shrinks the nominal frame delay
    /// based on how far the video clock is from the master clock. Sets
    /// <paramref name="ignore"/> when the frame should be dropped to catch up.
    /// </summary>
    private double ComputeTargetDelay(double delay, ref bool ignore)
    {
        if (videoClock == null)
        {
            Debug.LogError("VideoClock is null");
        }
        if (masterClock == null)
        {
            Debug.LogError("MasterClock is null");
        }
        // Difference between the video clock and the master clock.
        double diff = videoClock.GetClock() - masterClock.GetClock();
        double sync_threshold = Mathf.Max(AV_SYNC_THRESHOLD_MIN, Math.Min(AV_SYNC_THRESHOLD_MAX, (float)delay));

        //Debug.Log(diff);
        // Video lags behind audio by more than the threshold: drop frames to catch up.
        if (diff <= -sync_threshold)
        {
            ignore = true;
            delay = Mathf.Max(0, (float)(delay + diff));
            Debug.LogWarningFormat("Video is too slow, more than the threshold:{0}", diff);
        }
        else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD)
            delay = delay + diff;
        // Video is ahead of audio: slow presentation down.
        else if (diff >= sync_threshold)
            delay = 1.5 * delay;


        return delay;
    }

    /// <summary>
    /// Audio device callback (runs on the audio thread): fills <paramref name="stream"/>
    /// with <paramref name="len"/> bytes of resampled PCM at the current volume,
    /// refilling the staging buffer from the capture queue as needed, and updates
    /// the audio clock compensating for still-unplayed buffered bytes.
    /// </summary>
    private void AudioCallback(IntPtr stream, int len)
    {
        if (SCInstance.isPaused)
        {
            audioPlay.Memset(stream, 0, len);
            return;
        }
        long audio_callback_time = IStreamCapture.GetTimestampUTC();
        int audio_clock = 0;
        while (pcm.Count < len && !isClosed && !isPause)
        {
            Frame frame = capture.TryGrabFrame(FrameType.AUDIO);
            if (frame == null)
            {
                Thread.Sleep(5);
                continue;
            }

            if (frame.flag == FrameFlag.EOF)
            {
                if (isLoop && masterClock == audioClock)
                    Seek(0);
                break;
            }
            ResampleData rd = resampler.Resample(frame);
            byte[] buffer = new byte[rd.len];
            Marshal.Copy(rd.data, buffer, 0, rd.len);
            pcm.AddRange(buffer);
            // PTS of the END of this frame, in ms.
            audio_clock = (int)(frame.pts + (double)frame.nb_samples / frame.sample_rate * 1000);
            capture.RemoveFrame(FrameType.AUDIO);
        }
        if (isPause)
        {
            audioPlay.Memset(stream, 0, len);
            return;
        }
        int minLen = Mathf.Min(len, pcm.Count);

        // Copy only the bytes actually consumed this callback instead of
        // materializing the whole staging list with ToArray() every time.
        byte[] outBuf = new byte[minLen];
        pcm.CopyTo(0, outBuf, 0, minLen);
        audioPlay.MixAudioFormat(stream, outBuf, minLen, volume);
        pcm.RemoveRange(0, minLen);

        // Correct the clock backwards by the audio that is buffered but not yet audible.
        double noplayBuffSize = audio_hw_buf_size + pcm.Count;
        int noplayms = (int)(noplayBuffSize / bytes_per_sec * 1000);
        int crtpts = audio_clock - noplayms;
        audioClock.SetClockAt(crtpts, audio_callback_time / 1000);
    }
}
