package io.agora.demo.streaming.beauty;

import android.app.Activity;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.View;

import java.util.concurrent.Callable;

import cn.tillusory.sdk.TiSDKManager;
import cn.tillusory.sdk.bean.TiRotation;
import io.agora.base.TextureBufferHelper;
import io.agora.base.VideoFrame;

// TODO --- tillusory start ---
/**
 * Beauty filter that routes camera texture frames through the Tillusory SDK
 * before handing them back to the Agora SDK.
 *
 * <p>Threading: {@link #process(VideoFrame)} is invoked on the capture path and
 * executes the actual render on the helper's dedicated "RenderThread" via
 * {@link TextureBufferHelper#invoke}; {@link #onCameraChange(int, int)} and
 * {@link #deinit()} are called from other threads. {@code mRenderLock} guards
 * helper creation/use/teardown; {@code mSkipLock} guards the frame-skip counter.
 */
public class BeautyVideoFilter extends BaseBeautyVideoFilter {
    private static final String TAG = BeautyVideoFilter.class.getSimpleName();

    /** Global switch for the beauty pipeline; the filter is bypassed when false. */
    public static final boolean enableBeauty = false;

    /** Guards creation, use and destruction of the render resources. */
    private final Object mRenderLock = new Object();
    /**
     * Guards {@link #skipCount}. The original code synchronized on two different
     * monitors (the anonymous Callable vs. the filter instance), which provided
     * no mutual exclusion at all; both sides now share this single lock.
     */
    private final Object mSkipLock = new Object();
    private TextureBufferHelper mTextureBufferHelper;
    private int lastInputTextureId = 0;
    private int skipCount = 0;  // frames left to skip after a camera switch
    // Written by onCameraChange() on the caller thread, read on the render
    // thread — volatile for cross-thread visibility.
    private volatile boolean isFrontCamera = true;

    public void init(Activity activity) {

    }

    /** Tears down the Tillusory SDK and releases the lazily created render helper. */
    public void deinit() {
        synchronized (mRenderLock) {
            TiSDKManager.getInstance().destroy();
            // Release the helper (and its "RenderThread") created lazily in
            // process(); previously it was leaked on deinit.
            // NOTE(review): assumes TextureBufferHelper.dispose() quits the
            // thread and frees GL resources — confirm against io.agora.base.
            if (mTextureBufferHelper != null) {
                mTextureBufferHelper.dispose();
                mTextureBufferHelper = null;
            }
        }
    }

    /** @return an optional settings view for the beauty UI; none provided here. */
    public View getActionView() {
        return null;
    }

    // SensorEventListener start
    @Override
    public void onSensorChanged(SensorEvent event) {

    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {

    }
    // SensorEventListener  end

    // SurfaceHolder.Callback start
    @Override
    public void surfaceCreated(SurfaceHolder holder) {

    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {

    }
    // SurfaceHolder.Callback end

    // VideoFilter start
    /**
     * Runs one texture frame through the Tillusory renderer.
     *
     * @param videoFrame incoming frame; must carry a {@link VideoFrame.TextureBuffer}
     * @return the processed frame, the unprocessed input while frames are being
     *         skipped after a camera switch, or {@code null} when the frame is
     *         dropped (non-texture buffer, helper creation failure, output
     *         texture still in use, or input texture id change)
     */
    @Override
    public VideoFrame process(VideoFrame videoFrame) {
        if (!(videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer)) {
            Log.e(TAG, "Receives a non-texture buffer, which should not happen!");
            return null;
        }

        final VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) videoFrame.getBuffer();
        Log.d(TAG, "video frame, type = " + texBuffer.getType() + ", id = " + texBuffer.getTextureId() + ", rotation = " + videoFrame.getRotation());

        synchronized (mRenderLock) {
            // Lazily create the helper on first frame; it owns the GL thread.
            if (mTextureBufferHelper == null) {
                mTextureBufferHelper = TextureBufferHelper.create("RenderThread",
                        texBuffer.getEglBaseContext());
                if (mTextureBufferHelper == null) {
                    Log.e(TAG, "Failed to create texture buffer helper!");
                    return null;
                }
            }

            return mTextureBufferHelper.invoke(new Callable<VideoFrame>() {
                @Override
                public VideoFrame call() throws Exception {
                    // Drop incoming frame if output texture buffer is still in use.
                    if (mTextureBufferHelper.isTextureInUse()) {
                        return null;
                    }

                    // Process frame
                    int tiTex = TiSDKManager.getInstance()
                        .renderOESTexture(texBuffer.getTextureId(),
                            texBuffer.getWidth(),
                            texBuffer.getHeight(),
                            TiRotation.CLOCKWISE_ROTATION_270, isFrontCamera);

                    // Drop the frame if the incoming texture id changes, which occurs for the
                    // first frame on start or after camera switching.
                    // This avoids rendering a black frame (the first output frame on start)
                    // or a staled frame (the first output frame after camera switching),
                    if (lastInputTextureId != texBuffer.getTextureId()) {
                        lastInputTextureId = texBuffer.getTextureId();
                        Log.i(TAG, "Dropping frame since the source of input is changing");
                        return null;
                    }

                    // Skip some frames on camera switch to avoid an upside-down image.
                    // BUG FIX: the original synchronized on "this" (the anonymous
                    // Callable) while onCameraChange() synchronized on the filter
                    // instance — different locks, so the counter was unguarded.
                    // Also only decrement while positive so it can't underflow.
                    synchronized (mSkipLock) {
                        if (skipCount > 0) {
                            skipCount--;
                            return videoFrame;
                        }
                    }

                    // Return processed frame to Agora SDK.
                    VideoFrame.TextureBuffer processedBuffer = mTextureBufferHelper.wrapTextureBuffer(
                            texBuffer.getWidth(), texBuffer.getHeight(), VideoFrame.TextureBuffer.Type.RGB,
                            tiTex, texBuffer.getTransformMatrix());
                    return new VideoFrame(processedBuffer, videoFrame.getRotation(),
                            videoFrame.getTimestampNs());
                }
            });
        }
    }
    // VideoFilter end

    /**
     * Notifies the filter of a camera switch so it can skip a couple of frames
     * (avoids a transiently upside-down image) and flip the front/back flag.
     */
    public void onCameraChange(int currentCameraType, int inputImageOrientation) {
        synchronized (mSkipLock) {
            // skip some frames on switch camera to avoid image stand upside down
            skipCount = 2;
        }
        isFrontCamera = !isFrontCamera;
    }

    public void onActivityResume() {
    }

    public void onActivityPause() {

    }
}
// TODO --- tillusory end ---
