package cn.livetec.gmlive;

import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.TextView;

import org.adw.library.widgets.discreteseekbar.DiscreteSeekBar;

import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.Arrays;
import java.util.List;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import cn.livetec.gmlive.gles.FullFrameRect;
import cn.livetec.gmlive.gles.GlUtil;
import cn.livetec.gmlive.gles.Texture2dProgram;
import cn.livetec.matting.R;
import cn.livetec.matting.lib.LiveTecBackgroundFrame;
import cn.livetec.matting.lib.LiveTecInputFrame;
import cn.livetec.matting.lib.LiveTecOutputFrame;
import cn.livetec.matting.lib.LiveTecRenderConfig;
import cn.livetec.matting.lib.LivetecWrapper;

public class MainActivity extends Activity implements SurfaceTexture.OnFrameAvailableListener {

    // Requested camera preview size (sensor/landscape orientation).
    int cameraWidth = 1280;
    int cameraHeight = 720;

    // GL view hosting the renderer below; added to the aspect-ratio frame in onCreate.
    GLSurfaceView glSf;
    GLRenderer glRenderer;

    // Legacy android.hardware.Camera instance and which way it faces.
    Camera mCamera;
    int mCurrentCameraType;
    // True once openCamera() completed; reset in onPause so onResume reopens.
    boolean mIsOpenCameraSuccess = false;

    String TAG = "MainActivity";

    // Main-looper handler used by the GL thread to request preview start.
    Handler mMainHandler;

    // Background-selection tiles (three static images, two animated sequences).
    TextView staticImgTV1;
    TextView staticImgTV2;
    TextView staticImgTV3;
    TextView dynamicImgTV1;
    TextView dynamicImgTV2;

    // Plays the looping music that accompanies the dynamic backgrounds.
    MediaPlayer mediaPlayer;

    // Status labels updated from the render loop / SDK setup.
    TextView tvFPS;
    TextView tvCPU;
    TextView tvSetupHint;

    // Matting-mode toggle labels (green screen vs. blue screen).
    TextView tvGreenmode;
    TextView tvBluemode;

    DiscreteSeekBar transparencySeekBar;

    // 0..1 transparency applied to the matting config each frame.
    float transparencyValue = 0.5f;

    /**
     * Builds the UI: requests the CAMERA runtime permission if missing,
     * attaches a GLES2 GLSurfaceView (render-on-demand) inside the
     * aspect-ratio frame, looks up the control views and wires the
     * transparency seek bar.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setContentView(R.layout.activity_main);

        // Runtime permission; grant result handled in onRequestPermissionsResult.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) !=
                PackageManager.PERMISSION_GRANTED ) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.CAMERA}, 0);
        }

        glRenderer = new GLRenderer();
        glSf = new GLSurfaceView(MainActivity.this);

        mMainHandler = new MainHandler(MainActivity.this);

        // Constrain the preview container to the camera's aspect ratio (720/1280).
        AspectFrameLayout aspectFrameLayout = (AspectFrameLayout) findViewById(R.id.afl);
        aspectFrameLayout.setAspectRatio(1.0f * cameraHeight / cameraWidth);
        glSf.setEGLContextClientVersion(2);
        glSf.setRenderer(glRenderer);
        // Only render when a camera frame arrives (see onFrameAvailable).
        glSf.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        // NOTE(review): likely redundant — the lifecycle onResume() also calls
        // glSf.onResume(); confirm before removing.
        glSf.onResume();
        aspectFrameLayout.addView(glSf);

        // Background-selection tiles.
        staticImgTV1 = (TextView) findViewById(R.id.staticimg1);
        staticImgTV2 = (TextView) findViewById(R.id.staticimg2);
        staticImgTV3 = (TextView) findViewById(R.id.staticimg3);
        dynamicImgTV1 = (TextView) findViewById(R.id.dynamicimg1);
        dynamicImgTV2 = (TextView) findViewById(R.id.dynamicimg2);

        //mediaPlayer = new MediaPlayer();
        tvFPS = (TextView) findViewById(R.id.tv_fps);
        tvCPU = (TextView) findViewById(R.id.tv_cpu);
        tvSetupHint = (TextView) findViewById(R.id.tv_setup_hint);

        tvGreenmode = (TextView) findViewById(R.id.green_mode);
        tvBluemode = (TextView) findViewById(R.id.blue_mode);

        // Seek bar value 0..100 is mapped onto transparencyValue 0..1, which
        // onDrawFrame applies to the matting config every frame.
        transparencySeekBar = (DiscreteSeekBar) findViewById(R.id.transparency_level_seekbar);
        transparencySeekBar.setOnProgressChangeListener(new DiscreteSeekBar.OnProgressChangeListener() {
            @Override
            public void onProgressChanged(DiscreteSeekBar seekBar, int value, boolean fromUser) {
                transparencyValue = 1.0f * value / 100;
            }

            @Override
            public void onStartTrackingTouch(DiscreteSeekBar seekBar) {

            }

            @Override
            public void onStopTrackingTouch(DiscreteSeekBar seekBar) {

            }
        });
    }

    /**
     * Reopens the back camera (it is released in onPause, so this runs on
     * every foreground transition) and resumes GL rendering.
     */
    @Override
    protected void onResume() {
        super.onResume();

        if (!mIsOpenCameraSuccess) {
            openCamera(Camera.CameraInfo.CAMERA_FACING_BACK,
                    cameraWidth,
                    cameraHeight);
        }

        glSf.onResume();
    }

    /**
     * Releases the camera, pauses GL rendering and resets the matting SDK so
     * everything is rebuilt cleanly on the next resume / surface creation.
     */
    @Override
    protected void onPause() {
        super.onPause();

        // Force onResume to go through openCamera again.
        mIsOpenCameraSuccess = false;

        releaseCamera();

        glSf.onPause();

        LivetecWrapper.liveTecReset();
    }

    /**
     * Attaches the camera preview to the renderer's SurfaceTexture and starts
     * streaming. New frames trigger {@link #onFrameAvailable(SurfaceTexture)},
     * which schedules a render pass.
     *
     * @param surfaceTexture the OES-backed texture created in onSurfaceCreated
     */
    private void handleCameraStartPreview(SurfaceTexture surfaceTexture) {
        Log.e(TAG, "handleCameraStartPreview");
        // Register the listener before attaching so no early frame is missed.
        surfaceTexture.setOnFrameAvailableListener(this);
        try {
            mCamera.setPreviewTexture(surfaceTexture);
        } catch (IOException e) {
            // Fix: previously the exception was swallowed and startPreview()
            // was still called on a camera whose preview target never attached.
            e.printStackTrace();
            return;
        }
        mCamera.startPreview();
    }

    /**
     * Invoked (on an arbitrary thread) whenever the camera delivers a frame
     * into the SurfaceTexture; requests a render pass since the view runs in
     * RENDERMODE_WHEN_DIRTY.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        glSf.requestRender();
    }

    /**
     * Opens the first camera facing {@code cameraType} and configures it:
     * continuous-video focus when supported, the highest supported preview fps
     * range, display rotation and a preview size near the desired dimensions.
     *
     * @param cameraType    Camera.CameraInfo.CAMERA_FACING_BACK or _FRONT
     * @param desiredWidth  preferred preview width
     * @param desiredHeight preferred preview height
     * @throws RuntimeException if a camera is already held or none matches
     */
    @SuppressWarnings("deprecation")
    private void openCamera(int cameraType, int desiredWidth, int desiredHeight) {
        Log.d(TAG, "openCamera");
        if (mCamera != null) {
            throw new RuntimeException("camera already initialized");
        }

        Camera.CameraInfo info = new Camera.CameraInfo();
        int cameraId = 0;
        int numCameras = Camera.getNumberOfCameras();
        for (int i = 0; i < numCameras; i++) {
            Camera.getCameraInfo(i, info);
            if (info.facing == cameraType) {
                cameraId = i;
                mCamera = Camera.open(i);
                mCurrentCameraType = cameraType;
                break;
            }
        }
        if (mCamera == null) {
            throw new RuntimeException("unable to open camera");
        }

        CameraUtils.setCameraDisplayOrientation(this, cameraId, mCamera);

        Camera.Parameters parameters = mCamera.getParameters();
        // Hints the driver to favor a steady frame rate (video-style use).
        parameters.setRecordingHint(true);

        List<String> focusModes = parameters.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            Log.e(TAG, "focuse mode continous video");
        }

        // Log every supported range, then pick the last (highest) one.
        List<int[]> fpsRangeList = parameters.getSupportedPreviewFpsRange();
        for (int i = 0; i < fpsRangeList.size(); i++) {
            int[] fpsItem = fpsRangeList.get(i);
            StringBuilder tmp = new StringBuilder();
            for (int fps : fpsItem) {
                tmp.append(fps).append(' ');
            }
            Log.e(TAG, "fps supported " + i + " " + tmp);
        }
        // Fix: the old null-check on List.get()'s result was dead code, while
        // an empty list (possible on broken HALs) would have thrown
        // IndexOutOfBoundsException. Guard the list size instead.
        if (!fpsRangeList.isEmpty()) {
            int[] fpsMaxItem = fpsRangeList.get(fpsRangeList.size() - 1);
            parameters.setPreviewFpsRange(fpsMaxItem[0], fpsMaxItem[1]);
        }

        // NOTE(review): this hard-coded 90 overrides the rotation computed by
        // CameraUtils.setCameraDisplayOrientation above — confirm intentional.
        mCamera.setDisplayOrientation(90);
        CameraUtils.choosePreviewSize(parameters, desiredWidth, desiredHeight);
        mCamera.setParameters(parameters);

        mIsOpenCameraSuccess = true;
    }

    /**
     * Stops the preview, detaches its targets and releases the camera.
     * Safe to call when no camera is currently held.
     */
    private void releaseCamera() {
        if (mCamera == null) {
            return;
        }
        mCamera.stopPreview();
        mCamera.setPreviewCallback(null);
        try {
            mCamera.setPreviewTexture(null);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mCamera.release();
        mCamera = null;
        Log.e(TAG, "release camera");
    }

    /**
     * Tears down the current camera, opens the opposite-facing one with the
     * same preview size, and restarts the preview on the renderer's existing
     * SurfaceTexture.
     */
    protected void switchCamera() {
        Log.e(TAG, "switchCameraChange");
        LivetecWrapper.liveTecReset();
        releaseCamera();
        final int nextFacing =
                (mCurrentCameraType == Camera.CameraInfo.CAMERA_FACING_FRONT)
                        ? Camera.CameraInfo.CAMERA_FACING_BACK
                        : Camera.CameraInfo.CAMERA_FACING_FRONT;
        openCamera(nextFacing, cameraWidth, cameraHeight);
        handleCameraStartPreview(glRenderer.mCameraSurfaceTexture);
    }

    // Y-flip matrix and a scratch matrix for transform composition.
    // NOTE(review): flipYMtx, resultMtx and bgFileNames appear unused in this
    // file — confirm before removing.
    float[] flipYMtx = {1.0f,0.0f,0.0f,0.0f, 0.0f,-1.0f,0.0f,0.0f, 0.0f,0.0f,1.0f,0.0f, 0.0f,0.0f,0.0f,1.0f};
    float[] resultMtx = new float[16];

    String[] bgFileNames = new String[3];

    // ASCII bytes of a base64-looking auth token handed to
    // LivetecWrapper.liveTecSetup() in onSurfaceCreated.
    byte[] authData = {66, 86, 48, 116, 108, 55, 106, 55, 76, 99, 104, 98, 84, 104, 105, 103, 100, 101, 82, 73, 103, 113, 48, 103, 111, 106, 85, 68, 78, 84, 71, 50, 75, 76, 115, 82, 90, 110, 118, 80, 112, 69, 90, 115, 120, 122, 97, 118, 56, 107, 43, 48, 117, 89, 47, 69, 56, 52, 89, 52, 100, 76, 88, 78, 106, 112, 97, 118, 106, 55, 73, 67, 119, 80, 88, 72, 55, 72, 74, 103, 116, 114, 111, 119, 56, 86, 110, 105, 48, 71, 120, 100, 56, 86, 102, 81, 116, 55, 68, 110, 83, 102, 71, 98, 105, 75, 104, 86, 66, 52, 104, 53, 114, 49, 77, 81, 74, 49, 54, 77, 114, 77, 117, 53, 101, 112, 121, 102, 81, 107, 122, 119, 71, 71, 71, 86, 110, 70, 102, 114, 73, 86, 74, 54, 49, 103, 74, 88, 99, 103, 61, 61};

    // Selected background: 1-3 = static assets, 4-5 = animated frame sequences.
    private int bgImgType = 1;
    // Current frame index within an animated sequence.
    private int bgDynamicImgPosition;

    private final static String STATIC_IMG_1 = "static1.jpg";
    private final static String STATIC_IMG_2 = "static2.jpg";
    private final static String STATIC_IMG_3 = "static3.jpg";
    // Lazily-created GL textures for the static backgrounds (-1 = not loaded).
    private int staticImgTexture1 = -1;
    private int staticImgTexture2 = -1;
    private int staticImgTexture3 = -1;

    // Animated backgrounds: frame counts and lazily-filled texture caches
    // (allocated and reset in onSurfaceCreated).
    int dynamicImg1Length = 20;
    private int[] dynamicImg1Textures;
    int dynamicImg2Length = 20;
    private int[] dynamicImg2Textures;

    // FPS measurement window state, updated in onDrawFrame.
    long lastOneHundredFrameTimeStamp = 0;
    int frameCnt = 0;

    // 1 = green-screen mode selected, 0 = blue. NOTE(review): not read by
    // onDrawFrame, which hard-codes MATTING_TYPE_GREEN — confirm.
    int greenMode = 1;

    /**
     * Renderer running on the GLSurfaceView's GL thread. Pulls camera frames
     * from a SurfaceTexture into an external OES texture, feeds them through
     * the LiveTec matting SDK together with the selected background texture,
     * and draws the composited result full-screen.
     */
    class GLRenderer implements GLSurfaceView.Renderer {

        // Draws the matting output (TEXTURE_2D program).
        FullFrameRect mFullScreenFUDisplay;
        // Owns the external OES texture the camera preview streams into.
        FullFrameRect mFullScreenCamera;

        int mCameraTextureId;
        SurfaceTexture mCameraSurfaceTexture;

        // Most recently decoded background bitmap plus its texture/dimensions.
        Bitmap backBitmap;
        int backOriginTexture;
        int backWidth, backHeight;

        int surfaceWidth;
        int surfaceHeight;

        // NOTE(review): appears unused in this file — confirm before removing.
        int currentBgIndex = 0;

        // Per-frame SDK argument objects (recreated in each onDrawFrame call).
        LiveTecInputFrame inputFrame;
        LiveTecBackgroundFrame backgroundFrame;
        LiveTecOutputFrame outputFrame;
        LiveTecRenderConfig renderConfig;

        /**
         * Per-frame work: sample FPS/CPU every 50 frames, latch the newest
         * camera frame, resolve the background texture for the current mode,
         * run the matting SDK and draw its output.
         */
        @Override
        public void onDrawFrame(GL10 gl) {
            // FPS/CPU sampling: every 50 frames compute the average fps over
            // the window and post the numbers to the UI thread. The very first
            // window is skipped (lastOneHundredFrameTimeStamp == 0).
            if (++frameCnt == 50) {
                final long nowTimestamp = System.currentTimeMillis();
                final float fps = 1000.0f / ((nowTimestamp - lastOneHundredFrameTimeStamp) / 50.0f);
                final int fpsInt = Math.round(fps);
                final long cpuUsed = AndroidUtils.getProcessCpuUsed();
                if (lastOneHundredFrameTimeStamp != 0) {
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            tvFPS.setText("FPS:" + fpsInt + "");
                            tvCPU.setText("CPU:" + cpuUsed + "%");
                        }
                    });
                }
                lastOneHundredFrameTimeStamp = System.currentTimeMillis();
                frameCnt = 0;
                Log.e(TAG, "cpu use " + cpuUsed);
            }

            // Latch the newest camera frame into the OES texture and fetch its
            // texture transform matrix.
            float[] mtx = new float[16];
            mCameraSurfaceTexture.updateTexImage();
            mCameraSurfaceTexture.getTransformMatrix(mtx);

            /*
             * Resolve the background texture. Static modes (1-3) lazily decode
             * their asset once; dynamic modes (4-5) cycle through a numbered
             * frame sequence, decoding each frame lazily on first use.
             */
            switch (bgImgType) {
                case 1:
                    if (staticImgTexture1 == -1) {
                        staticImgTexture1 = getBackOriginTexture(STATIC_IMG_1);
                    }
                    backOriginTexture = staticImgTexture1;
                    break;
                case 2:
                    if (staticImgTexture2 == -1) {
                        staticImgTexture2 = getBackOriginTexture(STATIC_IMG_2);
                    }
                    backOriginTexture = staticImgTexture2;
                    break;
                case 3:
                    if (staticImgTexture3 == -1) {
                        staticImgTexture3 = getBackOriginTexture(STATIC_IMG_3);
                    }
                    backOriginTexture = staticImgTexture3;
                    break;
                case 4:
                    if (dynamicImg1Textures[bgDynamicImgPosition] == -1) {
                        dynamicImg1Textures[bgDynamicImgPosition] = getBackOriginTexture("d" + (bgDynamicImgPosition + 1) + ".jpg");
                    }
                    backOriginTexture = dynamicImg1Textures[bgDynamicImgPosition];
                    bgDynamicImgPosition = (bgDynamicImgPosition + 1) % dynamicImg1Length;
                    break;
                case 5:
                    if (dynamicImg2Textures[bgDynamicImgPosition] == -1) {
                        dynamicImg2Textures[bgDynamicImgPosition] = getBackOriginTexture("dd" + (bgDynamicImgPosition + 1) + ".jpg");
                    }
                    backOriginTexture = dynamicImg2Textures[bgDynamicImgPosition];
                    bgDynamicImgPosition = (bgDynamicImgPosition + 1) % dynamicImg2Length;
                    break;
            }

            inputFrame = new LiveTecInputFrame(mCameraTextureId, cameraWidth, cameraHeight);
            backgroundFrame = new LiveTecBackgroundFrame(backOriginTexture, backWidth, backHeight);
            // Fix: dropped the throwaway `new LiveTecRenderConfig()` that was
            // immediately overwritten by the builder result below.
            renderConfig = new LiveTecRenderConfig.Builder()
                            .bgRotation(0)
                            .inputFormat(LivetecWrapper.IMG_FORMAT_TEXTURE_EXT)
                            .isBgFlipX(false)
                            .isBgFlipY(mCurrentCameraType == Camera.CameraInfo.CAMERA_FACING_FRONT)
                            .transparency(0.5f)
                            // NOTE(review): the UI toggles `greenMode`, but the matting
                            // type is hard-coded to GREEN here — confirm whether a blue
                            // constant should be chosen based on greenMode.
                            .mattingType(LivetecWrapper.MATTING_TYPE_GREEN)
                            .build();
            // The live seek-bar value overrides the builder's 0.5f placeholder.
            renderConfig.settingTransparency(transparencyValue);

            outputFrame = LivetecWrapper.liveTecMatting(inputFrame, backgroundFrame, renderConfig);
            mFullScreenFUDisplay.drawFrame(outputFrame.texOut, mtx);
        }

        /**
         * Records the new surface dimensions and sizes the GL viewport to
         * cover the whole surface.
         */
        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            surfaceWidth = width;
            surfaceHeight = height;
            GLES20.glViewport(0, 0, width, height);
        }

        /**
         * GL context ready: builds the screen-quad programs, creates the OES
         * texture backing the camera SurfaceTexture, initialises the matting
         * SDK with the auth blob, resets the background texture caches (old
         * texture ids are invalid in the new context) and asks the main thread
         * to start the camera preview.
         */
        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // TEXTURE_2D program displays matting output; TEXTURE_EXT owns the
            // external camera texture.
            mFullScreenFUDisplay = new FullFrameRect(new Texture2dProgram(
                    Texture2dProgram.ProgramType.TEXTURE_2D));
            mFullScreenCamera = new FullFrameRect(new Texture2dProgram(
                    Texture2dProgram.ProgramType.TEXTURE_EXT));
            mCameraTextureId = mFullScreenCamera.createTextureObject();
            mCameraSurfaceTexture = new SurfaceTexture(mCameraTextureId);

            final int setupRes = LivetecWrapper.liveTecSetup(authData); //authdata need to be delivered
            Log.e(TAG, "setup res " + setupRes);
            // Report the SDK auth result on the UI thread (user-facing Chinese labels).
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    switch (setupRes) {
                        case LivetecWrapper.SETUO_RESULT_FAIL_EXPIRE_AUTH:
                            tvSetupHint.setText("状态：授权过期");
                            break;
                        case LivetecWrapper.SETUP_RESULT_FAIL:
                            tvSetupHint.setText("状态：失败");
                            break;
                        case LivetecWrapper.SETUP_RESULT_FAIL_NOT_AUTH:
                            tvSetupHint.setText("状态：未授权");
                            break;
                        case LivetecWrapper.SETUP_RESULT_SUCCESS:
                            tvSetupHint.setText("状态：成功");
                            break;
                    }
                }
            });

            // Invalidate all cached background textures — ids from a previous
            // GL context are no longer valid; -1 forces lazy re-decoding.
            staticImgTexture1 = -1;
            staticImgTexture2 = -1;
            staticImgTexture3 = -1;
            dynamicImg1Textures = new int[dynamicImg1Length];
            Arrays.fill(dynamicImg1Textures, -1);
            dynamicImg2Textures = new int[dynamicImg2Length];
            Arrays.fill(dynamicImg2Textures, -1);

            // Camera APIs must run on the main thread; hand off via the handler.
            mMainHandler.sendMessage(mMainHandler.obtainMessage(
                    MainHandler.HANDLE_CAMERA_START_PREVIEW,
                    mCameraSurfaceTexture));
        }

        /**
         * Decodes an asset image into a bitmap, uploads it as a GL_TEXTURE_2D
         * texture, and records its dimensions in backWidth/backHeight.
         * Must be called on the GL thread.
         *
         * @param fileName asset file name to decode
         * @return the GL texture id, or -1 if the asset could not be decoded
         */
        int getBackOriginTexture(String fileName) {
            Log.e(TAG, "getBackOriginTexture file name : " + fileName);
            InputStream inputStream = null;
            try {
                inputStream = getAssets().open(fileName);
                backBitmap = BitmapFactory.decodeStream(inputStream);
            } catch (IOException e) {
                e.printStackTrace();
                return -1;
            } finally {
                // Fix: the stream was previously never closed.
                if (inputStream != null) {
                    try {
                        inputStream.close();
                    } catch (IOException ignored) {
                        // best-effort close; nothing further to do
                    }
                }
            }
            // Fix: decodeStream returns null for corrupt/unsupported data; the
            // old code would have thrown an NPE on getConfig().
            if (backBitmap == null) {
                Log.e(TAG, "decode failed for " + fileName);
                return -1;
            }
            if (backBitmap.getConfig() == Bitmap.Config.ARGB_8888) {
                Log.e(TAG, "ARGB_8888");
            } else {
                Log.e(TAG, "NOT ARGB_8888");
            }

            backWidth = backBitmap.getWidth();
            backHeight = backBitmap.getHeight();
            int tex = GlUtil.createTexture(backBitmap.getWidth(), backBitmap.getHeight(), GLES20.GL_RGBA, false);
            Log.e("lirui", "width " + backBitmap.getWidth() + " height " + backBitmap.getHeight());

            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, backBitmap, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

            // Fix: resolves the old "TODO recycle bitmap" — texImage2D has
            // copied the pixels into GL, so the bitmap can be freed now.
            backBitmap.recycle();
            backBitmap = null;

            return tex;
        }
    }

    /**
     * Main-looper handler relaying renderer-thread events to the activity.
     * Static nested class holding the activity via a WeakReference so a
     * queued message cannot leak the Activity instance.
     */
    static class MainHandler extends Handler {

        /** msg.obj is the SurfaceTexture the camera should preview into. */
        static final int HANDLE_CAMERA_START_PREVIEW = 1;

        private final WeakReference<MainActivity> mActivityWeakReference;

        MainHandler(MainActivity activity) {
            mActivityWeakReference = new WeakReference<>(activity);
        }

        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            MainActivity activity = mActivityWeakReference.get();
            // Fix: the reference is weak precisely because the activity can be
            // gone by delivery time — bail out instead of NPE-ing below.
            if (activity == null) {
                return;
            }
            switch (msg.what) {
                case HANDLE_CAMERA_START_PREVIEW:
                    activity.handleCameraStartPreview((SurfaceTexture) msg.obj);
                    break;
            }
        }
    }

    /**
     * Opens the back camera once the CAMERA permission is granted.
     * Per the Android docs, {@code grantResults} can be empty when the
     * permission request is cancelled, so the array is length-checked before
     * index 0 is read (the old code could throw ArrayIndexOutOfBounds).
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (grantResults.length > 0
                && grantResults[0] == PackageManager.PERMISSION_GRANTED
                && !mIsOpenCameraSuccess) {
            openCamera(Camera.CameraInfo.CAMERA_FACING_BACK,
                    cameraWidth,
                    cameraHeight);
        }
    }

    /**
     * Click handler wired from the layout XML. Routes camera switching,
     * background selection (static/dynamic) and matting-mode selection.
     * The five near-identical background branches are factored into two
     * private helpers; behavior is unchanged.
     */
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.switch_camera_btn:
                switchCamera();
                break;
            case R.id.staticimg1:
                selectStaticBackground(staticImgTV1, 1);
                break;
            case R.id.staticimg2:
                selectStaticBackground(staticImgTV2, 2);
                break;
            case R.id.staticimg3:
                selectStaticBackground(staticImgTV3, 3);
                break;
            case R.id.dynamicimg1:
                selectDynamicBackground(dynamicImgTV1, 4, "gif1music.mp3");
                break;
            case R.id.dynamicimg2:
                selectDynamicBackground(dynamicImgTV2, 5, "gif2music.mp3");
                break;
            case R.id.green_mode:
                tvBluemode.setBackgroundColor(getResources().getColor(R.color.unselect_gray));
                tvGreenmode.setBackgroundColor(getResources().getColor(R.color.livetecBlue));
                greenMode = 1;
                break;
            case R.id.blue_mode:
                tvBluemode.setBackgroundColor(getResources().getColor(R.color.livetecBlue));
                tvGreenmode.setBackgroundColor(getResources().getColor(R.color.unselect_gray));
                greenMode = 0;
                break;
        }
    }

    /** Highlights the chosen static background tile and stops any gif music. */
    private void selectStaticBackground(TextView tile, int imgType) {
        restoreAllImgTV();
        tile.setBackgroundColor(getResources().getColor(R.color.livetecBlue));
        bgImgType = imgType;
        if (mediaPlayer != null && mediaPlayer.isPlaying()) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
        }
    }

    /** Highlights the chosen dynamic background tile and (re)starts its music. */
    private void selectDynamicBackground(TextView tile, int imgType, String musicAsset) {
        restoreAllImgTV();
        tile.setBackgroundColor(getResources().getColor(R.color.livetecBlue));
        bgImgType = imgType;
        bgDynamicImgPosition = 0;
        // NOTE(review): with this original condition, music does NOT restart
        // when a player exists but is stopped; it looks like it was meant to
        // be `== null || !isPlaying()`. Preserved pending confirmation.
        if (mediaPlayer == null || mediaPlayer.isPlaying()) {
            playMusic(musicAsset);
        }
    }

    /**
     * Stops and releases any current player, then plays the given mp3 asset
     * on loop once async preparation completes.
     *
     * @param fileName asset name of the mp3 to play
     */
    void playMusic(String fileName) {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
        }

        mediaPlayer = new MediaPlayer();

        try {
            AssetFileDescriptor descriptor = getAssets().openFd(fileName);
            mediaPlayer.setDataSource(descriptor.getFileDescriptor(), descriptor.getStartOffset(), descriptor.getLength());
            descriptor.close();
        } catch (IOException e) {
            // Fix: the old code fell through to prepareAsync() with no data
            // source set, which throws IllegalStateException. Release and bail.
            e.printStackTrace();
            mediaPlayer.release();
            mediaPlayer = null;
            return;
        }
        mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        // Fix: register the listener before prepareAsync() so the prepared
        // callback can never be dispatched before registration; also operate
        // on the `mp` parameter rather than the field, which may already
        // reference a newer player by the time the callback runs.
        mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                mp.setLooping(true);
                mp.start();
            }
        });
        mediaPlayer.prepareAsync();
    }

    /**
     * Resets every background-selection tile to the unselected gray color.
     */
    void restoreAllImgTV() {
        final int unselectedColor = getResources().getColor(R.color.unselect_gray);
        final TextView[] tiles = {
                staticImgTV1, staticImgTV2, staticImgTV3,
                dynamicImgTV1, dynamicImgTV2
        };
        for (TextView tile : tiles) {
            tile.setBackgroundColor(unselectedColor);
        }
    }
}
