package com.aliyun.frontframework;

import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;

import com.aliyun.frontserver.CoreService;
import com.android.grafika.gles.EglCore;
import com.android.grafika.gles.FullFrameRect;
import com.android.grafika.gles.Texture2dProgram;
import com.android.grafika.gles.WindowSurface;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore.Audio.Media;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

@SuppressWarnings("deprecation")
public class NewFrontCamera implements SurfaceTexture.OnFrameAvailableListener {

    private static final String TAG = "NFCM";
    public static final boolean DEBUG_DRAW = false;
    public static final boolean DEBUG_SURFACE = true;
    /** Emit per-surface draw logs once every this many frames. */
    private static final int DEBUG_SURFACE_TIME = 70;
    private int debug_surface_count = 0;

    private EglCore mEglCore;
    /** One box per connected client display surface (surface + size + id code). */
    private ArrayList<WindowSurfaceBox> mBoxs = new ArrayList<WindowSurfaceBox>();
    /** True while a recording session is active; set by startRecord(), cleared by stopRecord(). */
    private boolean isRecording = false;
    private RecordWorker mRecordWorker;
    private int cameraId = 0;
    /** Off-screen texture that receives camera preview frames. */
    private SurfaceTexture mCameraTexture;
    private FullFrameRect mFullFrameBlit;
    private final float[] mTmpMatrix = new float[16];
    private int mTextureId;

    private Camera mCamera;
    private int mCameraPreviewThousandFps;

    private MainHandler mHandler;
    private CoreService mService;

    /**
     * Custom message handler for the main thread.
     * <p>
     * Forwards camera preview "frame available" notifications to the service,
     * and receives callback messages from the encoder thread. Holds the
     * service through a WeakReference so a dead service is not leaked.
     */
    private static class MainHandler extends Handler implements
            CircularEncoder.Callback {
        public static final int MSG_BLINK_TEXT = 0;
        public static final int MSG_FRAME_AVAILABLE = 1;
        public static final int MSG_FILE_SAVE_COMPLETE = 2;
        public static final int MSG_BUFFER_STATUS = 3;

        private WeakReference<CoreService> service;

        public MainHandler(CoreService ref) {
            service = new WeakReference<CoreService>(ref);
        }

        @Override
        public void handleMessage(Message msg) {
            CoreService ref = service.get();
            if (ref == null) {
                Log.d(TAG, "Got message for dead activity");
                return;
            }

            switch (msg.what) {
            case MSG_FRAME_AVAILABLE: {
                ref.drawFrame();
                break;
            }
            // BUGFIX: fileSaveComplete()/bufferStatus() post these two message
            // types to this handler; they previously fell through to the
            // default case and crashed with a RuntimeException.
            case MSG_FILE_SAVE_COMPLETE: {
                Log.d(TAG, "file save complete, status=" + msg.arg1);
                break;
            }
            case MSG_BUFFER_STATUS: {
                // Recombine the long that bufferStatus() split across arg1/arg2.
                long totalTimeMsec = (((long) msg.arg1) << 32)
                        | (((long) msg.arg2) & 0xffffffffL);
                Log.d(TAG, "buffered video: " + totalTimeMsec + " msec");
                break;
            }
            default:
                throw new RuntimeException("Unknown message " + msg.what);
            }
        }

        @Override
        public void fileSaveComplete(int status) {
            sendMessage(obtainMessage(MSG_FILE_SAVE_COMPLETE, status, 0, null));
        }

        @Override
        public void bufferStatus(long totalTimeMsec) {
            // The long is split into two ints; handleMessage() reassembles it.
            sendMessage(obtainMessage(MSG_BUFFER_STATUS,
                    (int) (totalTimeMsec >> 32), (int) totalTimeMsec));
        }
    }

    public NewFrontCamera(CoreService service) {
        mService = service;
        mHandler = new MainHandler(service);
    }

    /**
     * Opens a camera, and attempts to establish preview mode at the specified
     * width and height.
     * <p>
     * Prefers a front-facing camera, falling back to the default (back-facing)
     * camera. Sets mCameraPreviewThousandFps to the expected frame rate (which
     * might actually be variable). No-op if a camera is already open.
     *
     * @throws RuntimeException if no camera can be opened at all
     */
    public void openCamera(int desiredWidth, int desiredHeight, int desiredFps) {
        if (mCamera != null) {
            log("camera already initialized");
            return;
        }

        Camera.CameraInfo info = new Camera.CameraInfo();

        // Try to find a front-facing camera (e.g. for videoconferencing).
        int numCameras = Camera.getNumberOfCameras();
        for (int i = 0; i < numCameras; i++) {
            Camera.getCameraInfo(i, info);
            Log.d(TAG, "getCameraInfo i " + i);
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                cameraId = i;
                mCamera = Camera.open(i);
                break;
            }
        }
        if (mCamera == null) {
            log("No front-facing camera found; opening default");
            cameraId = 0;
            mCamera = Camera.open(); // opens first back-facing camera
        }
        if (mCamera == null) {
            throw new RuntimeException("Unable to open camera");
        }

        Camera.Parameters parms = mCamera.getParameters();
        CameraUtils.choosePreviewSize(parms, desiredWidth, desiredHeight);
        // Try to set the frame rate to a constant value.
        mCameraPreviewThousandFps = CameraUtils.chooseFixedPreviewFps(parms,
                desiredFps * 1000);

        // Give the camera a hint that we're recording video. This can have a
        // big impact on frame rate.
        parms.setRecordingHint(true);
        mCamera.setParameters(parms);
        mCamera.setDisplayOrientation(90);

        Camera.Size cameraPreviewSize = parms.getPreviewSize();
        String previewFacts = cameraPreviewSize.width + "x"
                + cameraPreviewSize.height + " @"
                + (mCameraPreviewThousandFps / 1000.0f) + "fps";
        log("Camera config: " + previewFacts);
    }

    public void setVideoOutputFile(File newFile) {
        // TODO: rotate / loop the output file.
    }

    private void fileSaveComplete() {
        // TODO: handle recording-stopped / file-saved event.
    }

    /**
     * Releases the camera, the camera texture, every client window surface and
     * the EGL context. Pending handler messages are discarded first so a stale
     * MSG_FRAME_AVAILABLE cannot arrive after teardown. Safe to call when
     * already released.
     */
    public void releaseCamera() {
        mHandler.removeCallbacksAndMessages(null);
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
            Log.d(TAG, "releaseCamera -- done");
        }
        if (mCameraTexture != null) {
            mCameraTexture.release();
            mCameraTexture = null;
        }
        if (mBoxs != null) {
            // BUGFIX: the loop body used to contain two identical copies of
            // the WindowSurface release block, so each surface was released
            // twice. The second copy now releases the client Surface instead,
            // matching what surfaceDestroyed() does.
            for (int i = 0; i < mBoxs.size(); i++) {
                WindowSurfaceBox box = mBoxs.get(i);
                if (box.getDisplayWindowSurface() != null) {
                    Log.d(TAG, "DEADBEAF release window surface");
                    if (DEBUG_SURFACE) {
                        Log.v(TAG, "clientSurface release i " + i + " "
                                + box.getCode());
                    }
                    box.getDisplayWindowSurface().release();
                }
                if (box.getClientSurface() != null) {
                    Log.d(TAG, "DEADBEAF release client surface "
                            + box.getClientSurface());
                    box.getClientSurface().release();
                }
            }
            mBoxs.clear();
        }

        if (mFullFrameBlit != null) {
            mFullFrameBlit.release(false);
            mFullFrameBlit = null;
        }
        if (mEglCore != null) {
            mEglCore.release();
            mEglCore = null;
        }
        // BUGFIX: was "onPause() done" — a misleading copy-paste from the
        // grafika activity sample.
        Log.d(TAG, "releaseCamera() done");
    }

    /**
     * SurfaceTexture callback: a new camera frame is ready. Hops to the main
     * thread via the handler, which calls back into drawFrame().
     */
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (DEBUG_DRAW)
            Log.v(TAG, "onFrameAvailable");
        mHandler.sendEmptyMessage(MainHandler.MSG_FRAME_AVAILABLE);
    }

    private static void log(String s) {
        Log.d(TAG, s);
    }

    /** Unregisters and releases the client surface identified by {@code code}. */
    public void setDisplaySurface(Surface sf, String code) {
        surfaceDestroyed(sf, code);
    }

    /** Registers a new client surface of the given size under {@code code}. */
    public void setDisplaySurface(Surface sf, int width, int height, String code) {
        surfaceCreated(sf, width, height, code);
    }

    private void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        Log.d(TAG, "surfaceChanged fmt=" + format + " size=" + width + "x"
                + height + " holder=" + holder);
    }

    /**
     * Sets up everything that requires an EGL context for a new client surface.
     * <p>
     * We have to wait until we have a surface because you can't make an EGL
     * context current without one, and creating a temporary 1x1 pbuffer is a
     * waste of time. All client display surfaces (and, later, the encoder
     * surface) share the same EGL context.
     */
    private void surfaceCreated(Surface sf, int width, int height, String code) {
        if (sf == null) {
            Log.e(TAG, "why do u send null surface to surfaceCreated?");
            return;
        }
        // If a surface with this code is already registered, do nothing.
        // BUGFIX: size() was previously read before the null check.
        if (mBoxs != null) {
            for (int i = 0; i < mBoxs.size(); i++) {
                if (mBoxs.get(i).getCode().equals(code)) {
                    return;
                }
            }
        }
        Log.d(TAG, "DEADBEAF set surface " + sf);
        // 1. Create the EGL objects for this client surface (shared context).
        if (mEglCore == null) {
            mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
        }
        WindowSurface displayWindowSurface = new WindowSurface(mEglCore, sf,
                false);
        displayWindowSurface.makeCurrent();

        // Track the surface so drawFrame() fans frames out to it.
        mBoxs.add(new WindowSurfaceBox(displayWindowSurface, sf, width, height,
                code));
        // 2. Create the shared camera SurfaceTexture and start the preview.
        createCameraTexture();
    }

    /**
     * Creates the off-screen SurfaceTexture backed by an external-OES texture
     * and starts the camera preview into it. Idempotent: subsequent calls for
     * additional client surfaces reuse the existing texture and preview.
     */
    private void createCameraTexture() {
        if (mCamera == null) {
            // BUGFIX: used the literal tag "camera" instead of TAG.
            Log.e(TAG, "Camera is null");
            return;
        }
        if (mFullFrameBlit == null) {
            mFullFrameBlit = new FullFrameRect(new Texture2dProgram(
                    Texture2dProgram.ProgramType.TEXTURE_EXT));
            mTextureId = mFullFrameBlit.createTextureObject();
        }

        if (mCameraTexture == null) {
            mCameraTexture = new SurfaceTexture(mTextureId);
            mCameraTexture.setOnFrameAvailableListener(this);
            Log.d(TAG, "starting camera preview");
            try {
                mCamera.setPreviewTexture(mCameraTexture);
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
            mCamera.startPreview();
        }
    }

    private RecordWorker.Callback mRecordWorkerCallback = new RecordWorker.Callback() {

        @Override
        public void onStatusChanged(int status) {
            Log.d(TAG, "onStatusChanged " + status);
        }

        @Override
        public void onRecordTimeUpdate(long time) {
            Log.d(TAG, "onRecordTimeUpdate " + time);
        }
    };

    /**
     * Starts a recording session with fixed 720p / 5-minute / audio-on
     * parameters. No-op if a session is already running.
     */
    public void startRecord() {
        if (isRecording) {
            Log.d(TAG, "isRecording....");
            return;
        }
        if (mRecordWorker == null) {
            mRecordWorker = new RecordWorker(mCamera, mService,
                    mRecordWorkerCallback);
        } else {
            mRecordWorker.setCamera(mCamera);
        }
        DVRRecordParamModel param = new DVRRecordParamModel();
        // Hard-coded defaults; equivalent to the old "val" switch dance.
        // TODO: make these configurable.
        param._p = CamcorderProfile.QUALITY_720P;
        param.width = 1280;
        param.height = 720;
        param.duration = 300000; // 5 minutes per clip
        param.isSound = (byte) 1; // record audio
        param.cameraId = 0;
        Log.d(TAG, "startRecord");
        // BUGFIX: the flag was never set, so the guard above could never
        // prevent a second concurrent start; stopRecord() clears it.
        isRecording = true;
        mRecordWorker.startRecord(param);
    }

    /** Stops the current recording session, if any. */
    public void stopRecord() {
        Log.d(TAG, "stopRecord");
        isRecording = false;
        // BUGFIX: guard against stopRecord() being called before any
        // startRecord() — mRecordWorker would be null and NPE.
        if (mRecordWorker != null) {
            mRecordWorker.stopRecord();
        }
    }

    /**
     * Unregisters the client surface identified by {@code code}, releasing its
     * WindowSurface and its client Surface. The break after removal is safe
     * because surfaceCreated() never registers the same code twice.
     */
    private void surfaceDestroyed(Surface sf, String code) {
        if (mBoxs == null) {
            Log.w(TAG, "client surface destroy warnning, no list found!");
            return;
        }
        for (int i = 0; i < mBoxs.size(); i++) {
            WindowSurfaceBox box = mBoxs.get(i);
            if (code.equals(box.getCode())) {
                if (box.getDisplayWindowSurface() != null) {
                    Log.d(TAG, "DEADBEAF release window surface");
                    box.getDisplayWindowSurface().release();
                }
                if (box.getClientSurface() != null) {
                    Surface s = box.getClientSurface();
                    Log.d(TAG, "DEADBEAF release client surface " + s.toString());
                    s.release();
                }

                mBoxs.remove(i);
                break;
            }
        }
    }

    /**
     * Draws a frame onto every registered client surface.
     * <p>
     * This will be called whenever we get a new preview frame from the camera.
     * This runs on the UI thread, which ordinarily isn't a great idea -- you
     * really want heavy work to be on a different thread -- but we're really
     * just throwing a few things at the GPU. The upside is that we don't have
     * to worry about managing state changes between threads.
     * <p>
     * If there was a pending frame available notification when we shut down, we
     * might get here after release; all the early-outs below cover that.
     */
    public void drawFrame() {
        if (DEBUG_DRAW)
            Log.v(TAG, "drawFrame");
        if (mEglCore == null) {
            Log.d(TAG, "Skipping drawFrame after shutdown mEglCore = null");
            return;
        }
        // BUGFIX: guard against a frame-available message racing teardown;
        // mCameraTexture is released before mEglCore in releaseCamera().
        if (mCameraTexture == null) {
            Log.d(TAG, "Skipping drawFrame: camera texture released");
            return;
        }
        if (mBoxs == null || mBoxs.size() == 0) {
            Log.d(TAG,
                    "Skipping drawFrame after shutdown no client surface and no encode surface!");
            return;
        }

        // 1. Latch the next camera frame into the shared external texture.
        mCameraTexture.updateTexImage();
        mCameraTexture.getTransformMatrix(mTmpMatrix);

        int size = mBoxs.size();
        debug_surface_count++;
        for (int i = 0; i < size; i++) {
            WindowSurfaceBox box = mBoxs.get(i);
            // 2. Make this client's surface current.
            box.getDisplayWindowSurface().makeCurrent();
            if (DEBUG_SURFACE && debug_surface_count == DEBUG_SURFACE_TIME) {
                Log.v(TAG, "drawFrame surface " + i + " " + box.getCode());
            }
            // 3. BUGFIX: set the viewport BEFORE drawing. It was previously
            // set after drawFrame(), so each draw used the previous surface's
            // (or a stale) viewport.
            GLES20.glViewport(0, 0, box.getClientWidth(),
                    box.getClientHeight());
            mFullFrameBlit.drawFrame(mTextureId, mTmpMatrix);
            // 4. Publish the frame to the client surface.
            box.getDisplayWindowSurface().swapBuffers();
        }
        if (debug_surface_count == DEBUG_SURFACE_TIME) {
            debug_surface_count = 0;
        }

        // TODO: feed the video encoder here (CircularEncoder input surface,
        // frameAvailableSoon + setPresentationTime + swapBuffers), as in the
        // ContinuousCapture sample.
    }
}
