package com.android.av.edit;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMetadataRetriever;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.os.Build;
import android.util.Log;
import android.view.Surface;

import com.common.base.log.DLog;
import com.android.util.NativeGifEncoder;
import com.android.gpuimage.GPUImageOESFilter;
import com.android.gpuimage.GPUImageRenderer;
import com.android.gpuimage.IRenderCallback;
import com.android.gpuimage.Rotation;
import com.android.gpuimage.util.LocationUtil;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

/**
 * Converts a video file into an animated GIF: frames are decoded with MediaCodec,
 * rendered through OpenGL into an off-screen pbuffer, and appended to the output
 * via a native GIF encoder.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class VideoToGifDevice {

    private final static String TAG = VideoToGifDevice.class.getSimpleName();


    private static final boolean VERBOSE = false;           // lots of logging

    private static final int LIMIT = 360;

    private File mSrcFile;

    private File mDstFile;

    private int mWidth;
    private int mHeight;
    private int mBitRate;
    private int mDegrees;
    private long mDuration;
    private String[] mLocation;

    private ResultCallback mResultCallback;

    public VideoToGifDevice(File srcVideoFile, File dstVideoFile, ResultCallback callback) throws IOException {
        MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
        retrieverSrc.setDataSource(srcVideoFile.getAbsolutePath());
        String degreesString = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
        int degrees = degreesString == null ? 0 : Integer.valueOf(degreesString);
        String bitrateString = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_BITRATE);
        int bitrate = bitrateString == null ? 0 : Integer.valueOf(bitrateString);
        String widthString = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
        int width = Integer.valueOf(widthString);
        String heightString = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
        int height = Integer.valueOf(heightString);
        String durationString = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_DURATION);
        long duration = Long.valueOf(durationString);
        String location = retrieverSrc.extractMetadata(
                MediaMetadataRetriever.METADATA_KEY_LOCATION);
        init(srcVideoFile, dstVideoFile, width, height, degrees, duration, bitrate,
                LocationUtil.parseLocation(location), callback);
    }

    public VideoToGifDevice(File srcVideoFile, File dstVideoFile,
                            int width, int height, int degrees, int duration, int bitrate,
                            String[] location, ResultCallback callback) throws IOException {
        init(srcVideoFile, dstVideoFile, width, height, degrees, duration, bitrate, location, callback);
    }

    private void init(File srcVideoFile, File dstVideoFile,
                      int width, int height, int degrees, long duration, int bitrate, String[] location,
                      ResultCallback callback) throws IOException {
        mResultCallback = callback;
        mSrcFile = srcVideoFile;
        mDstFile = dstVideoFile;
        mWidth = width;
        mHeight = height;
        mDegrees = degrees;
        mDuration = duration;
        mBitRate = bitrate;
        mLocation = location;

        DLog.d(TAG, "Path:" + srcVideoFile + " Width:" + width + " Height:" + height
                + " Degrees:" + degrees + " mBitRate:" + bitrate + " Location:" + location);
    }

    public int getWidth() {
        return mWidth;
    }

    public int getHeight() {
        return mHeight;
    }

    public int getBitRate() {
        return mBitRate;
    }

    public String[] getLocaction() {
        return mLocation;
    }

    public int getOrintation() {
        return mDegrees;
    }

    public void start() {
        ExtractMpegFramesWrapper.excute(this);
    }

    private void onError() {
        if (mResultCallback != null) {
            mResultCallback.onError(mDstFile);
        }
    }

    private void onProgressUpdate(long currentTimestamp) {
        if (mResultCallback != null) {
            float progress = (float) (currentTimestamp / (double)mDuration);
            if (progress > 1f) {
                progress = 1f;
            } else if (progress < 0f) {
                progress = 0f;
            }
            mResultCallback.onProgressUpdate(progress);
        }
    }

    private void onSuccess() {
        if (mResultCallback != null) {
            mResultCallback.onSuccess(mDstFile);
        }
    }

    public interface ResultCallback {
        void onError(File outputFile);

        void onProgressUpdate(float progress);

        void onSuccess(File outputFile);
    }

    /**
     * Wraps extractMpegFrames().  This is necessary because SurfaceTexture will try to use
     * the looper in the current thread if one exists, and the CTS tests create one on the
     * test thread.
     *
     * The wrapper propagates exceptions thrown by the worker thread back to the caller.
     */
    private static class ExtractMpegFramesWrapper implements Runnable {
        private VideoToGifDevice mDeivce;

        private ExtractMpegFramesWrapper(VideoToGifDevice device) {
            mDeivce = device;
        }

        @Override
        public void run() {
            try {
                mDeivce.extractMpegFrames();
                mDeivce.onSuccess();
            } catch (Throwable th) {
                DLog.e(TAG, "", th);
                mDeivce.onError();
            }
        }

        /** Entry point. */
        public static void excute(VideoToGifDevice device) {
            ExtractMpegFramesWrapper wrapper = new ExtractMpegFramesWrapper(device);
            Thread th = new Thread(wrapper, "gif codec");
            th.start();
        }
    }

    /**
     * Tests extraction from an MP4 to a series of PNG files.
     * <p>
     * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
     * video with the GPU.  If the input video has a different aspect ratio, we could preserve
     * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
     * you're extracting frames you don't want black bars.
     */
    private void extractMpegFrames() throws IOException {
        MediaCodec decoder = null;
        CodecOutputSurface outputSurface = null;
        MediaExtractor extractor = null;
        int saveWidth = mWidth;
        int saveHeight = mHeight;
        if (mDegrees == 90 || mDegrees == 270) {
            saveWidth = mHeight;
            saveHeight = mWidth;
        }

        if (saveWidth <= saveHeight && saveWidth > LIMIT) {
            saveHeight = (int) (saveHeight / (float)saveWidth * LIMIT);
            saveWidth = LIMIT;
        } else if (saveHeight > LIMIT){
            saveWidth = (int) (saveWidth / (float)saveHeight * LIMIT);
            saveHeight = LIMIT;
        }

        try {
            File inputFile = mSrcFile;   // must be an absolute path
            // The MediaExtractor error messages aren't very useful.  Check to see if the input
            // file exists so we can throw a better one if it's not there.
            if (!inputFile.canRead()) {
                throw new FileNotFoundException("Unable to read " + inputFile);
            }

            extractor = new MediaExtractor();
            extractor.setDataSource(inputFile.toString());
            int trackIndex = selectTrack(extractor);
            if (trackIndex < 0) {
                throw new RuntimeException("No video track found in " + inputFile);
            }
            extractor.selectTrack(trackIndex);

            MediaFormat format = extractor.getTrackFormat(trackIndex);
            if (VERBOSE) {
                Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
                        format.getInteger(MediaFormat.KEY_HEIGHT));
            }

            // Could use width/height from the MediaFormat to get full-size frames.
            outputSurface = new CodecOutputSurface(saveWidth, saveHeight, mDegrees, mDstFile);

            // Create a MediaCodec decoder, and configure it with the MediaFormat from the
            // extractor.  It's very important to use the format from the extractor because
            // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
            String mime = format.getString(MediaFormat.KEY_MIME);
            decoder = MediaCodec.createDecoderByType(mime);
            decoder.configure(format, outputSurface.getSurface(), null, 0);
            decoder.start();

            doExtract(extractor, trackIndex, decoder, outputSurface);
        } finally {
            // release everything we grabbed
            if (outputSurface != null) {
                outputSurface.release();
                outputSurface = null;
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
                decoder = null;
            }
            if (extractor != null) {
                extractor.release();
                extractor = null;
            }
        }
    }

    /**
     * Selects the video track, if any.
     *
     * @return the track index, or -1 if no video track is found.
     */
    private int selectTrack(MediaExtractor extractor) {
        // Select the first video track we find, ignore the rest.
        int numTracks = extractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                if (VERBOSE) {
                    Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
                }
                return i;
            }
        }

        return -1;
    }

    /**
     * Work loop.
     */
    private void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
                          CodecOutputSurface outputSurface) throws IOException {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int decodeCount = 0;
        long frameSaveTime = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    // Read the sample data into the ByteBuffer.  This neither respects nor
                    // updates inputBuf's position, limit, etc.
                    int chunkSize = extractor.readSampleData(inputBuf, 0);
                    if (chunkSize < 0) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS");
                    } else {
                        if (extractor.getSampleTrackIndex() != trackIndex) {
                            Log.w(TAG, "WEIRD: got sample from track " +
                                    extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                        }
                        long presentationTimeUs = extractor.getSampleTime();
                        decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                                presentationTimeUs, 0 /*flags*/);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    chunkSize);
                        }
                        inputChunk++;
                        extractor.advance();
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            if (!outputDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not important for us, since we're using Surface
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage(true);

                        long startWhen = System.nanoTime();
                        if (info.presentationTimeUs > 0) {
                            if (outputSurface.saveFrame(info.presentationTimeUs)) {
                                onProgressUpdate((long) (info.presentationTimeUs / 1000d));
                            }
                        }
                        frameSaveTime += System.nanoTime() - startWhen;
                        decodeCount++;
                    }
                }
            }
        }

        int numSaved = decodeCount;
        Log.d(TAG, "Saving " + numSaved + " frames took " +
                (frameSaveTime / numSaved / 1000) + " us per frame");
    }


    /**
     * Holds state associated with a Surface used for MediaCodec decoder output.
     * <p>
     * The constructor for this class will prepare GL, create a SurfaceTexture,
     * and then create a Surface for that SurfaceTexture.  The Surface can be passed to
     * MediaCodec.configure() to receive decoder output.  When a frame arrives, we latch the
     * texture with updateTexImage(), then render the texture with GL to a pbuffer.
     * <p>
     * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
     * can potentially drop frames.
     */
    private static class CodecOutputSurface
            implements IRenderCallback {
        private GPUImageRenderer mRender;
        private Surface mSurface;

        private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
        private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
        int mWidth;
        int mHeight;
        int mDegress;

        private Object mFrameSyncObject = new Object();     // guards mFrameAvailable
        private boolean mFrameAvailable;

        private ByteBuffer mPixelBuf;                       // used by saveFrame()
//        private NativeGifEncoder mNativeGifEncoder;
        private long mEncoderPtr;
        private long mPreFrameTimestamp;

        /**
         * Creates a CodecOutputSurface backed by a pbuffer with the specified dimensions.  The
         * new EGL context and surface will be made current.  Creates a Surface that can be passed
         * to MediaCodec.configure().
         */
        public CodecOutputSurface(int width, int height, int degrees, File outputFile) throws FileNotFoundException {
            if (width <= 0 || height <= 0) {
                throw new IllegalArgumentException();
            }
            mWidth = width;
            mHeight = height;
            mDegress = degrees;

//            mNativeGifEncoder = new NativeGifEncoder();
//            mNativeGifEncoder.start(new FileOutputStream(outputFile));
//            mNativeGifEncoder.setRepeat(0);
            mEncoderPtr = NativeGifEncoder.createEncoder(outputFile.getAbsolutePath(), mWidth, mHeight);
            if (mEncoderPtr == 0) {
                throw new RuntimeException("create gif encoder error");
            }

            eglSetup();
            makeCurrent();
            setup();
        }

        /**
         * Creates interconnected instances of TextureRender, SurfaceTexture, and Surface.
         */
        private void setup() {
            mRender = new GPUImageRenderer(new GPUImageOESFilter(), this, true);
            if (mDegress == 90 || mDegress == 270) {
                mRender.setRotation(Rotation.fromInt(mDegress), true, false);
            } else {
                mRender.setRotation(Rotation.fromInt(mDegress), false, true);
            }
            mRender.onSurfaceCreated(null, null);
            mRender.onSurfaceChanged(null, mWidth, mHeight);

            mPixelBuf = ByteBuffer.allocateDirect(mWidth * mHeight * 4);
            mPixelBuf.order(ByteOrder.LITTLE_ENDIAN);
        }

        /**
         * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
         */
        private void eglSetup() {
            mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
            if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get EGL14 display");
            }
            int[] version = new int[2];
            if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
                mEGLDisplay = null;
                throw new RuntimeException("unable to initialize EGL14");
            }

            // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
            int[] attribList = {
                    EGL14.EGL_RED_SIZE, 8,
                    EGL14.EGL_GREEN_SIZE, 8,
                    EGL14.EGL_BLUE_SIZE, 8,
                    EGL14.EGL_ALPHA_SIZE, 8,
                    EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                    EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                    EGL14.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                    numConfigs, 0)) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }

            // Configure context for OpenGL ES 2.0.
            int[] attrib_list = {
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                    attrib_list, 0);
            checkEglError("eglCreateContext");
            if (mEGLContext == null) {
                throw new RuntimeException("null context");
            }

            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                    EGL14.EGL_WIDTH, mWidth,
                    EGL14.EGL_HEIGHT, mHeight,
                    EGL14.EGL_NONE
            };
            mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
            checkEglError("eglCreatePbufferSurface");
            if (mEGLSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        /**
         * Discard all resources held by this class, notably the EGL context.
         */
        public void release() {
            mRender.onSurfaceDestroy();
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                EGL14.eglReleaseThread();
                EGL14.eglTerminate(mEGLDisplay);
            }
            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            mEGLContext = EGL14.EGL_NO_CONTEXT;
            mEGLSurface = EGL14.EGL_NO_SURFACE;

            mSurface.release();
//            mNativeGifEncoder.finish();
            NativeGifEncoder.freeEncoder(mEncoderPtr);
            mEncoderPtr = 0;

            // this causes a bunch of warnings that appear harmless but might confuse someone:
            //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
            //mSurfaceTexture.release();

//            mTextureRender = null;
            mSurface = null;
//            mSurfaceTexture = null;
//            mNativeGifEncoder = null;
        }

        /**
         * Makes our EGL context and surface current.
         */
        public void makeCurrent() {
            if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
        }

        /**
         * Returns the Surface.
         */
        public Surface getSurface() {
            return mSurface;
        }

        /**
         * Latches the next buffer into the texture.  Must be called from the thread that created
         * the CodecOutputSurface object.  (More specifically, it must be called on the thread
         * with the EGLContext that contains the GL texture object used by SurfaceTexture.)
         */
        public void awaitNewImage() {
            final int TIMEOUT_MS = 2500;

            synchronized (mFrameSyncObject) {
                while (!mFrameAvailable) {
                    try {
                        // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                        // stalling the test if it doesn't arrive.
                        mFrameSyncObject.wait(TIMEOUT_MS);
                        if (!mFrameAvailable) {
                            // TODO: if "spurious wakeup", continue while loop
                            throw new RuntimeException("frame wait timed out");
                        }
                    } catch (InterruptedException ie) {
                        // shouldn't happen
                        throw new RuntimeException(ie);
                    }
                }
                mFrameAvailable = false;
            }

            // Latch the data.
//            mTextureRender.checkGlError("before updateTexImage");
//            mSurfaceTexture.updateTexImage();
        }

        /**
         * Draws the data from SurfaceTexture onto the current EGL surface.
         *
         * @param invert if set, render the image with Y inverted (0,0 in top left)
         */
        public void drawImage(boolean invert) {
//            mTextureRender.drawFrame(mSurfaceTexture, invert);
            mRender.onDrawFrame(null);
        }


        /**
         * Saves the current frame to disk as a PNG image.
         */
        public boolean saveFrame(long timestamp) throws IOException {
            // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
            // data (i.e. a byte of red, followed by a byte of green...).  To use the Bitmap
            // constructor that takes an int[] array with pixel data, we need an int[] filled
            // with little-endian ARGB data.
            //
            // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
            // copying data around for a 720p frame.  It's better to do a bulk get() and then
            // rearrange the data in memory.  (For comparison, the PNG compress takes about 500ms
            // for a trivial frame.)
            //
            // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
            // get() into a straight memcpy on most Android devices.  Our ints will hold ABGR data.
            // Swapping B and R gives us ARGB.  We need about 30ms for the bulk get(), and another
            // 270ms for the color swap.
            //
            // We can avoid the costly B/R swap here if we do it in the fragment shader (see
            // http://stackoverflow.com/questions/21634450/ ).
            //
            // Having said all that... it turns out that the Bitmap#copyPixelsFromBuffer()
            // method wants RGBA pixels, not ARGB, so if we create an empty bitmap and then
            // copy pixel data in we can avoid the swap issue entirely, and just copy straight
            // into the Bitmap from the ByteBuffer.
            //
            // Making this even more interesting is the upside-down nature of GL, which means
            // our output will look upside-down relative to what appears on screen if the
            // typical GL conventions are used.  (For ExtractMpegFrameTest, we avoid the issue
            // by inverting the frame when we render it.)
            //
            // Allocating large buffers is expensive, so we really want mPixelBuf to be
            // allocated ahead of time if possible.  We still get some allocations from the
            // Bitmap / PNG creation.

            boolean first = false;
            if (mPreFrameTimestamp == 0) {
                mPreFrameTimestamp = timestamp;
                first = true;
            }

            int delay = (int) ((timestamp - mPreFrameTimestamp) / 10000d + 0.5d);
            if (!first && delay < 10) {
                return false;
            }

            mPixelBuf.rewind();
            GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
                    mPixelBuf);

//            Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
            mPixelBuf.rewind();
//            bmp.copyPixelsFromBuffer(mPixelBuf);
            NativeGifEncoder.addFrame(mEncoderPtr, mWidth, mHeight, delay, mPixelBuf);
//            NativeGifEncoder.addFrameFromBitmap(mEncoderPtr, mWidth, mHeight, delay, bmp);
//            if (delay > 0) {
//                mNativeGifEncoder.setDelay(delay);
//            }
//            mNativeGifEncoder.addFrame(bmp);
//            bmp.recycle();

//            File outputFile = new File("/sdcard/ZCamera",
//                    String.format("frame-%02d.png", (int) (timestamp - mPreFrameTimestamp)));
//            BufferedOutputStream bos = null;
//            try {
//                bos = new BufferedOutputStream(new FileOutputStream(outputFile));
//                Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
//                mPixelBuf.rewind();
//                bmp.copyPixelsFromBuffer(mPixelBuf);
//                bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
//                bmp.recycle();
//            } finally {
//                if (bos != null) bos.close();
//            }
            if (VERBOSE) {
                Log.d(TAG, "Saved " + mWidth + "x" + mHeight + " frame as '" + delay + "'");
            }
            mPreFrameTimestamp = timestamp;
            return true;
        }

        /**
         * Checks for EGL errors.
         */
        private void checkEglError(String msg) {
            int error;
            if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }

        @Override
        public void onSurfaceTextureCreated(SurfaceTexture surfaceTexture) {
            mSurface = new Surface(surfaceTexture);
        }

        @Override
        public void onFrameAvaliable(long frameTimeNanos) {
            if (VERBOSE) Log.d(TAG, "new frame available");
            synchronized (mFrameSyncObject) {
                if (mFrameAvailable) {
                    throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
                }
                mFrameAvailable = true;
                mFrameSyncObject.notifyAll();
            }
        }
    }
}
