package com.jacky.demo.opengl;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.view.Surface;

import com.jacky.log.Logger;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;

/**
 * Created by lixinquan on 2019/3/5.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public final class MediaRecorder {

    // Message codes for the encoder-thread Handlers (shared by AudioHandler and
    // EncoderHandler). MSG_SET_TEXTURE_ID is declared but never sent in this file.
    private static final int MSG_START_RECORDING = 0;
    private static final int MSG_STOP_RECORDING = 1;
    private static final int MSG_FRAME_AVAILABLE = 2;
    private static final int MSG_SET_TEXTURE_ID = 3;
    private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
    private static final int MSG_QUIT = 5;

    private VideoEncoder mVideoEncoder;
    private AudioEncoder mAudioEncoder;
    private MediaMuxerWrapper mMediaMuxerWrapper;

    // Recording defaults. Despite the UPPER_SNAKE_CASE names these are mutable
    // instance fields, overwritten by the setVideo*/setAudioEncoder builder methods.
    private int VIDEO_WIDTH = 720;
    private int VIDEO_HEIGHT = 1280;
    private int FRAME_RATE = 20;
    private int BIT_RATE = 1024*1024*2;
    private int AUDIO_SOURCE = android.media.MediaRecorder.AudioSource.MIC;

    /**
     * Creates a recorder that writes an MPEG-4 file to {@code path}.
     *
     * @param path output file path, handed straight to {@link MediaMuxer}
     */
    public MediaRecorder(String path) {
        mMediaMuxerWrapper = new MediaMuxerWrapper(path);
    }
    /**
     * Enables video recording, sourcing frames from the given preview surface.
     * Registers this recorder with the surface view so it receives GL frames.
     *
     * @param surfaceView camera preview providing textures and the GL context
     * @return this recorder, for call chaining
     */
    public MediaRecorder setPreviewHolder(CameraGLSurfaceView surfaceView) {
        mVideoEncoder = new VideoEncoder(mMediaMuxerWrapper, surfaceView);
        return this;
    }
    /**
     * Sets the output video resolution.
     *
     * @param width  frame width in pixels, must be positive
     * @param height frame height in pixels, must be positive
     * @return this recorder, for call chaining
     * @throws IllegalArgumentException if either dimension is not positive; failing
     *         fast here beats the opaque MediaCodec.configure error it causes later
     */
    public MediaRecorder setVideoSize(int width, int height) {
        if (width <= 0 || height <= 0) {
            throw new IllegalArgumentException("video size must be positive: " + width + "x" + height);
        }
        VIDEO_WIDTH = width;
        VIDEO_HEIGHT = height;
        return this;
    }
    /**
     * Sets the requested video frame rate. The VideoEncoder may later clamp it to
     * what the camera preview can actually deliver (getFitFrameRate).
     *
     * @param rate frames per second, must be positive
     * @return this recorder, for call chaining
     * @throws IllegalArgumentException if {@code rate} is not positive
     */
    public MediaRecorder setVideoFrameRate(int rate) {
        if (rate <= 0) {
            throw new IllegalArgumentException("frame rate must be positive: " + rate);
        }
        FRAME_RATE = rate;
        return this;
    }
    /**
     * Sets the target video bit rate.
     *
     * @param bitRate bits per second, must be positive (default 2 Mbit/s)
     * @return this recorder, for call chaining
     * @throws IllegalArgumentException if {@code bitRate} is not positive
     */
    public MediaRecorder setVideoEncodingBitRate(int bitRate) {
        if (bitRate <= 0) {
            throw new IllegalArgumentException("bit rate must be positive: " + bitRate);
        }
        BIT_RATE = bitRate;
        return this;
    }

    /**
     * Enables audio recording from the given capture source.
     *
     * @param audioEncoder an {@link android.media.MediaRecorder.AudioSource} constant,
     *                     e.g. {@link android.media.MediaRecorder.AudioSource#MIC}.
     *                     NOTE(review): despite the parameter name this is the capture
     *                     source, not an encoder id — the codec is always AAC.
     * @return this recorder, for call chaining
     */
    public MediaRecorder setAudioEncoder(int audioEncoder) {
        AUDIO_SOURCE = audioEncoder;
        mAudioEncoder = new AudioEncoder(mMediaMuxerWrapper);
        return this;
    }

    /**
     * Starts whichever encoders were configured via setPreviewHolder / setAudioEncoder.
     *
     * @throws IllegalStateException if neither encoder has been configured
     */
    public void start() {
        final boolean hasVideo = mVideoEncoder != null;
        final boolean hasAudio = mAudioEncoder != null;
        if (!hasVideo && !hasAudio) {
            throw new IllegalStateException("U not call setPreviewHolder or setAudioEncoder.");
        }
        if (hasVideo) {
            mVideoEncoder.start();
        }
        if (hasAudio) {
            mAudioEncoder.start();
        }
    }

    /**
     * Stops recording and blocks until the shared muxer has fully shut down
     * (both encoder threads drained and MediaMuxerWrapper.stop() cleared isStart).
     * Returning before then could release resources while trailing frames are
     * still being written.
     */
    public void stop() {
        if (mVideoEncoder != null) mVideoEncoder.stop();
        if (mAudioEncoder != null) mAudioEncoder.stop();

        // Poll the volatile flag until the last encoder stops the muxer.
        while (mMediaMuxerWrapper.isStart) {
            Logger.i("muxwrapper", mMediaMuxerWrapper.isStart);
            try {
                Thread.sleep(100); // yield while the encoder threads finish draining
            } catch (InterruptedException e) {
                Logger.e(e);
                // BUG FIX: previously the interrupt was swallowed and the loop continued,
                // so an interrupted thread would busy-spin here forever (sleep() throws
                // immediately once the flag is cleared). Restore the flag and stop waiting.
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
    /** Mutes the audio track while recording continues; no-op if audio is not enabled. */
    public void turnOffSound() {
        if(mAudioEncoder != null) mAudioEncoder.turnOffSound();
    }
    /** Restores audio after {@link #turnOffSound()}; no-op if audio is not enabled. */
    public void turnOnSound() {
        if(mAudioEncoder != null) mAudioEncoder.turnOnSound();
    }

    private class MediaMuxerWrapper {
        private MediaMuxer mMediaMuxer;
        private volatile boolean isStart;
        private int mEncoderCount, mStartCount;

        public MediaMuxerWrapper(String path) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
                try {
                    mMediaMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                } catch (IOException e) {
                    Logger.e(e);
                    mMediaMuxer = null;
                }
            }
        }

        private int addTrack(MediaFormat format) {
            if (mMediaMuxer != null) {
                int id = mMediaMuxer.addTrack(format);
                Logger.d("track id:",id, ",MediaFormat:", format);
                start();
                return id;
            }
            return -1;
        }
        private boolean start() {
            if(mMediaMuxer == null) return true;
            mStartCount++;
            if(isStart == false && mStartCount == mEncoderCount)  {
                mMediaMuxer.start();
                isStart = true;
            }
            Logger.e("muxer is start:", isStart);
            return isStart;
        }
        synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
            if(mMediaMuxer != null && isStart)  mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
        }
        private void stop() {
            mStartCount--;
            if(mMediaMuxer != null && mStartCount <= 0 && isStart)  {
                mMediaMuxer.stop();
                mMediaMuxer.release();
                mMediaMuxer = null;
                isStart = false;
            }
        }
    }

    /**
     * Audio track recorder (original header: 音频文件录制 — "audio file recording").
     * Captures PCM from the microphone with {@link AudioRecord} and encodes it to AAC
     * ("audio/mp4a-latm") with {@link MediaCodec} on a dedicated looper thread,
     * writing encoded samples into the shared MediaMuxerWrapper.
     */
    class AudioEncoder implements Runnable {
        private final static String MIME_TYPE = "audio/mp4a-latm";
        private static final int SAMPLE_RATE = 44100;    // 44.1[KHz] is only setting guaranteed to be available on all devices.
        private static final int BIT_RATE = 128000;
        private static final int CHANNEL = AudioFormat.CHANNEL_IN_STEREO;
        private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;

        private MediaMuxerWrapper muxerWrapper;
        // Muxer track index for the audio stream; assigned under mReadyFence when the
        // encoder reports INFO_OUTPUT_FORMAT_CHANGED.
        private int trackId;
        private MediaCodec mMediaCodec;
        // isCapturing: loop-control flag cleared by stop().
        // isSlient (sic): while true, encoded samples are shrunk to 1 byte in audioStep().
        private volatile boolean isCapturing, isSlient;
        private AudioHandler mHandler;
        private final Object mReadyFence = new Object();
        private AudioRecord audioRecord;
        // System.nanoTime() base used to derive microsecond presentation timestamps.
        private long startTime;
        private int buffer_size;

        public AudioEncoder(MediaMuxerWrapper wrapper) {
            muxerWrapper = wrapper;
            // Register so the muxer waits for this encoder's track before starting.
            muxerWrapper.mEncoderCount++;
        }

        /**
         * Runs on the encoder thread (MSG_START_RECORDING): configures the AAC encoder
         * and the AudioRecord capture source, then schedules the first encode step.
         */
        private void init() {
            final MediaCodecInfo audioCodecInfo = selectMediaCodec(MIME_TYPE, false);
            if (audioCodecInfo == null) {
                Logger.e("Unable to find an appropriate codec for " + MIME_TYPE);
                return;
            }
            Logger.i("selected codec: " + audioCodecInfo.getName());
            try {
                // Channel count 2 matches CHANNEL_IN_STEREO above.
                final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 2);
                audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
                audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
                Logger.i("format: " + audioFormat);
                mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
                mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mMediaCodec.start();
                isCapturing = true;

                buffer_size = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL, AUDIO_FORMAT);
                audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL, AUDIO_FORMAT, buffer_size);
                Logger.v("AudioThread:start audio recording");
                startTime = System.nanoTime();
                audioRecord.startRecording();

                // Kick off the feed/drain loop; handleAudioStep() reschedules itself.
                mHandler.sendEmptyMessage(MSG_FRAME_AVAILABLE);
            } catch (Exception e) {
                mMediaCodec = null;
                Logger.e(e);
            }
        }

        /** Starts the capture thread; un-mutes first so a restart is audible. */
        public void start() {
            isSlient = false;
            // NOTE(review): isCapturing only becomes true once init() runs on the new
            // thread, so two rapid start() calls could spawn two threads — confirm
            // callers never do that.
            if(isCapturing == false) {
                new Thread(this, "AudioEncoder").start();
            }
        }

        /** Requests an asynchronous stop; the loop drains the encoder and quits. */
        public void stop() {
            isCapturing = false;
        }

        /** Mutes subsequent samples without stopping capture. */
        public void turnOffSound() {
            isSlient = true;
        }

        /** Re-enables sample recording after turnOffSound(). */
        public void turnOnSound() {
            isSlient = false;
        }

        /** Encoder thread body: serves the looper, then tears everything down on quit. */
        @Override
        public void run() {
            Looper.prepare();
            synchronized (mReadyFence){
                mHandler = new AudioHandler(this);
                mHandler.sendEmptyMessage(MSG_START_RECORDING);
            }
            Looper.loop();
            // Reached after MSG_QUIT: release codec, recorder and this encoder's muxer share.
            Logger.w("audio release...");
            synchronized (mReadyFence){
                isCapturing=false;
                mHandler=null;
            }

            if (mMediaCodec != null) {
                mMediaCodec.stop();
                mMediaCodec.release();
                mMediaCodec = null;
            }
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            if (muxerWrapper != null) {
                muxerWrapper.stop();
                muxerWrapper = null;
            }
        }

        /**
         * One scheduled encode step: while capturing, encode a chunk and reschedule
         * itself; once stopped, drain the encoder to EOS and quit the looper.
         */
        private void handleAudioStep() {
            if (isCapturing) {
                audioStep();
                mHandler.sendEmptyMessage(MSG_FRAME_AVAILABLE);
            } else {
                drainEncoder();
                mHandler.sendEmptyMessage(MSG_QUIT);
            }
        }
        // Spin audioStep() until it reports the end-of-stream buffer was emitted.
        private void drainEncoder() {
            while (!audioStep());
        }

        /**
         * Feeds one buffer of PCM into the encoder and drains any ready output to the muxer.
         *
         * @return true once the encoder emitted BUFFER_FLAG_END_OF_STREAM
         */
        private boolean audioStep() {
            int index = mMediaCodec.dequeueInputBuffer(0);
            if (index >= 0) {
                final ByteBuffer buffer = getInputBuffer(mMediaCodec, index);
                buffer.clear();
                // NOTE(review): reads up to buffer_size bytes and assumes the codec
                // input buffer is at least that large — verify on devices where the
                // codec's input buffers are smaller than AudioRecord's minimum.
                int length = audioRecord.read(buffer, buffer_size);//read PCM data
                if (length > 0) {
                    long time = (System.nanoTime() - startTime) / 1000; // ns -> us presentation time
                    // NOTE(review): EOS is only signalled when this read succeeds; if the
                    // final read returns <= 0 the drain loop depends on a later iteration
                    // delivering the flag — confirm this terminates on all devices.
                    mMediaCodec.queueInputBuffer(index, 0, length, time, isCapturing ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                }
            }
            MediaCodec.BufferInfo mInfo = new MediaCodec.BufferInfo();
            int outIndex;
            do {
                outIndex = mMediaCodec.dequeueOutputBuffer(mInfo, 0);
                if (outIndex >= 0) {
                    if ((mInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Logger.e("audio end");
                        mMediaCodec.releaseOutputBuffer(outIndex, false);
                        return true;
                    }
                    ByteBuffer buffer = getOutputBuffer(mMediaCodec, outIndex);
                    buffer.position(mInfo.offset);
                    if(isSlient) { // muted: shrink the sample instead of skipping it (original: 暂停则不录制声音)
                        // NOTE(review): muxing a 1-byte AAC "sample" is a hack that keeps
                        // timestamps flowing — confirm players tolerate it.
                        mInfo.size = 1;
                    }
                    muxerWrapper.writeSampleData(trackId, buffer, mInfo);
//                    Logger.d("sent " + mInfo.size + " bytes to muxer, timeus=" + mInfo.presentationTimeUs);
                    mMediaCodec.releaseOutputBuffer(outIndex, false);
                } else if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // No output ready; negative index also terminates the do/while below.
                } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Happens once before any data: register this track with the muxer.
                    synchronized (mReadyFence) {
                        trackId = muxerWrapper.addTrack(mMediaCodec.getOutputFormat());
                    }
                }
            } while (outIndex >= 0);
            return false;
        }

        // API 21+ exposes per-index getInputBuffer(); older releases use the buffer array.
        private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                return codec.getInputBuffer(index);
            } else {
                return codec.getInputBuffers()[index];
            }
        }
        // Same API split for output buffers.
        private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                return codec.getOutputBuffer(index);
            } else {
                return codec.getOutputBuffers()[index];
            }
        }
    }

    /**
     * Handler serving the audio encoder thread's looper. Holds the encoder through a
     * WeakReference so a queued message cannot keep the enclosing recorder alive.
     * Constructed on the encoder thread after Looper.prepare() in AudioEncoder.run().
     */
    private static class AudioHandler extends Handler {

        private WeakReference<AudioEncoder> encoderWeakReference;

        public AudioHandler(AudioEncoder encoder) {
            // Bind explicitly to the current (encoder) thread's looper instead of the
            // implicit-looper Handler() constructor, which is deprecated and bug-prone.
            super(Looper.myLooper());
            encoderWeakReference = new WeakReference<>(encoder);
        }

        @Override
        public void handleMessage(Message msg) {
            AudioEncoder audioEncoder = encoderWeakReference.get();
            if (audioEncoder == null) {
                return;
            }
            switch (msg.what) {
                case MSG_START_RECORDING :
                    audioEncoder.init();
                    break;
                case MSG_FRAME_AVAILABLE :
                    audioEncoder.handleAudioStep();
                    break;
                case MSG_QUIT:
                    Looper.myLooper().quit();
                    break;
                default:
                    // Other MSG_* codes are video-only; ignore them here.
                    break;
            }
        }
    }

    /**
     * Video track recorder (original header: 视频文件录制 — "video file recording").
     * Receives camera frames as GL textures on a dedicated encoder thread, renders
     * them into the MediaCodec input Surface via EGL, and feeds the H.264 output
     * into the shared MediaMuxerWrapper.
     */
    class VideoEncoder implements Runnable {

        private final static String MIME_TYPE = "video/avc"; // H.264 Advanced VideoEncoder

        private MediaMuxerWrapper muxerWrapper;
        // Muxer track index for the video stream, set on INFO_OUTPUT_FORMAT_CHANGED.
        private int trackId;
        private MediaCodec mCodec;
        // EGL window surface wrapping the codec's input Surface; frames are drawn into it.
        private WindowSurface mInputWindowSurface;
        private EglCore mEglCore;
        private final Object mReadyFence = new Object();
        // ----- accessed by multiple threads -----
        private volatile EncoderHandler mHandler;
        private MediaCodec.BufferInfo mBufferInfo;
        volatile boolean isCapturing;
        // GL renderer that draws the camera texture into the encoder surface.
        GLMedia media;
        // Camera texture id, published from the GL thread via setTextureId().
        int mTextId;
        // System.nanoTime() base for presentation timestamps.
        long startTime;

        public VideoEncoder(MediaMuxerWrapper wrapper, CameraGLSurfaceView surfaceView) {
            muxerWrapper = wrapper;
            // Register so the muxer waits for this encoder's track before starting.
            muxerWrapper.mEncoderCount++;

            // Clamp the requested frame rate to what the camera preview can deliver.
            int rate = surfaceView.getFitFrameRate(FRAME_RATE);
            FRAME_RATE = rate;
            surfaceView.setMovieRecorder(this);
        }

        /** Spawns the encoder thread; codec/EGL setup is deferred to the first frame. */
        public void start() {
            if(isCapturing == false) {
                new Thread(this, "TextureMovieEncoder").start();
            }
        }
        /** Asks the encoder thread to drain, release everything, and quit. */
        public void stop() {
            // NOTE(review): if no frame ever arrived, isCapturing is still false and the
            // looper thread started by start() is never told to quit (thread leak) —
            // confirm whether that is acceptable for this app's lifecycle.
            if(isCapturing == false) return;
            mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
            mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
        }

        /** Encoder thread body: serves messages until MSG_QUIT stops the looper. */
        @Override
        public void run() {
            Looper.prepare();
            synchronized (mReadyFence) {
                mHandler = new EncoderHandler(this);
                mReadyFence.notify();
            }
            Looper.loop();

            Logger.d("Encoder thread exiting");
            synchronized (mReadyFence) {
                isCapturing = false;
                mHandler = null;
            }
        }

        /*package*/ void pause() {}
        /*package*/ void resume() {}
        // Publishes the camera texture id used by drawCamera(); ignored until recording
        // has started and the GL renderer exists.
        /*package*/void setTextureId(int id) {
            synchronized (mReadyFence) {
                if (!isCapturing || media == null) {
                    return;
                }
            }
            mTextId = id;
        }
        /**
         * Called from the GL thread for every camera frame. The first frame bootstraps
         * codec/EGL state (MSG_START_RECORDING carrying the current EGL context);
         * subsequent frames are forwarded with their SurfaceTexture timestamp.
         */
        /*package*/ void frameAvailable(SurfaceTexture st) {
            if (st == null) return;
            synchronized (mReadyFence) {
                if (mHandler == null) return;
                if (!isCapturing) {
                    Logger.d("start recording prepare");
//                Logger.d(mHandler.getLooper().getThread().getName());
                    isCapturing = true;
                    mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, 0, 0, EGL14.eglGetCurrentContext()));
                } else {
                    // Transform matrix deliberately not captured; presumably GLMedia
                    // tolerates a null matrix — TODO confirm.
                    float[] transform = null;/*new float[16]; st.getTransformMatrix(transform);*/
                    long timestamp = st.getTimestamp();
                    if (timestamp == 0) {
                        // Drop frames with a zero timestamp rather than corrupt the
                        // presentation-time sequence.
                        Logger.w("HEY: got SurfaceTexture with timestamp of zero");
                        return;
                    }
                    // The 64-bit timestamp is split across arg1/arg2 (Message has no long field).
                    mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
                }
            }
        }

        // Notifies the encoder thread that the GL context changed so EGL state can be rebuilt.
        /*package*/void updateCurrentContext() {
            if(mHandler == null || isCapturing == false) return;
            mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, 0, 0, EGL14.eglGetCurrentContext()));
        }

        /**
         * Runs on the encoder thread: configures the H.264 encoder for Surface input,
         * builds the EGL context/window surface around the codec's input Surface,
         * and creates the GL renderer.
         */
        private void handlePrepare(EGLContext context) {
            mBufferInfo = new MediaCodec.BufferInfo();
            final MediaCodecInfo videoCodecInfo = selectMediaCodec(MIME_TYPE, true);
            if (videoCodecInfo == null) {
                Logger.e("Unable to find an appropriate codec for ", MIME_TYPE);
                return;
            }
            try {
                MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FORMAT);
                mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
                mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
                mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
                // Create an encoder for this MIME type (original comment said "decoder" — 解码器).
                mCodec = MediaCodec.createEncoderByType(MIME_TYPE);
                mCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            } catch (Exception e) {
                mCodec = null;
                Logger.e(e);
                return;
            }
            startTime = System.nanoTime();
            Surface mInputSurface = mCodec.createInputSurface();
            mCodec.start();

            mEglCore = new EglCore(context, EglCore.FLAG_RECORDABLE);
            mInputWindowSurface = new WindowSurface(mEglCore, mInputSurface, true);
            mInputWindowSurface.makeCurrent();
            media = new GLMedia();
        }
        /** Rebuilds EGL state against a new shared context (e.g. after surface recreation). */
        private void handleUpdateContext(EGLContext context) {
            // Release the EGLSurface and EGLContext.
            mInputWindowSurface.releaseEglSurface();
            mEglCore.release();

            // Create a new EGLContext and recreate the window surface.
            mEglCore = new EglCore(context, EglCore.FLAG_RECORDABLE);
            mInputWindowSurface.recreate(mEglCore);
            mInputWindowSurface.makeCurrent();
        }

        /** Drains the encoder to EOS, then releases codec, muxer share, and EGL state. */
        private void handleStopRecording() {
            Logger.d("handleStopRecording");
            drainEncoder(true);
            if (mCodec != null) {
                mCodec.stop();
                mCodec.release();
            }
            if(muxerWrapper != null)  muxerWrapper.stop();
            muxerWrapper = null;
            mCodec = null;

            if (mInputWindowSurface != null) {
                mInputWindowSurface.release();
                mInputWindowSurface = null;
            }
            if (mEglCore != null) {
                mEglCore.release();
                mEglCore = null;
            }
            synchronized (mReadyFence) {
                isCapturing = false;
                mHandler = null;
            }
        }
        /**
         * Encodes one frame: drains pending output, draws the camera texture into the
         * codec surface, stamps it relative to startTime, and swaps buffers.
         * NOTE(review): timestampNanos comes from SurfaceTexture while startTime is
         * System.nanoTime(); both are nanoseconds, but confirm they share a clock base.
         */
        private void handleFrameAvailable(float[] ta, long timestampNanos) {
            drainEncoder(false);
            drawCamera(ta);

            long time = timestampNanos - startTime;
            mInputWindowSurface.setPresentationTime(time);
            mInputWindowSurface.swapBuffers();
        }

        // Renders the current camera texture with the (possibly null) transform matrix.
        private void drawCamera(float[] ta) {
            media.setTextureID(mTextId, ta);
            media.draw();
        }

        /**
         * Pulls all pending output from the encoder and hands it to the muxer.
         *
         * @param endOfStream true to signal EOS and block until the encoder fully drains
         */
        private void drainEncoder(boolean endOfStream) {
            final int TIMEOUT_USEC = 10000;
//            Logger.d( "drainEncoder(" + endOfStream + ")");

            if (endOfStream) {
                Logger.d( "sending EOS to encoder");
                mCodec.signalEndOfInputStream();
            }

            ByteBuffer[] encoderOutputBuffers = mCodec.getOutputBuffers();
            while (true) {
                int encoderStatus = mCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (!endOfStream) {
                        break;      // out of while
                    } else {
                        Logger.d( "no output available, spinning to await EOS");
                    }
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = mCodec.getOutputBuffers();
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // should happen before receiving buffers, and should only happen once
                    if (muxerWrapper.isStart) {
                        throw new RuntimeException("format changed twice");
                    }
                    MediaFormat newFormat = mCodec.getOutputFormat();
                    Logger.d("encoder output format changed: " + newFormat);
                    // now that we have the Magic Goodies, start the muxer
                    trackId = muxerWrapper.addTrack(newFormat);
                } else if (encoderStatus < 0) {
                    Logger.w("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    // let's ignore it
                } else {
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // The codec config data was pulled out and fed to the muxer when we got
                        // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                        Logger.d("ignoring BUFFER_FLAG_CODEC_CONFIG");
                        mBufferInfo.size = 0;
                    }

                    if (mBufferInfo.size != 0) {
                        // adjust the ByteBuffer values to match BufferInfo (not needed?)
                        encodedData.position(mBufferInfo.offset);
                        encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                        muxerWrapper.writeSampleData(trackId, encodedData, mBufferInfo);
//                        Logger.d( "sent " + mBufferInfo.size + " bytes to muxer, ts=" + mBufferInfo.presentationTimeUs);
                    }
                    mCodec.releaseOutputBuffer(encoderStatus, false);
                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (!endOfStream) {
                            Logger.w("reached end of stream unexpectedly");
                        } else {
                            Logger.d( "end of stream reached");
                        }
                        break;      // out of while
                    }
                }
            }
        }
    }

    /**
     * Handler serving the video encoder thread's looper. Holds the encoder through a
     * WeakReference so a queued message cannot keep the enclosing recorder alive.
     * Constructed on the encoder thread after Looper.prepare() in VideoEncoder.run().
     */
    private static class EncoderHandler extends Handler {
        private WeakReference<VideoEncoder> mWeakEncoder;

        public EncoderHandler(VideoEncoder encoder) {
            // Bind explicitly to the current (encoder) thread's looper instead of the
            // implicit-looper Handler() constructor, which is deprecated and bug-prone.
            super(Looper.myLooper());
            mWeakEncoder = new WeakReference<>(encoder);
        }

        @Override  // runs on encoder thread
        public void handleMessage(Message msg) {
            int what = msg.what;

            VideoEncoder encoder = mWeakEncoder.get();
            if (encoder == null) {
                Logger.w("EncoderHandler.handleMessage: encoder is null");
                return;
            }
            switch (what) {
                case MSG_START_RECORDING :
                    encoder.handlePrepare((EGLContext) msg.obj);
                    break;
                case MSG_FRAME_AVAILABLE:
                    // The 64-bit timestamp was split across arg1/arg2 by frameAvailable().
                    long timestamp = (((long) msg.arg1) << 32) |
                            (((long) msg.arg2) & 0xffffffffL);
                    encoder.handleFrameAvailable((float[]) msg.obj, timestamp);
                    break;
                case MSG_STOP_RECORDING:
                    encoder.handleStopRecording();
                    break;
                case MSG_UPDATE_SHARED_CONTEXT :
                    encoder.handleUpdateContext((EGLContext) msg.obj);
                    // BUG FIX: a missing break here fell through into MSG_QUIT, quitting
                    // the looper and releasing the GL renderer on every shared-context
                    // update — silently ending the recording.
                    break;
                case MSG_QUIT:
                    Looper.myLooper().quit();
                    if(encoder.media != null) encoder.media.release();
                    encoder.media = null;
                    break;
                default:
                    throw new RuntimeException("Unhandled msg what=" + what);
            }
        }
    }

    // Surface-input color format: the video encoder receives frames through an input
    // Surface rendered by OpenGL, not via YUV byte buffers.
    private final static int COLOR_FORMAT = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

    /**
     * Scans the device's codec list for an encoder that supports the given MIME type.
     *
     * @param mimeType         MIME type to match, e.g. "video/avc" or "audio/mp4a-latm"
     * @param checkColorFormat when true, additionally require the encoder to support
     *                         {@link #COLOR_FORMAT} (surface input)
     * @return the first matching encoder, or null if none qualifies
     */
    private static final MediaCodecInfo selectMediaCodec(final String mimeType, boolean checkColorFormat) {
        Logger.v("selectMediaCodec:", mimeType, checkColorFormat);
        // Walk every codec the platform reports.
        final int total = MediaCodecList.getCodecCount();
        for (int index = 0; index < total; index++) {
            final MediaCodecInfo info = MediaCodecList.getCodecInfoAt(index);
            if (!info.isEncoder()) {
                continue; // decoders are irrelevant here
            }
            for (final String type : info.getSupportedTypes()) {
                Logger.i("supportedType:" + info.getName() + ",MIME=" + type);
                if (!type.equalsIgnoreCase(mimeType)) {
                    continue;
                }
                if (!checkColorFormat || selectColorFormat(info, mimeType) > 0) {
                    return info;
                }
            }
        }
        return null;
    }
    /**
     * Checks whether the codec supports {@link #COLOR_FORMAT} for the given MIME type.
     *
     * @return {@code COLOR_FORMAT} if supported, otherwise 0
     */
    private static int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
        Logger.i("selectColorFormat: ");
        final MediaCodecInfo.CodecCapabilities caps;
        try {
            // Querying capabilities can be slow on some devices; boost priority briefly.
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
            caps = codecInfo.getCapabilitiesForType(mimeType);
        } finally {
            Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
        }
        int result = 0;
        for (final int colorFormat : caps.colorFormats) {
            if (colorFormat == COLOR_FORMAT) {
                result = colorFormat;
                break;
            }
        }
        if (result == 0)
            Logger.e("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        else
            Logger.i("find good color format for ", codecInfo.getName(), "/", mimeType);
        return result;
    }
}
