package org.webrtc;

import android.content.Context;
import android.graphics.ImageFormat;
import android.os.SystemClock;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;

/**
 * A push-style {@link VideoCapturer}: instead of driving a camera itself, it exposes
 * {@link #onFrame(byte[], int, int, int, int, long)} so an external producer can feed
 * raw YUV byte arrays, which are converted to I420 {@link VideoFrame}s and forwarded
 * to the {@link CapturerObserver} supplied in {@link #initialize}.
 *
 * <p>Thread-safety: {@code initialize()} and {@code onFrame()} may run on different
 * threads; the observer field is {@code volatile} and snapshotted before use.
 */
public class MyVideoCapturer implements VideoCapturer {
    private static final String TAG = MyVideoCapturer.class.getSimpleName();

    // Written once by initialize(), read by the frame-producer thread in onFrame().
    private volatile CapturerObserver capturerObserver;

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {
        Log.i(TAG, "initialize()");
        this.capturerObserver = capturerObserver;
    }

    @Override
    public void startCapture(int width, int height, int framerate) {
        Log.i(TAG, "startCapture(), width: " + width + ", height: " + height + ", framerate: " + framerate);
        // The VideoCapturer contract expects the observer to be told capture started,
        // so the downstream VideoSource knows frames are coming.
        CapturerObserver observer = capturerObserver;
        if (observer != null) {
            observer.onCapturerStarted(true);
        }
    }

    @Override
    public void stopCapture() {
        Log.i(TAG, "stopCapture()");
        CapturerObserver observer = capturerObserver;
        if (observer != null) {
            observer.onCapturerStopped();
        }
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {
        // No-op: the effective format is dictated by whatever the producer pushes.
        Log.i(TAG, "changeCaptureFormat(), width: " + width + ", height: " + height + ", framerate: " + framerate);
    }

    @Override
    public void dispose() {
        Log.i(TAG, "dispose()");
        capturerObserver = null;
    }

    @Override
    public boolean isScreencast() {
        return false;
    }

    /**
     * Entry point for external producers: submit one raw YUV frame for conversion
     * and delivery to WebRTC.
     *
     * @param data        raw pixel bytes; layout must match {@code format}
     * @param width       frame width in pixels
     * @param height      frame height in pixels
     * @param format      {@link ImageFormat#YUV_420_888} (treated as packed planar I420)
     *                    or {@link ImageFormat#NV21}
     * @param orientation frame rotation in degrees (0/90/180/270)
     * @param timeInNs    capture timestamp in nanoseconds; if {@code <= 0}, the current
     *                    {@code elapsedRealtime} is used instead
     * @throws IllegalArgumentException if the format is unsupported or {@code data} is too short
     */
    public void onFrame(byte[] data, int width, int height, int format, int orientation, long timeInNs) {
        // Snapshot to avoid a race between the null check and the call below.
        CapturerObserver observer = capturerObserver;
        if (observer == null) {
            Log.w(TAG, "onFrame() dropped: capturer not initialized");
            return;
        }
        if (timeInNs <= 0) {
            timeInNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
        }
        VideoFrame videoFrame = getNextFrame(data, width, height, format, orientation, timeInNs);
        try {
            observer.onFrameCaptured(videoFrame);
        } finally {
            // Drop our reference; the observer retains the frame if it needs it longer.
            videoFrame.release();
        }
    }

    /**
     * Converts a raw YUV byte array into an I420 {@link VideoFrame}.
     *
     * <p>I420 layout: {@code YYYY... UU... VV...} (planar, U plane before V plane).<br>
     * NV21 layout: {@code YYYY... VUVU...} (Y plane, then interleaved V/U pairs).
     *
     * @throws IllegalArgumentException if {@code format} is unsupported or {@code data}
     *                                  is smaller than the frame dimensions require
     */
    private VideoFrame getNextFrame(byte[] data, int frameWidth, int frameHeight, int format, int rotation, long captureTimeNs) {
        if (format != ImageFormat.YUV_420_888 && format != ImageFormat.NV21) {
            throw new IllegalArgumentException("do not support this format now. format: " + format);
        }

        JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
        ByteBuffer dataY = buffer.getDataY();
        ByteBuffer dataU = buffer.getDataU();
        ByteBuffer dataV = buffer.getDataV();
        int chromaHeight = (frameHeight + 1) / 2;
        int sizeY = frameHeight * buffer.getStrideY();
        int sizeU = chromaHeight * buffer.getStrideU();
        int sizeV = chromaHeight * buffer.getStrideV();

        // Same total for both layouts: NV21's interleaved chroma is sizeU + sizeV bytes.
        int required = sizeY + sizeU + sizeV;
        if (data.length < required) {
            // Release the native-backed buffer before bailing out, or it leaks.
            buffer.release();
            throw new IllegalArgumentException("frame data too small: " + data.length
                    + " bytes, need " + required + " for " + frameWidth + "x" + frameHeight);
        }

        if (format == ImageFormat.YUV_420_888) {
            // NOTE(review): assumes the producer packed the planes as contiguous I420
            // (Y, then U, then V) matching this buffer's strides. YUV_420_888 Images
            // from the camera do NOT guarantee this layout — confirm with the caller.
            dataY.put(data, 0, sizeY);
            dataY.flip();
            dataU.put(data, sizeY, sizeU);
            dataU.flip();
            dataV.put(data, sizeY + sizeU, sizeV);
            dataV.flip();
        } else {
            // NV21: copy the Y plane, then de-interleave the V,U pairs into planes.
            dataY.put(data, 0, sizeY);
            dataY.flip();
            for (int i = 0; i < sizeU; i++) {
                dataU.put(data[sizeY + 2 * i + 1]); // U is the second byte of each pair
                dataV.put(data[sizeY + 2 * i]);     // V is the first byte of each pair
            }
            dataU.flip();
            dataV.flip();
        }

        return new VideoFrame(buffer, rotation, captureTimeNs);
    }
}
