/**
 *
 * Created by 公众号：字节流动 on 2021/3/16.
 * https://github.com/githubhaohao/LearnFFmpeg
 * 最新文章首发于公众号：字节流动，有疑问或者技术交流可以添加微信 Byte-Flow ,领取视频教程, 拉你进技术交流群
 *
 * */


#include "NativeRender.h"
#include <android/bitmap.h>

NativeRender::NativeRender(JNIEnv *env, jobject surface) : VideoRender(VIDEO_RENDER_ANWINDOW) {
    //1. Acquire the ANativeWindow backing the Surface handed down from the
    //   Java-layer SurfaceView; it is released in the destructor.
    m_NativeWindow = ANativeWindow_fromSurface(env, surface);
}

NativeRender::~NativeRender() {
    //4. Drop our reference to the ANativeWindow acquired in the constructor.
    if (m_NativeWindow != nullptr) {
        ANativeWindow_release(m_NativeWindow);
    }
}

// Computes the letterboxed target size (m_DstWidth x m_DstHeight) that fits the
// video into the window while preserving aspect ratio, configures the window
// buffers for RGBA8888 at that size, and reports the size back via dstSize.
//
// @param videoWidth  source video width in pixels (must be > 0)
// @param videoHeight source video height in pixels (must be > 0)
// @param dstSize     out-param: receives {width, height}; must hold 2 ints.
//                    Left untouched if initialization bails out early.
void NativeRender::Init(int videoWidth, int videoHeight, int *dstSize) {
    LOGCATE("NativeRender::Init m_NativeWindow=%p, video[w,h]=[%d, %d]", m_NativeWindow, videoWidth,
            videoHeight);
    if (m_NativeWindow == nullptr) return;
    // Guard: a zero (or negative) video dimension would divide by zero below.
    if (videoWidth <= 0 || videoHeight <= 0) return;

    int windowWidth = ANativeWindow_getWidth(m_NativeWindow);
    int windowHeight = ANativeWindow_getHeight(m_NativeWindow);

    // Compare aspect ratios via cross-multiplication: if the window is
    // relatively narrower than the video, fit to width; otherwise fit to height.
    if (windowWidth < windowHeight * videoWidth / videoHeight) {
        m_DstWidth = windowWidth;
        m_DstHeight = windowWidth * videoHeight / videoWidth;
    } else {
        m_DstWidth = windowHeight * videoWidth / videoHeight;
        m_DstHeight = windowHeight;
    }
    LOGCATE("NativeRender::Init window[w,h]=[%d, %d],DstSize[w, h]=[%d, %d]", windowWidth,
            windowHeight, m_DstWidth, m_DstHeight);

    //2. Set the render area size and the input pixel format.
    ANativeWindow_setBuffersGeometry(m_NativeWindow, m_DstWidth,
                                     m_DstHeight, WINDOW_FORMAT_RGBA_8888);

    // Report the chosen target size back to the caller.
    if (dstSize != nullptr) {
        dstSize[0] = m_DstWidth;
        dstSize[1] = m_DstHeight;
    }
}

// Renders one decoded RGBA frame into the window buffer, then alpha-blends the
// optional overlay image (member fields data/w/h — presumably set elsewhere in
// this class; data is an RGBA buffer of w*h pixels) on top of it.
//
// @param pImage decoded frame; ppPlane[0] is assumed to be tightly-packed RGBA
//               sized to match [m_DstWidth, m_DstHeight] — TODO confirm the
//               scaler upstream guarantees this.
void NativeRender::RenderVideoFrame(NativeImage *pImage) {
    //3. Render.
    if (m_NativeWindow == nullptr || pImage == nullptr) return;

    // Lock the window to get the screen buffer pointer. On failure, `bits`
    // is not valid, so bail out instead of writing through it.
    if (ANativeWindow_lock(m_NativeWindow, &m_NativeWindowBuffer, nullptr) != 0) return;
    uint8_t *dstBuffer = static_cast<uint8_t *>(m_NativeWindowBuffer.bits);

    int srcLineSize = pImage->width * 4;                // source stride in bytes (RGBA)
    int dstLineSize = m_NativeWindowBuffer.stride * 4;  // window buffer stride in bytes
    // Copy no more than either stride allows, in case the window stride is
    // smaller than the frame width.
    int copyLineSize = srcLineSize < dstLineSize ? srcLineSize : dstLineSize;

    // Clamp the overlay row width to the destination stride to avoid overrun.
    int overlayLineSize = 0;
    if (data != NULL) {
        overlayLineSize = w * 4;
        if (overlayLineSize > dstLineSize) overlayLineSize = dstLineSize;
    }

    for (int i = 0; i < m_DstHeight; ++i) {
        // Copy one row of the video frame.
        memcpy(dstBuffer + i * dstLineSize, pImage->ppPlane[0] + i * srcLineSize, copyLineSize);

        if (data != NULL && i < h) { // overlay present and covers this row
            uint8_t *dstRow = dstBuffer + i * dstLineSize;
            uint8_t *ovRow = data + i * w * 4;
            for (int j = 0; j < overlayLineSize; j += 4) {
                // Alpha of THIS pixel lives at offset j+3. (The previous code
                // read j+4 — the next pixel's R byte — which both blended on
                // the wrong condition and read past the row's last pixel.)
                uint8_t alpha = ovRow[j + 3];
                if (alpha != 255) {
                    // Partially transparent: blend overlay over the video pixel.
                    ARGB8888_PixelMixing(dstRow[j], dstRow[j + 1], dstRow[j + 2],
                                         ovRow[j], ovRow[j + 1], ovRow[j + 2], alpha,
                                         dstRow[j], dstRow[j + 1], dstRow[j + 2], dstRow[j + 3]);
                } else {
                    // Fully opaque: overwrite the pixel directly.
                    memcpy(dstRow + j, ovRow + j, 4);
                }
            }
        }
    }
    // Unlock the window and post the buffer to the screen.
    ANativeWindow_unlockAndPost(m_NativeWindow);
}

void NativeRender::UnInit() {
    // Release the overlay image buffer, if one was ever set, and clear the
    // pointer so the render loop stops blending it.
    if (data == NULL) return;
    delete[] data;
    data = NULL;
}

// Alpha-blends one foreground RGBA pixel over one background RGB pixel
// ("over" compositing): out = (fore * a + back * (255 - a)) / 255 per channel.
// Output references may alias the background inputs safely, since the inputs
// are taken by value.
void
NativeRender::ARGB8888_PixelMixing(uint8_t inBackR,uint8_t inBackG,uint8_t inBackB,
                                   uint8_t foreR,uint8_t foreG,uint8_t foreB,uint8_t foreA,
                                   uint8_t &outR,uint8_t &outG,uint8_t &outB,uint8_t &outA) {
    const int a = foreA;        // foreground opacity, 0..255
    const int ia = 255 - a;     // background weight
    outR = (a * foreR + ia * inBackR) / 255;
    outG = (a * foreG + ia * inBackG) / 255;
    outB = (a * foreB + ia * inBackB) / 255;
    // The composited result is what the screen shows, so force full opacity.
    outA = 0xFF;
}

