/*
# Copyright (c) 2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/

#include "ohos_yuv_converter.h"

#include <cstdint>
#include <cstdlib>
#include <memory>
#include <mutex>
#include <string>

#include "ohos_egl_context_manager.h"
#include "rtc_base/logging.h"

namespace webrtc {

// Fragment shader used to pack an external (OES) texture into planar bytes:
// each fragment samples four horizontally adjacent source texels, applies the
// same coefficient vector (`coeffs` selects the Y, U or V weights; the .a
// component is the bias), and stores the four resulting bytes in the R, G, B
// and A channels of one output texel. `xUnit` is the horizontal distance
// between consecutive source samples in texture coordinates.
std::string YuvConverter::fragmentShader_ = R"delimiter(
#extension GL_OES_EGL_image_external : require
precision highp float;
varying vec2 vTexCoord;
uniform samplerExternalOES texture;
uniform vec2 xUnit;
uniform vec4 coeffs;
void main() {
    gl_FragColor.r = coeffs.a + dot(coeffs.rgb, texture2D(texture, vTexCoord - 1.5 * xUnit).rgb);
    gl_FragColor.g = coeffs.a + dot(coeffs.rgb, texture2D(texture, vTexCoord - 0.5 * xUnit).rgb);
    gl_FragColor.b = coeffs.a + dot(coeffs.rgb, texture2D(texture, vTexCoord + 0.5 * xUnit).rgb);
    gl_FragColor.a = coeffs.a + dot(coeffs.rgb, texture2D(texture, vTexCoord + 1.5 * xUnit).rgb);
})delimiter";

// Construction performs no work; GL resources are created later in Init().
YuvConverter::YuvConverter() = default;

// Destruction performs no GL cleanup; callers release resources via Clear().
YuvConverter::~YuvConverter() = default;

// Creates the shader callback and the GL drawer built around the packing
// fragment shader, then allocates the drawer's GL resources.
// NOTE(review): assumes a current GL context on the calling thread — the GL
// work in Convert() runs on thread_; confirm Init() is called there too.
void YuvConverter::Init()
{
    converterShaderCallbacks_ = std::make_shared<ShaderCallback>();
    drawer_ = std::make_shared<OhosGLDrawer>(fragmentShader_, converterShaderCallbacks_);
    drawer_->CreateGLResources();
}

// Releases the GL resources created by Init().
// Guard against Clear() being called before Init() (or after a failed Init()),
// where drawer_ is still null — Convert() performs the same null check.
void YuvConverter::Clear() {
    if (drawer_ != nullptr) {
        drawer_->CleanGLResources();
    }
}

// Records the thread on which all GL conversion work will be dispatched.
// The pointer is stored, not owned; it must outlive this converter.
// Convert() returns nullptr until a non-null thread has been set.
void YuvConverter::SetConvertThread(rtc::Thread* thread) {
    thread_ = thread;
}

// Converts an external-texture frame into an I420 buffer by rendering the
// texture three times (Y, U, V) into an offscreen FBO with the packing
// shader, then reading the packed bytes back with glReadPixels.
// All GL work is marshalled onto thread_ via BlockingCall.
// Returns nullptr if no convert thread is set, the drawer is missing,
// the FBO cannot be sized, or the readback buffer cannot be allocated.
rtc::scoped_refptr<webrtc::I420Buffer> YuvConverter::Convert(OhosVideoBuffer *videoBuffer)
{
    if (!thread_) {
        return nullptr;
    }

    return thread_->BlockingCall([this, videoBuffer]()->rtc::scoped_refptr<webrtc::I420Buffer> {
        int frameWidth = videoBuffer->width();
        int frameHeight = videoBuffer->height();
        int stride = ((frameWidth + 7) / 8) * 8;    // row length rounded up to a multiple of 8 bytes
        int uvHeight = (frameHeight + 1) / 2;       // chroma plane height
        int totalHeight = frameHeight + uvHeight;   // Y rows on top, U|V half-rows below
        // The shader packs 4 output bytes into one RGBA texel, so the render
        // target is a quarter of the byte stride wide.
        int viewportWidth = stride / 4;

        if (drawer_ == nullptr) {
            RTC_LOG_T(LS_ERROR)<<"YuvConverter::Convert, drawer is nullptr!";
            return nullptr;
        }

        if (!glTextureFrameBuffer_.SetSize(viewportWidth, totalHeight)) {
            RTC_LOG_T(LS_ERROR)<<"YuvConverter::Convert, SetSize failed!";
            return nullptr;
        }

        glBindFramebuffer(GL_FRAMEBUFFER, glTextureFrameBuffer_.GetFrameBufferID());

        {
            // Hold the per-texture mutex while sampling the shared external texture.
            std::lock_guard<std::mutex> lock(*(OhosEGLContextManager::GetInstance().GetTextureMutex(videoBuffer->GetVideoFrameBuffer().textureID)));
            // Y plane: full resolution, occupies the top frameHeight rows.
            converterShaderCallbacks_->setPlanY();
            drawer_->DrawFrame(videoBuffer->GetVideoFrameBuffer(), matrix_,
                frameWidth, frameHeight, 0, 0, viewportWidth, frameHeight);

            // U plane: half resolution, left half of the bottom uvHeight rows.
            converterShaderCallbacks_->setPlanU();
            drawer_->DrawFrame(videoBuffer->GetVideoFrameBuffer(), matrix_,
                frameWidth, frameHeight, 0, frameHeight, viewportWidth / 2 , uvHeight);

            // V plane: half resolution, right half of the bottom uvHeight rows.
            converterShaderCallbacks_->setPlanV();
            drawer_->DrawFrame(videoBuffer->GetVideoFrameBuffer(), matrix_,
                frameWidth, frameHeight, viewportWidth / 2, frameHeight, viewportWidth / 2, uvHeight);
        }

        // RAII readback buffer: released automatically on every return path.
        std::unique_ptr<uint8_t, decltype(&free)> yuvBuffer(
            static_cast<uint8_t*>(malloc(static_cast<size_t>(viewportWidth) * totalHeight * 4)), &free);
        if (yuvBuffer == nullptr) {
            RTC_LOG_T(LS_ERROR)<<"YuvConverter::Convert, glReadPixels malloc failed!";
            glBindFramebuffer(GL_FRAMEBUFFER, 0);  // don't leave the FBO bound on failure
            return nullptr;
        }
        glReadPixels(0, 0, viewportWidth, totalHeight, GL_RGBA, GL_UNSIGNED_BYTE, yuvBuffer.get());
        glBindFramebuffer(GL_FRAMEBUFFER, 0);

        // Packed layout: frameHeight Y rows of `stride` bytes, followed by
        // uvHeight rows carrying U in the left stride/2 bytes and V in the right,
        // so both chroma planes advance by a full `stride` per row.
        uint8_t *yData = yuvBuffer.get();
        int32_t ySize = stride * frameHeight;
        uint8_t *uData = yData + ySize;
        uint8_t *vData = uData + stride / 2;
        int32_t uvStride = stride;
        // NOTE(review): the width passed to Copy() is `stride`, not `frameWidth`,
        // so frames whose width is not a multiple of 8 keep the alignment padding
        // as visible columns — confirm whether `frameWidth` was intended here.
        rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = ::webrtc::I420Buffer::Copy(stride, frameHeight, yData, stride,
                                                            uData, uvStride, vData, uvStride);
        return i420Buffer;
    });
}

// Selects luma extraction for the next DrawFrame: Y coefficients and a
// one-source-pixel horizontal step (full-resolution sampling).
void YuvConverter::ShaderCallback::setPlanY()
{
    coeffs = yCoeffs;
    stepSize = 1.0f;
}

// Selects U-chroma extraction for the next DrawFrame: U coefficients and a
// two-source-pixel horizontal step (half-resolution sampling).
void YuvConverter::ShaderCallback::setPlanU()
{
    coeffs = uCoeffs;
    stepSize = 2.0f;
}

// Selects V-chroma extraction for the next DrawFrame: V coefficients and a
// two-source-pixel horizontal step (half-resolution sampling).
void YuvConverter::ShaderCallback::setPlanV()
{
    coeffs = vCoeffs;
    stepSize = 2.0f;
}

// Intentionally empty: this converter needs no one-time setup when a new
// shader program is created; all uniforms are set per-frame in OnPrepareShader.
void YuvConverter::ShaderCallback::OnNewShader(std::shared_ptr<ShaderProgram> shader)
{

}

// Uploads the per-plane uniforms before each draw: the 4-component
// coefficient vector chosen by setPlanY/U/V, and the horizontal sampling
// step "xUnit" expressed in normalized texture units (stepSize source
// pixels divided by the frame width; the vertical component is zero).
void YuvConverter::ShaderCallback::OnPrepareShader(std::shared_ptr<ShaderProgram> shader, float *matrix,
            int frameWidth, int frameHeight, int viewportWidth, int viewportHeight)
{
    const float xStep = stepSize / static_cast<float>(frameWidth);
    shader->SetFloat4v("coeffs", coeffs, 4);
    shader->SetFloat2v("xUnit", xStep, 0);
}

} // namespace webrtc
