/*
 * @features: 功能
 * @description: 说明
 * @Date: 2022-08-26 18:39:27
 * @Author: judu233(769471424@qq.com)
 * @LastEditTime: 2022-08-28 19:53:19
 * @LastEditors: judu233
 */
import { ExtendsLoad } from "../CCExtends";

/** Static-side extensions for cc.Camera (decorator merges members onto the class itself). */
@ExtendsLoad(cc.Camera)
export class CameraBaseExtends {
}
/**扩展组件实例方法 */
@ExtendsLoad(cc.Camera.prototype)
export class CameraExtends {
    /**
     * kawaseBg背景模糊
     * @param offset 模糊程度
     * @param outSprite 要输出截图的精灵组件
     * @param kawaseTarget 模糊目标，为空默认全屏
     * @returns 
     */
    static kawaseBg(offset = 1, outSprite?: cc.Sprite, kawaseTarget?: cc.Node) {
        // 首先获取全屏的截图
        let { node, texture: srcRT } = this.screenShot(kawaseTarget);
        return new Promise<{ dstRT: cc.RenderTexture, sf: cc.SpriteFrame }>(resolve => {
            //实现动态创建effect ,material
            // let effectTest = '{"__type__":"cc.EffectAsset","_name":"eazax-kawase-blur","_objFlags":0,"_native":"","properties":null,"techniques":[{"passes":[{"blendState":{"targets":[{"blend":true}]},"rasterizerState":{"cullMode":0},"properties":{"resolution":{"value":[500,500],"type":14},"offset":{"value":[1],"range":[0,10],"type":13},"texture":{"value":"white","type":29},"alphaThreshold":{"value":[0.5],"type":13}},"program":"eazax-kawase-blur|vs|fs"}]}],"shaders":[{"hash":2278172867,"glsl3":{"vert":"\\nprecision highp float;\\nuniform CCGlobal {\\n  mat4 cc_matView;\\n  mat4 cc_matViewInv;\\n  mat4 cc_matProj;\\n  mat4 cc_matProjInv;\\n  mat4 cc_matViewProj;\\n  mat4 cc_matViewProjInv;\\n  vec4 cc_cameraPos;\\n  vec4 cc_time;\\n  mediump vec4 cc_screenSize;\\n  mediump vec4 cc_screenScale;\\n};\\nuniform CCLocal {\\n  mat4 cc_matWorld;\\n  mat4 cc_matWorldIT;\\n};\\nin vec3 a_position;\\nin vec4 a_color;\\nout vec4 v_color;\\n#if USE_TEXTURE\\nin vec2 a_uv0;\\nout vec2 v_uv0;\\n#endif\\nvoid main () {\\n  vec4 pos = vec4(a_position, 1);\\n  #if CC_USE_MODEL\\n  pos = cc_matViewProj * cc_matWorld * pos;\\n  #else\\n  pos = cc_matViewProj * pos;\\n  #endif\\n  #if USE_TEXTURE\\n  v_uv0 = a_uv0;\\n  #endif\\n  v_color = a_color;\\n  gl_Position = pos;\\n}","frag":"\\nprecision highp float;\\n#if USE_ALPHA_TEST\\n  uniform ALPHA_TEST {\\n    float alphaThreshold;\\n  };\\n#endif\\nvoid ALPHA_TEST (in vec4 color) {\\n  #if USE_ALPHA_TEST\\n      if (color.a < alphaThreshold) discard;\\n  #endif\\n}\\nvoid ALPHA_TEST (in float alpha) {\\n  #if USE_ALPHA_TEST\\n      if (alpha < alphaThreshold) discard;\\n  #endif\\n}\\nin vec4 v_color;\\n#if USE_TEXTURE\\nin vec2 v_uv0;\\nuniform sampler2D texture;\\n#endif\\nuniform Properties {\\n  vec2 resolution;\\n  float offset;\\n};\\n#if USE_TEXTURE\\nvec4 kawaseBlur(sampler2D tex, vec2 uv, vec2 texelSize, float offset) {\\n  vec4 o = vec4(0);\\n  o += texture2D(tex, uv + vec2(offset + 0.5, offset + 0.5) * texelSize);\\n  o 
+= texture2D(tex, uv + vec2(-offset - 0.5, offset + 0.5) * texelSize);\\n  o += texture2D(tex, uv + vec2(-offset - 0.5, -offset - 0.5) * texelSize);\\n  o += texture2D(tex, uv + vec2(offset + 0.5, -offset - 0.5) * texelSize);\\n  return o * 0.25;\\n}\\n#endif\\nvoid main () {\\n  vec4 o = vec4(1, 1, 1, 1);\\n  #if USE_TEXTURE\\n  o *= kawaseBlur(texture, v_uv0, 1.0 / resolution, offset);\\n  #endif\\n  o *= v_color;\\n  ALPHA_TEST(o);\\n  #if USE_BGRA\\n    gl_FragColor = o.bgra;\\n  #else\\n    gl_FragColor = o.rgba;\\n  #endif\\n}"},"glsl1":{"vert":"\\nprecision highp float;\\nuniform mat4 cc_matViewProj;\\nuniform mat4 cc_matWorld;\\nattribute vec3 a_position;\\nattribute vec4 a_color;\\nvarying vec4 v_color;\\n#if USE_TEXTURE\\nattribute vec2 a_uv0;\\nvarying vec2 v_uv0;\\n#endif\\nvoid main () {\\n  vec4 pos = vec4(a_position, 1);\\n  #if CC_USE_MODEL\\n  pos = cc_matViewProj * cc_matWorld * pos;\\n  #else\\n  pos = cc_matViewProj * pos;\\n  #endif\\n  #if USE_TEXTURE\\n  v_uv0 = a_uv0;\\n  #endif\\n  v_color = a_color;\\n  gl_Position = pos;\\n}","frag":"\\nprecision highp float;\\n#if USE_ALPHA_TEST\\n  uniform float alphaThreshold;\\n#endif\\nvoid ALPHA_TEST (in vec4 color) {\\n  #if USE_ALPHA_TEST\\n      if (color.a < alphaThreshold) discard;\\n  #endif\\n}\\nvoid ALPHA_TEST (in float alpha) {\\n  #if USE_ALPHA_TEST\\n      if (alpha < alphaThreshold) discard;\\n  #endif\\n}\\nvarying vec4 v_color;\\n#if USE_TEXTURE\\nvarying vec2 v_uv0;\\nuniform sampler2D texture;\\n#endif\\nuniform vec2 resolution;\\nuniform float offset;\\n#if USE_TEXTURE\\nvec4 kawaseBlur(sampler2D tex, vec2 uv, vec2 texelSize, float offset) {\\n  vec4 o = vec4(0);\\n  o += texture2D(tex, uv + vec2(offset + 0.5, offset + 0.5) * texelSize);\\n  o += texture2D(tex, uv + vec2(-offset - 0.5, offset + 0.5) * texelSize);\\n  o += texture2D(tex, uv + vec2(-offset - 0.5, -offset - 0.5) * texelSize);\\n  o += texture2D(tex, uv + vec2(offset + 0.5, -offset - 0.5) * texelSize);\\n  return o * 
0.25;\\n}\\n#endif\\nvoid main () {\\n  vec4 o = vec4(1, 1, 1, 1);\\n  #if USE_TEXTURE\\n  o *= kawaseBlur(texture, v_uv0, 1.0 / resolution, offset);\\n  #endif\\n  o *= v_color;\\n  ALPHA_TEST(o);\\n  #if USE_BGRA\\n    gl_FragColor = o.bgra;\\n  #else\\n    gl_FragColor = o.rgba;\\n  #endif\\n}"},"builtins":{"globals":{"blocks":[{"name":"CCGlobal","defines":[]}],"samplers":[]},"locals":{"blocks":[{"name":"CCLocal","defines":[]}],"samplers":[]}},"defines":[{"name":"USE_TEXTURE","type":"boolean","defines":[]},{"name":"CC_USE_MODEL","type":"boolean","defines":[]},{"name":"USE_ALPHA_TEST","type":"boolean","defines":[]},{"name":"USE_BGRA","type":"boolean","defines":[]}],"blocks":[{"name":"ALPHA_TEST","members":[{"name":"alphaThreshold","type":13,"count":1}],"defines":["USE_ALPHA_TEST"],"binding":0},{"name":"Properties","members":[{"name":"resolution","type":14,"count":1},{"name":"offset","type":13,"count":1}],"defines":[],"binding":1}],"samplers":[{"name":"texture","type":29,"count":1,"defines":["USE_TEXTURE"],"binding":30}],"record":null,"name":"eazax-kawase-blur|vs|fs"}]}';
            // // @ts-expect-error
            // let effect = cc.deserialize(effectTest, { priority: 0, responseType: 'json' })
            // effect?.onLoad?.();
            // effect.__onLoadInvoked__ = true;
            cc.resources.load('Shader/eazax-kawase-blur', cc.EffectAsset, (err, effect) => {
                dealTx(effect);
            });
            let dealTx = (effect) => {
                // 然后根据节点设置材质
                let material = cc.Material.create(effect, 0);
                material.addRef();
                material.define('USE_TEXTURE', true);
                material.setProperty('resolution', cc.v2(node.width, node.height));
                material.setProperty('offset', offset);
                // 创建临时 RenderTexture
                let dstRT = new cc.RenderTexture();
                // 多 Pass 处理，注：由于 OpenGL 中的纹理是倒置的，所以双数 Pass 的出的图像是颠倒的
                this.renderWithMaterial(srcRT, dstRT, material);
                this.renderWithMaterial(dstRT, srcRT, material);
                this.renderWithMaterial(srcRT, dstRT, material);
                this.renderWithMaterial(dstRT, srcRT, material);
                this.renderWithMaterial(srcRT, dstRT, material);
                // 使用经过处理的 RenderTexture
                let sf = new cc.SpriteFrame(dstRT);
                if (outSprite)
                    outSprite.spriteFrame = sf;
                // 销毁不用的临时 RenderTexture
                srcRT.destroy();
                //销毁临时存放截图的节点
                node.destroy();
                resolve({ dstRT, sf });
            }
        });
    }

    /**
     * 获取节点的 RenderTexture
     * @param node 节点
     * @param out 输出
     * @see RenderUtil.ts https://gitee.com/ifaswind/eazax-ccc/blob/master/utils/RenderUtil.ts
     */
    static getRenderTexture(node: cc.Node, out?: cc.RenderTexture) {
        // 检查参数
        if (!cc.isValid(node)) {
            return null;
        }
        if (!out || !(out instanceof cc.RenderTexture)) {
            out = new cc.RenderTexture();
        }
        // 获取宽高
        let width = Math.floor(node.width);
        let height = Math.floor(node.height);
        // 初始化 RenderTexture
        out.initWithSize(width, height);
        // 创建临时摄像机用于渲染目标节点
        const cameraNode = new cc.Node();
        cameraNode.parent = node;
        const camera = cameraNode.addComponent(cc.Camera);
        camera.clearFlags |= cc.Camera.ClearFlags.COLOR;
        camera.backgroundColor = cc.color(0, 0, 0, 0);
        camera.zoomRatio = cc.winSize.height / height;
        // 将节点渲染到 RenderTexture 中
        camera.targetTexture = out;
        camera.render(node);
        // 销毁临时对象
        cameraNode.destroy();
        // 返回 RenderTexture
        return out;
    }

    /**
     * 使用指定材质来将 RenderTexture 渲染到另一个 RenderTexture
     * @param srcRT 来源
     * @param dstRT 目标
     * @param material 材质
     * @see RenderUtil.ts https://gitee.com/ifaswind/eazax-ccc/blob/master/utils/RenderUtil.ts
     */
    static renderWithMaterial(srcRT: cc.RenderTexture, dstRT: cc.RenderTexture | cc.Material, material?: cc.Material) {
        // 检查参数
        if (dstRT instanceof cc.Material) {
            material = dstRT;
            dstRT = new cc.RenderTexture();
        }
        // 创建临时节点（用于渲染 RenderTexture）
        const tempNode = new cc.Node();
        tempNode.setParent(cc.Canvas.instance.node);
        const tempSprite = tempNode.addComponent(cc.Sprite);
        tempSprite.sizeMode = cc.Sprite.SizeMode.RAW;
        tempSprite.trim = false;
        tempSprite.spriteFrame = new cc.SpriteFrame(srcRT);
        // 获取图像宽高
        const width = srcRT.width,
            height = srcRT.height;
        // 初始化 RenderTexture
        dstRT.initWithSize(width, height);
        // 更新材质
        if (material instanceof cc.Material) {
            tempSprite.setMaterial(0, material);
        }
        // 创建临时摄像机（用于渲染临时节点）
        const cameraNode = new cc.Node();
        cameraNode.setParent(tempNode);
        const camera = cameraNode.addComponent(cc.Camera);
        camera.clearFlags |= cc.Camera.ClearFlags.COLOR;
        camera.backgroundColor = cc.color(0, 0, 0, 0);
        camera.zoomRatio = cc.winSize.height / height;
        // 将临时节点渲染到 RenderTexture 中
        camera.targetTexture = dstRT;
        camera.render(tempNode);
        // 销毁临时对象
        cameraNode.destroy();
        tempNode.destroy();
        // 返回 RenderTexture
        return dstRT;
    }


    /**
     * 截图
     * @param targetNode  截图目标节点，如果为null则表示截全屏
     * @returns 返回截屏图片的node
     */
    static screenShot(targetNode: cc.Node = null) {
        //获取需要渲染场景的Camera，此处由于全局只有一个主相机，一个ui相机，所以选中主相机
        let camera = cc.Camera.main;
        //创建新的texture
        let texture = new cc.RenderTexture();
        texture.initWithSize(cc.winSize.width, cc.winSize.height, (cc.game as any)._renderContext.STENCIL_INDEX8);
        //创建新的spriteFrame
        let spriteFrame = new cc.SpriteFrame();
        if (targetNode == null) {
            spriteFrame.setTexture(texture);
        } else {
            let nodeX = cc.winSize.width / 2 + targetNode.x - targetNode.width / 2;
            let nodeY = cc.winSize.height / 2 + targetNode.y - targetNode.height / 2;
            let nodeWidth = targetNode.width;
            let nodeHeight = targetNode.height;
            //只显示node部分的图片
            spriteFrame.setTexture(texture, new cc.Rect(nodeX, nodeY, nodeWidth, nodeHeight));
        }
        //创建新的node
        let node = new cc.Node();
        let sprite = node.addComponent(cc.Sprite);
        node.parent = cc.Canvas.instance.node;//截图节点载使存放在场景根节点
        node.opacity = 0;
        sprite.spriteFrame = spriteFrame;
        //截图是反的，这里将截图scaleY取反，这样就是正的了
        sprite.node.scaleY = - Math.abs(sprite.node.scaleY);
        //手动渲染camera
        camera.cullingMask = 0xffffffff;
        camera.targetTexture = texture;
        camera.render();
        camera.targetTexture = null;
        return { node, texture };
    }
}



