use std::iter;

use alice2d::{
    math::Vector2u,
    render_api::{
        self, CustomRenderPipeline, Order, RenderTarget, Rhi, ScreenDescriptorInfo, TextureView,
    },
    ui::{egui::TextureId, UiOutput}, AliceContext,
};

use super::gui_pass::{GUIPass, ScreenDescriptor};

/// Render pipeline used by the editor: records and presents the egui-based
/// UI each frame, and exposes the scene's render attachment to egui.
pub struct EditorRenderPipeline {
    // main_camera_pass: MainCameraPass,
    /// Pass that uploads egui texture deltas / buffers and records the UI render pass.
    gui_pass: GUIPass,
    /// Shared handle to the render hardware interface (device, queue, surface).
    rhi: Rhi,
    /// egui texture id registered for the render-attachment bind group created in `new`.
    pub output_id: TextureId,
}

impl EditorRenderPipeline {
    /// Renders one frame of the editor UI to the window surface.
    ///
    /// Uploads any egui texture deltas, updates the UI vertex/index buffers,
    /// records the GUI render pass (clearing to black), submits the command
    /// buffer, and presents the acquired surface texture.
    ///
    /// # Panics
    /// Panics if the next surface texture cannot be acquired, or if any of
    /// the GUI pass operations fail.
    pub fn draw(&mut self, ui_output: UiOutput, screen_desc: &ScreenDescriptorInfo) {
        let output = self
            .rhi
            .borrow()
            .surface
            .get_current_texture()
            .expect("failed to acquire next surface texture");
        let view = output
            .texture
            .create_view(&render_api::TextureViewDescriptor::default());

        {
            // Borrow the RHI once for the whole frame instead of re-borrowing
            // the RefCell for every device/queue access. The original code
            // already held temporary borrows across these same calls, so the
            // borrow overlap is unchanged.
            let rhi = self.rhi.borrow();

            let mut encoder =
                rhi.device
                    .create_command_encoder(&render_api::CommandEncoderDescriptor {
                        label: Some("Output Render Encoder"),
                    });

            let screen_descriptor = ScreenDescriptor {
                physical_width: screen_desc.view_port_size.x,
                physical_height: screen_desc.view_port_size.y,
                scale_factor: screen_desc.scale_factor,
            };

            // Upload new/changed egui textures before recording the pass.
            self.gui_pass
                .add_textures(&rhi.device, &rhi.queue, &ui_output.texture_delta)
                .expect("add texture ok");
            self.gui_pass.update_buffers(
                &rhi.device,
                &rhi.queue,
                &ui_output.primitive,
                &screen_descriptor,
            );

            // Record all render passes.
            self.gui_pass
                .draw(
                    &mut encoder,
                    &view,
                    &ui_output.primitive,
                    &screen_descriptor,
                    Some(render_api::Color::BLACK),
                )
                .unwrap();

            // Release textures egui no longer needs after this frame.
            self.gui_pass
                .remove_textures(&ui_output.texture_delta)
                .expect("remove texture ok");

            rhi.queue.submit(iter::once(encoder.finish()));
        }
        output.present();
    }

    /// Stable identifier for this pipeline.
    fn get_pipeline_name(&self) -> &'static str {
        "editor_gui_pipeline"
    }

    /// Upcasts to `&dyn Any` so consumers can downcast to the concrete type.
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    /// Upcasts to `&mut dyn Any` so consumers can downcast mutably.
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }
}

impl EditorRenderPipeline {
    /// Creates the editor pipeline.
    ///
    /// Allocates an off-screen render-attachment texture sized to the window,
    /// installs its view as the engine's render-pipeline output, and registers
    /// the attachment's bind group with egui so it can be displayed in the UI.
    /// The resulting egui id is stored in `output_id`.
    pub fn new(mut ctx: AliceContext) -> Self {
        let size = ctx.config().get_window_size();
        let rhi = ctx.rhi();

        let (_, view, group) = RenderTarget::create_render_attachment_texture(rhi.clone(), size);
        ctx.set_render_pipeline_output(view);

        let mut gui_pass = GUIPass::new(rhi.clone(), 1);
        // Expose the scene attachment to egui as a texture.
        let output_id = gui_pass.egui_texture_from_wgpu_group(&rhi.borrow().device, group);

        Self {
            gui_pass,
            rhi,
            output_id,
        }
    }

    /// Recreates the off-screen render target at `size` and rebinds the
    /// existing egui texture id to the new attachment, so any UI referring
    /// to `output_id` stays valid across resizes.
    pub fn resize(&mut self, mut ctx: AliceContext, size: Vector2u) {
        let (_, view, group) =
            RenderTarget::create_render_attachment_texture(self.rhi.clone(), size);
        ctx.set_render_pipeline_output(view);

        self.gui_pass.update_egui_texture_from_wgpu_group(
            &self.rhi.borrow().device,
            group,
            self.output_id,
        );
    }
}
