use alloc::{string::String, vec};

use hashbrown::HashMap;

/// Embedded example ONNX model (f32 graph with dead code, used by `run_zhulong`).
/// NOTE: `'static` is implied on a `static` item, so the explicit lifetime was redundant.
pub static ONNX: &[u8] = include_bytes!("../../zhulong/example/dead_code_f32.onnx");

/// End-to-end smoke test of the `zhulong` ONNX pipeline:
/// load the embedded model, lower it to IR, run the optimization passes,
/// then execute the graph on two hard-coded 2x2 f32 inputs and log `z`.
///
/// Panics (via `unwrap`) if the model fails to load/parse or the output
/// tensor `"z"` is missing — acceptable for a demo/smoke-test entry point.
pub fn run_zhulong() {
    // `ONNX` is already `&[u8]`; passing it directly avoids a needless `&&[u8]` borrow.
    let model = zhulong::onnx::load(ONNX).unwrap();
    let opset = zhulong::onnx::opset_ai_onnx(&model).unwrap();
    info!("ONNX opset: {:?}", opset);

    // Parse to IR.
    let mut graph = zhulong::ir::graph::Graph::from_onnx(&model);

    // Single source of truth for the pretty-print settings; the same options
    // are used before and after the pass pipeline so the two dumps are comparable.
    let pretty_opts = || zhulong::ir::graph::PrettyOpts {
        show_attrs: true,
        show_constants: true,
        const_max_elems: 16,
    };
    info!("{}", graph.pretty(pretty_opts()));

    // Run optimization passes; only re-dump the graph if anything changed.
    let changed = zhulong::pass::pipeline::run_all_passes(&mut graph);
    if changed {
        info!("after optimization passes:");
        info!("{}", graph.pretty(pretty_opts()));
    }

    let ops = zhulong::ops::register::register_all_ops();
    info!("ONNX runtime registered ops: {:?}", ops);

    // Build input tensors (2x2, row-major f32).
    let x = zhulong::ir::tensor::Tensor::from_f32(&[2, 2], vec![1., 2., 3., 4.]);
    let y = zhulong::ir::tensor::Tensor::from_f32(&[2, 2], vec![10., 20., 30., 40.]);

    info!("x = {:?}", x.as_slice::<f32>());
    info!("y = {:?}", y.as_slice::<f32>());

    // Feed the inputs by graph input name.
    let mut feeds: HashMap<String, zhulong::ir::tensor::Tensor> = HashMap::new();
    feeds.insert("x".into(), x);
    feeds.insert("y".into(), y);

    // Execute the (possibly optimized) graph and fetch output "z".
    let outs = zhulong::runtime::executor::run(&graph, feeds);
    let z = outs.get("z").unwrap();

    info!("z = {:?}", z.as_slice::<f32>());
}
