use std::{path::PathBuf, sync::Arc};

use anyhow::Result;
use burn::{
    backend::LibTorch,
    data::{dataloader::DataLoaderBuilder, dataset::vision::ImageDatasetItem},
    prelude::Backend,
};

use crate::data::{
    batcher::{HWDBBatch, HWDBBatcher},
    dataset::HWDBDataset,
    utils::write_txt,
};

/// Streams the whole `dataset.test` split through an `HWDBBatcher` once so
/// the batcher can accumulate global pixel statistics, then reads them back.
///
/// Returns `(mean, std, max_height, max_width)`, where `std` is the standard
/// deviation with a small epsilon folded in for numerical stability.
///
/// # Panics
/// Panics if the dataset contributed no samples (the statistics counter is
/// zero), since mean/variance are undefined in that case.
pub fn calculate_mean_and_var<B: Backend>(dataset: HWDBDataset) -> (f32, f32, i32, i32) {
    let batcher = HWDBBatcher::new(true, 32, 32, None);
    // Grab a handle to the shared accumulator before the batcher is moved
    // into the data loader below.
    let statistics = Arc::clone(&batcher.statistics);
    let data_loader = DataLoaderBuilder::<B, ImageDatasetItem, HWDBBatch<B>>::new(batcher)
        .batch_size(1000)
        .num_workers(16)
        .build(dataset.test);

    // Iterating drives the batcher; the batches themselves are only used for
    // progress logging — the statistics accumulate as a side effect inside
    // the batcher's shared state.
    for (idx, batch) in data_loader.iter().enumerate() {
        println!("idx: {}, shape: {:?}", idx, batch.images.shape());
    }

    let eps = 1e-7f32;
    let (n, sum, sq, mh, mw) = *statistics.lock().unwrap();
    // Guard against an empty dataset: `sum / 0` would silently produce NaN
    // and poison everything written downstream.
    assert!(n > 0.0, "dataset produced no samples; cannot compute statistics");
    let mean = sum / n;
    // E[x^2] - E[x]^2 can dip slightly below zero due to floating-point
    // cancellation; clamp before the sqrt so we never return NaN.
    let variance = (sq / n - mean * mean).max(0.0);
    (mean, (variance + eps).sqrt(), mh, mw)
}

pub fn preprocess_images(
    images_dir: String,
    parts: Vec<String>,
    classes_filename: String,
    label_filename: String,
) -> Result<()> {
    let hwdbdataset = HWDBDataset::new(&vec![], &parts, &classes_filename, &label_filename);
    type Backend = LibTorch;
    if let Ok(hwdbdataset) = hwdbdataset {
        let (mean, std, mh, mw) = calculate_mean_and_var::<Backend>(hwdbdataset);
        println!(
            "mean {}, std: {}, max height: {}, max width: {}",
            mean, std, mh, mw
        );
        let root_dir = PathBuf::from(images_dir);
        assert!(root_dir.exists(), "images dir param is not useful");
        let data = vec![
            mean.to_string(),
            std.to_string(),
            mh.to_string(),
            mw.to_string(),
        ];
        write_txt(root_dir.join("statistics.txt"), &data, " ")?;
    }
    Ok(())
}
