use crate::neural::NeuralNetwork;
use crate::transformer::Transformer;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::Path;

/// Utility for persisting models to disk and loading them back.
///
/// Supports human-readable JSON (via `serde_json`) and compact
/// binary encoding (via `bincode`); see the associated functions
/// on the `impl` block. Stateless — all methods are associated fns.
pub struct ModelPersistence;

impl ModelPersistence {
    /// Maps a (de)serialization failure onto `io::Error` so every
    /// persistence method exposes a uniform `io::Result` interface.
    ///
    /// Kept as `ErrorKind::Other` to preserve the error kind callers
    /// may already be matching on.
    fn codec_err<E>(err: E) -> io::Error
    where
        E: Into<Box<dyn std::error::Error + Send + Sync>>,
    {
        io::Error::new(io::ErrorKind::Other, err)
    }

    /// Saves a neural-network model to a pretty-printed JSON file.
    ///
    /// # Errors
    /// Returns an error if JSON serialization fails or the file
    /// cannot be created/written.
    pub fn save_neural_network<P: AsRef<Path>>(
        network: &NeuralNetwork,
        path: P,
    ) -> io::Result<()> {
        let serialized =
            serde_json::to_string_pretty(network).map_err(Self::codec_err)?;

        let mut file = File::create(path)?;
        file.write_all(serialized.as_bytes())?;

        Ok(())
    }

    /// Loads a neural-network model from a JSON file.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read or its contents
    /// are not valid JSON for a `NeuralNetwork`.
    pub fn load_neural_network<P: AsRef<Path>>(path: P) -> io::Result<NeuralNetwork> {
        let mut file = File::open(path)?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;

        serde_json::from_str(&contents).map_err(Self::codec_err)
    }

    /// Saves a neural-network model to a binary file (more compact
    /// and faster to encode/decode than JSON).
    ///
    /// # Errors
    /// Returns an error if binary serialization fails or the file
    /// cannot be created/written.
    pub fn save_neural_network_binary<P: AsRef<Path>>(
        network: &NeuralNetwork,
        path: P,
    ) -> io::Result<()> {
        let serialized = bincode::serialize(network).map_err(Self::codec_err)?;

        let mut file = File::create(path)?;
        file.write_all(&serialized)?;

        Ok(())
    }

    /// Loads a neural-network model from a binary (bincode) file.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read or its bytes do
    /// not decode into a `NeuralNetwork`.
    pub fn load_neural_network_binary<P: AsRef<Path>>(path: P) -> io::Result<NeuralNetwork> {
        let mut file = File::open(path)?;
        let mut buffer = Vec::new();
        file.read_to_end(&mut buffer)?;

        bincode::deserialize(&buffer).map_err(Self::codec_err)
    }

    /// Saves a Transformer model to a binary (bincode) file.
    ///
    /// # Errors
    /// Returns an error if binary serialization fails or the file
    /// cannot be created/written.
    pub fn save_transformer<P: AsRef<Path>>(
        transformer: &Transformer,
        path: P,
    ) -> io::Result<()> {
        let serialized = bincode::serialize(transformer).map_err(Self::codec_err)?;

        let mut file = File::create(path)?;
        file.write_all(&serialized)?;

        Ok(())
    }

    /// Loads a Transformer model from a binary (bincode) file.
    ///
    /// # Errors
    /// Returns an error if the file cannot be read or its bytes do
    /// not decode into a `Transformer`.
    pub fn load_transformer<P: AsRef<Path>>(path: P) -> io::Result<Transformer> {
        let mut file = File::open(path)?;
        let mut buffer = Vec::new();
        file.read_to_end(&mut buffer)?;

        bincode::deserialize(&buffer).map_err(Self::codec_err)
    }

    /// Returns the size in bytes of a model file on disk.
    ///
    /// # Errors
    /// Returns an error if the file's metadata cannot be read
    /// (e.g. the path does not exist).
    pub fn get_model_size<P: AsRef<Path>>(path: P) -> io::Result<u64> {
        let metadata = std::fs::metadata(path)?;
        Ok(metadata.len())
    }

    /// Returns `true` if a model file exists at `path`.
    pub fn model_exists<P: AsRef<Path>>(path: P) -> bool {
        path.as_ref().exists()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::neural::{Activation, Layer};

    /// Round-trips a small network through JSON and checks the
    /// layer count survives serialization.
    #[test]
    fn test_save_and_load() {
        let mut nn = NeuralNetwork::new();
        nn.add_layer(Layer::new(3, 4, Activation::ReLU));
        nn.add_layer(Layer::new(4, 2, Activation::Sigmoid));

        // Write into the OS temp directory instead of the working
        // directory: the CWD may be read-only during CI, and a fixed
        // relative path can collide with other project artifacts.
        let path = std::env::temp_dir().join("test_model.json");
        ModelPersistence::save_neural_network(&nn, &path).unwrap();
        let loaded = ModelPersistence::load_neural_network(&path).unwrap();

        assert_eq!(nn.layer_count(), loaded.layer_count());

        // Best-effort cleanup; ignore failure so cleanup never masks
        // the real assertion result.
        std::fs::remove_file(&path).ok();
    }
}

