use std::collections::HashMap;
use serde::{Deserialize, Serialize};

/// A simple Chinese tokenizer using dictionary-based forward maximum matching.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tokenizer {
    // Maps a word to its integer id (ids are assigned in insertion order).
    vocab: HashMap<String, usize>,
    // Reverse mapping: index == word id.
    id_to_word: Vec<String>,
    // Length (in chars, not bytes) of the longest vocabulary word; bounds
    // the match-window width used by `tokenize`.
    max_word_len: usize,
}

impl Tokenizer {
    /// Id reserved for the `[UNK]` token (the second word inserted by `new`).
    const UNK_ID: usize = 1;

    /// Creates a tokenizer pre-loaded with the special tokens
    /// `[PAD]` (0), `[UNK]` (1), `[CLS]` (2) and `[SEP]` (3).
    pub fn new() -> Self {
        let mut tokenizer = Tokenizer {
            vocab: HashMap::new(),
            id_to_word: Vec::new(),
            max_word_len: 0,
        };

        // Special tokens; their insertion order fixes their ids.
        tokenizer.add_word("[PAD]"); // padding
        tokenizer.add_word("[UNK]"); // unknown word
        tokenizer.add_word("[CLS]"); // start of sentence
        tokenizer.add_word("[SEP]"); // sentence separator

        tokenizer
    }

    /// Adds `word` to the vocabulary and returns its id.
    /// If the word is already present, the existing id is returned.
    pub fn add_word(&mut self, word: &str) -> usize {
        if let Some(&id) = self.vocab.get(word) {
            return id;
        }

        let id = self.id_to_word.len();
        self.vocab.insert(word.to_string(), id);
        self.id_to_word.push(word.to_string());
        // Track the longest word in *characters* so `tokenize` knows how
        // wide a match window to try.
        self.max_word_len = self.max_word_len.max(word.chars().count());
        id
    }

    /// Adds every word in `words` to the vocabulary.
    pub fn add_words(&mut self, words: &[&str]) {
        for word in words {
            self.add_word(word);
        }
    }

    /// Forward maximum-matching tokenization: at each position, greedily
    /// take the longest vocabulary word; unmatched characters become
    /// single-character tokens.
    pub fn tokenize(&self, text: &str) -> Vec<String> {
        let chars: Vec<char> = text.chars().collect();
        let mut tokens = Vec::new();
        let mut i = 0;

        while i < chars.len() {
            let mut matched = false;

            // Try the longest candidate first.
            for len in (1..=self.max_word_len.min(chars.len() - i)).rev() {
                let word: String = chars[i..i + len].iter().collect();

                if self.vocab.contains_key(&word) {
                    tokens.push(word);
                    i += len;
                    matched = true;
                    break;
                }
            }

            // No dictionary match: fall back to a single character.
            if !matched {
                tokens.push(chars[i].to_string());
                i += 1;
            }
        }

        tokens
    }

    /// Maps tokens to ids; unknown tokens map to `[UNK]`.
    pub fn encode(&self, tokens: &[String]) -> Vec<usize> {
        tokens
            .iter()
            .map(|token| self.vocab.get(token).copied().unwrap_or(Self::UNK_ID))
            .collect()
    }

    /// Maps ids back to words; out-of-range ids decode to `[UNK]`.
    ///
    /// Unlike an eager `unwrap_or(&self.id_to_word[1])`, the fallback here
    /// is lazy, so valid ids never touch (or panic on) index 1.
    pub fn decode(&self, ids: &[usize]) -> Vec<String> {
        ids.iter()
            .map(|&id| {
                self.id_to_word
                    .get(id)
                    .or_else(|| self.id_to_word.get(Self::UNK_ID))
                    .cloned()
                    // Only reachable on a vocabulary missing the special
                    // tokens (never produced by `new`).
                    .unwrap_or_else(|| "[UNK]".to_string())
            })
            .collect()
    }

    /// End to end: text -> tokens -> ids.
    pub fn encode_text(&self, text: &str) -> Vec<usize> {
        let tokens = self.tokenize(text);
        self.encode(&tokens)
    }

    /// Number of words in the vocabulary.
    pub fn vocab_size(&self) -> usize {
        self.vocab.len()
    }

    /// Looks up the id of `word`, if present.
    pub fn word_to_id(&self, word: &str) -> Option<usize> {
        self.vocab.get(word).copied()
    }
}

impl Default for Tokenizer {
    fn default() -> Self {
        Self::new()
    }
}

/// Word-embedding table: one fixed-length `f64` vector per vocabulary id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Embedding {
    // Row `i` is the embedding vector for word id `i`.
    embeddings: Vec<Vec<f64>>,
    // Length of every row in `embeddings`.
    embedding_dim: usize,
}

impl Embedding {
    /// Creates a randomly initialized embedding table.
    ///
    /// Entries are drawn uniformly from `(-s, s)` with
    /// `s = 1 / sqrt(embedding_dim)` (fan-in scaling).
    pub fn new(vocab_size: usize, embedding_dim: usize) -> Self {
        use rand::Rng;
        let mut rng = rand::thread_rng();
        let scale = (1.0 / embedding_dim as f64).sqrt();

        let embeddings = (0..vocab_size)
            .map(|_| {
                (0..embedding_dim)
                    .map(|_| rng.gen_range(-scale..scale))
                    .collect()
            })
            .collect();

        Embedding {
            embeddings,
            embedding_dim,
        }
    }

    /// Returns the embedding for `id`, or `None` if out of range.
    pub fn get(&self, id: usize) -> Option<&Vec<f64>> {
        self.embeddings.get(id)
    }

    /// Returns one embedding per id.
    ///
    /// Out-of-range ids fall back to row 1 (assumed to be `[UNK]`,
    /// mirroring the tokenizer's id layout — TODO confirm callers keep
    /// that layout); if even that row is missing, a zero vector is
    /// returned instead of panicking. The previous eager
    /// `unwrap_or(&self.embeddings[1])` panicked for tables with fewer
    /// than two rows even when every id was valid.
    pub fn get_batch(&self, ids: &[usize]) -> Vec<Vec<f64>> {
        ids.iter()
            .map(|&id| {
                self.embeddings
                    .get(id)
                    .or_else(|| self.embeddings.get(1))
                    .cloned()
                    .unwrap_or_else(|| vec![0.0; self.embedding_dim])
            })
            .collect()
    }

    /// The dimensionality of each embedding vector.
    pub fn dim(&self) -> usize {
        self.embedding_dim
    }

    /// Overwrites the embedding for `id`.
    /// Silently ignores out-of-range ids and wrong-length vectors.
    pub fn set(&mut self, id: usize, embedding: Vec<f64>) {
        if id < self.embeddings.len() && embedding.len() == self.embedding_dim {
            self.embeddings[id] = embedding;
        }
    }
}

/// Intent categories recognized by the rule-based classifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Intent {
    Greeting,           // greeting
    Question,           // general question
    Capability,         // asking about capabilities
    Farewell,          // farewell
    Affirmative,       // affirmation
    Negative,          // negation
    Help,              // request for help
    Unknown,           // unrecognized
}

impl std::fmt::Display for Intent {
    /// Renders the intent as its Chinese display label.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Intent::Greeting => "问候",
            Intent::Question => "提问",
            Intent::Capability => "询问能力",
            Intent::Farewell => "告别",
            Intent::Affirmative => "肯定",
            Intent::Negative => "否定",
            Intent::Help => "求助",
            Intent::Unknown => "未知",
        };
        f.write_str(label)
    }
}

/// Rule-based intent classifier driven by token-sequence patterns.
#[derive(Debug, Clone)]
pub struct IntentClassifier {
    // For each intent, a list of patterns; a pattern is the word sequence
    // expected in the input.
    patterns: HashMap<Intent, Vec<Vec<String>>>,
}

impl IntentClassifier {
    /// Builds the classifier with its built-in pattern dictionary.
    pub fn new() -> Self {
        // Converts a borrowed word list into an owned pattern; removes the
        // `vec!["…".to_string(), …]` boilerplate repeated per pattern.
        fn pat(words: &[&str]) -> Vec<String> {
            words.iter().map(|w| w.to_string()).collect()
        }

        let mut patterns = HashMap::new();

        // Greetings
        patterns.insert(
            Intent::Greeting,
            vec![
                pat(&["你好"]),
                pat(&["您好"]),
                pat(&["嗨"]),
                pat(&["hi"]),
                pat(&["hello"]),
                pat(&["早上好"]),
                pat(&["晚上好"]),
            ],
        );

        // Capability queries
        patterns.insert(
            Intent::Capability,
            vec![
                pat(&["你", "能", "做", "什么"]),
                pat(&["你", "会", "什么"]),
                pat(&["你", "可以", "做", "什么"]),
                pat(&["你", "有", "什么", "功能"]),
                pat(&["介绍", "一下", "自己"]),
                pat(&["你", "是", "谁"]),
                pat(&["你", "是", "什么"]),
            ],
        );

        // General questions
        patterns.insert(
            Intent::Question,
            vec![
                pat(&["什么", "是"]),
                pat(&["如何"]),
                pat(&["怎么"]),
                pat(&["为什么"]),
                pat(&["能", "告诉", "我"]),
                pat(&["请问"]),
            ],
        );

        // Farewells
        patterns.insert(
            Intent::Farewell,
            vec![
                pat(&["再见"]),
                pat(&["拜拜"]),
                pat(&["goodbye"]),
                pat(&["bye"]),
            ],
        );

        // Help requests
        patterns.insert(
            Intent::Help,
            vec![
                pat(&["帮助"]),
                pat(&["help"]),
                pat(&["怎么", "用"]),
                pat(&["使用", "方法"]),
            ],
        );

        IntentClassifier { patterns }
    }

    /// Classifies a tokenized utterance.
    ///
    /// Strategy: (1) exact match against single-word patterns, (2) best
    /// fuzzy score over all patterns, (3) keyword fallback for
    /// question-like text when the best score is weak.
    pub fn classify(&self, tokens: &[String]) -> Intent {
        let text = tokens.join("");

        // Phase 1: a single-word pattern matches exactly, either against
        // the whole utterance or any one token.
        // NOTE(review): HashMap iteration order is unspecified, so if two
        // intents ever share a single-word pattern the winner is
        // arbitrary; the built-in patterns do not overlap today.
        for (intent, patterns) in &self.patterns {
            for pattern in patterns {
                if pattern.len() == 1 {
                    let pattern_text = &pattern[0];
                    if text == *pattern_text || tokens.contains(pattern_text) {
                        return intent.clone();
                    }
                }
            }
        }

        // Phase 2: pick the pattern with the strictly highest fuzzy score
        // (ties keep the earlier candidate — see the NOTE above).
        let mut best_intent = Intent::Unknown;
        let mut best_score = 0.0;

        for (intent, patterns) in &self.patterns {
            for pattern in patterns {
                let score = self.match_score(tokens, pattern);
                if score > best_score {
                    best_score = score;
                    best_intent = intent.clone();
                }
            }
        }

        // Phase 3: keyword fallback for question-like text.
        if best_intent == Intent::Unknown || best_score < 0.5 {
            // Parentheses make the original `&&`-binds-tighter-than-`||`
            // precedence explicit. NOTE(review): `text.len()` is a *byte*
            // count, so any text containing "什么" (6 bytes in UTF-8)
            // always passes `> 2` — a char count was presumably intended;
            // behavior kept as-is.
            if text.contains("什么是") || (text.contains("什么") && text.len() > 2) {
                return Intent::Question;
            }
            if text.contains("如何") || text.contains("怎么") {
                return Intent::Question;
            }
        }

        best_intent
    }

    /// Scores how well `pattern` matches `tokens`, normalized to [0, 1].
    ///
    /// Each pattern word contributes 2 points for an exact token match or
    /// 1 point for a substring match against the joined text.
    fn match_score(&self, tokens: &[String], pattern: &[String]) -> f64 {
        if pattern.is_empty() {
            return 0.0;
        }

        let mut matched = 0;
        let text = tokens.join("");

        for p in pattern {
            if tokens.contains(p) {
                matched += 2; // exact token match scores double
            } else if text.contains(p) {
                matched += 1;
            }
        }

        // Normalize by the maximum attainable score.
        matched as f64 / (pattern.len() * 2) as f64
    }
}

impl Default for IntentClassifier {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tokenizer() {
        // Maximum matching should prefer dictionary words over chars.
        let mut tokenizer = Tokenizer::new();
        tokenizer.add_words(&["你好", "世界", "人工智能"]);

        assert_eq!(tokenizer.tokenize("你好世界"), vec!["你好", "世界"]);
    }

    #[test]
    fn test_intent_classifier() {
        let classifier = IntentClassifier::new();

        // A lone greeting word is recognized via exact match.
        let greeting = vec!["你好".to_string()];
        assert_eq!(classifier.classify(&greeting), Intent::Greeting);

        // A capability query matches a multi-word pattern.
        let capability: Vec<String> = ["你", "能", "做", "什么"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        assert_eq!(classifier.classify(&capability), Intent::Capability);
    }

    #[test]
    fn test_embedding() {
        // Every row of a fresh table has the requested dimensionality.
        let embedding = Embedding::new(100, 64);
        assert_eq!(embedding.get(0).unwrap().len(), 64);
    }
}

