// src/sql/parser/lexer.rs
use std::{iter::Peekable, str::Chars};
use std::fmt::Display;
use crate::{error::Result };
use crate::error::Error;

/// Reserved SQL keywords recognized by the lexer.
#[derive(Debug, Clone, PartialEq)]
pub enum Keyword {
    Create,
    Table,
    Int,
    Integer,
    Boolean,
    Bool,
    String,
    Text,
    Varchar,
    Float,
    Double,
    Select,
    From,
    Insert,
    Into,
    Values,
    True,
    False,
    Default,
    Not,
    Null,
    Primary,
    Key,
}

impl Keyword {
    /// Parses an identifier as a keyword, case-insensitively.
    /// Returns `None` when the identifier is not a recognized keyword.
    pub fn from_str(ident: &str) -> Option<Self> {
        let upper = ident.to_uppercase();
        let keyword = match upper.as_str() {
            "CREATE" => Self::Create,
            "TABLE" => Self::Table,
            "INT" => Self::Int,
            "INTEGER" => Self::Integer,
            "BOOLEAN" => Self::Boolean,
            "BOOL" => Self::Bool,
            "STRING" => Self::String,
            "TEXT" => Self::Text,
            "VARCHAR" => Self::Varchar,
            "FLOAT" => Self::Float,
            "DOUBLE" => Self::Double,
            "SELECT" => Self::Select,
            "FROM" => Self::From,
            "INSERT" => Self::Insert,
            "INTO" => Self::Into,
            "VALUES" => Self::Values,
            "TRUE" => Self::True,
            "FALSE" => Self::False,
            "DEFAULT" => Self::Default,
            "NOT" => Self::Not,
            "NULL" => Self::Null,
            "PRIMARY" => Self::Primary,
            "KEY" => Self::Key,
            _ => return None,
        };
        Some(keyword)
    }

    /// Canonical (title-case) spelling used when displaying the keyword.
    pub fn to_str(&self) -> &str {
        match self {
            Self::Create => "Create",
            Self::Table => "Table",
            Self::Int => "Int",
            Self::Integer => "Integer",
            Self::Boolean => "Boolean",
            Self::Bool => "Bool",
            Self::String => "String",
            Self::Text => "Text",
            Self::Varchar => "Varchar",
            Self::Float => "Float",
            Self::Double => "Double",
            Self::Select => "Select",
            Self::From => "From",
            Self::Insert => "Insert",
            Self::Into => "Into",
            Self::Values => "Values",
            Self::True => "True",
            Self::False => "False",
            Self::Default => "Default",
            Self::Not => "Not",
            Self::Null => "Null",
            Self::Primary => "Primary",
            Self::Key => "Key",
        }
    }
}

impl Display for Keyword {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.to_str())
    }
}

#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    // A reserved SQL keyword
    Keyword(Keyword),
    // An identifier, e.g. a table or column name
    Ident(String),
    // A string literal
    String(String),
    // A numeric literal, kept as its source text
    Number(String),
    // Open parenthesis (
    OpenParen,
    // Close parenthesis )
    CloseParen,
    // Comma ,
    Comma,
    // Semicolon ;
    Semicolon,
    // Asterisk *
    Asterisk,
    // Plus sign +
    Plus,
    // Minus sign -
    Minus,
    // Slash /
    Slash,
}

impl Display for Token {
    /// Renders the token as its source-text form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let text = match self {
            Token::Keyword(keyword) => keyword.to_str(),
            Token::Ident(ident) => ident.as_str(),
            Token::String(string) => string.as_str(),
            Token::Number(number) => number.as_str(),
            Token::OpenParen => "(",
            Token::CloseParen => ")",
            Token::Comma => ",",
            Token::Semicolon => ";",
            Token::Asterisk => "*",
            Token::Plus => "+",
            Token::Minus => "-",
            Token::Slash => "/",
        };
        f.write_str(text)
    }
}

// Lexical analyzer over a borrowed SQL string.
// The lifetime 'a ties the lexer to the source text it borrows: the
// iterator cannot outlive the original &str, so no dangling reference is
// possible while tokens are being produced.
pub struct Lexer<'a> {
    // Peekable adds peek() on top of the Chars iterator, allowing a
    // non-destructive look at the next character; iteration state only
    // advances when next() is called.
    iter: Peekable<Chars<'a>>,
}

// 实现迭代器接口
impl<'a> Iterator for Lexer<'a> {
    // 返回token类型
    type Item = Result<Token>;

    fn next(&mut self) -> Option<Self::Item> {
        match self.scan() {
            Ok(Some(token)) => Some(Ok(token)),
            Ok(None) => self.iter.peek().map(
                |c|
                Err(Error::Parse(format!("[Lexer] Unexpected character {}", c)))
            ),
            Err(err) => Some(Err(err))
        }
    }
}

impl<'a> Lexer<'a> {
    pub fn new(sql_text: &'a str) -> Self {
        Lexer {
            iter: sql_text.chars().peekable(),
        }
    }

    // 消除空白字符
    // e.g. select    *   from    t;
    fn erase_whitespace(&mut self) {
        self.next_while(|c| c.is_whitespace());
    }

    // 满足条件则跳转下一个字符, 并返回该字符
    fn next_if<F: Fn(char) -> bool>(&mut self, predicate: F) -> Option<char> {
        self.iter.peek().filter(|&c| predicate(*c))?;
        self.iter.next()
    }

    // 判断当前字符是否满足条件, 如果是就跳转下一个字符
    fn next_while<F: Fn(char) -> bool>(&mut self, predicate: F) -> Option<String> {
        let mut value = String::new();
        while let Some(c) = self.next_if(&predicate) {
            value.push(c);
        }

        Some(value).filter(|v| !v.is_empty())
    }

    // 只有token类型才跳转下一个, 并返回token
    fn next_if_token<F: Fn(char) -> Option<Token>>(&mut self, predicate: F) -> Option<Token> {
        let token = self.iter.peek().and_then(|c| predicate(*c))?;
        self.iter.next();
        Some(token)
    }

    // 扫描拿到下一个token
    fn scan(&mut self) -> Result<Option<Token>> {
        // 消除空白字符
        self.erase_whitespace();
        // 根据第一个字符判断
        match self.iter.peek() {
            Some('\'') => self.scan_string(), // 扫描字符串
            Some(c) if c.is_ascii_digit() => Ok(self.scan_number()), // 扫描数字
            Some(c)if c.is_alphabetic() => Ok(self.scan_ident()), // 扫描标识符
            Some(c) => Ok(self.scan_symbol()), // 扫描符号
            None => Ok(None),
        }
    }

    // 扫描字符串
    fn scan_string(&mut self) -> Result<Option<Token>> {
        // 判断是否是单引号开头
        if self.next_if(|c| c == '\'').is_none() {
            return Ok(None);
        }

        let mut val = String::new();
        loop {
            match self.iter.next() {
                Some('\'') => break,
                Some(c) => val.push(c),
                None => return Err(Error::Parse(format!("[Lexer] Unexpected end of string"))),
            }
        }

        Ok(Some(Token::String(val)))
    }

    // 扫描数字
    fn scan_number(&mut self) -> Option<Token> {
        // 先扫描一部分
        let mut num = self.next_while(|c| c.is_ascii_digit())?;
        // 如果中间有小数点, 说明是浮点数
        if let Some(sep) = self.next_if(|c| c == '.') {
            num.push(sep);
            // 扫描剩下的部分
            while let Some(c) = self.next_if(|c| c.is_ascii_digit()) {
                num.push(c);
            }
        }

        Some(Token::Number(num))
    }

    // 扫描标识符, 也可能是关键字, true / false
    fn scan_ident(&mut self) -> Option<Token> {
        // 第一个字符如果是字符
        let mut value = self.next_if(|c| c.is_alphabetic())?.to_string();
        // 下一个如果是字符或数字或下划线
        while let Some(c) = self.next_if(|c| c.is_alphanumeric() || c == '_') {
            value.push(c);
        }

        Some(Keyword::from_str(&value).map_or(Token::Ident(value.to_lowercase()), Token::Keyword))
    }

    // 扫描符号
    fn scan_symbol(&mut self) -> Option<Token> {
        self.next_if_token(|c| match c {
            '*' => Some(Token::Asterisk),
            '+' => Some(Token::Plus),
            '-' => Some(Token::Minus),
            '/' => Some(Token::Slash),
            ',' => Some(Token::Comma),
            ';' => Some(Token::Semicolon),
            '(' => Some(Token::OpenParen),
            ')' => Some(Token::CloseParen),
            _ => None
        })
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Result;
    use crate::sql::parser::lexer::{Keyword, Lexer, Token};

    #[test]
    fn test_lexer_create_table() -> Result<()> {
        let tokens = Lexer::new(
            "CREATE TABLE users (
                id INT PRIMARY KEY,
                name integer
            );"
        ).collect::<Result<Vec<_>>>()?;

        assert_eq!(tokens, vec![
            Token::Keyword(Keyword::Create),
            Token::Keyword(Keyword::Table),
            Token::Ident("users".to_string()),
            Token::OpenParen,
            Token::Ident("id".to_string()),
            Token::Keyword(Keyword::Int),
            Token::Keyword(Keyword::Primary),
            Token::Keyword(Keyword::Key),
            Token::Comma,
            Token::Ident("name".to_string()),
            Token::Keyword(Keyword::Integer),
            Token::CloseParen,
            Token::Semicolon,
        ]);

        let tokens2: Vec<Token> = Lexer::new(
            "CREATE table tbl (
                 id1 int primary key,
                 id2 integer,
                 c1 bool null,
                 c2 boolean not null,
                 c3 float null,
                 c4 double,
                 c5 string,
                 c6 text,
                 c7 varchar default 'foo',
                 c8 int default 100,
                 c9 integer
             );"
        )
            .collect::<Result<Vec<_>>>()?;

        assert_eq!(tokens2, vec![
            Token::Keyword(Keyword::Create),
            Token::Keyword(Keyword::Table),
            Token::Ident("tbl".to_string()),
            Token::OpenParen,
            Token::Ident("id1".to_string()),
            Token::Keyword(Keyword::Int),
            Token::Keyword(Keyword::Primary),
            Token::Keyword(Keyword::Key),
            Token::Comma,
            Token::Ident("id2".to_string()),
            Token::Keyword(Keyword::Integer),
            Token::Comma,
            Token::Ident("c1".to_string()),
            Token::Keyword(Keyword::Bool),
            Token::Keyword(Keyword::Null),
            Token::Comma,
            Token::Ident("c2".to_string()),
            Token::Keyword(Keyword::Boolean),
            Token::Keyword(Keyword::Not),
            Token::Keyword(Keyword::Null),
            Token::Comma,
            Token::Ident("c3".to_string()),
            Token::Keyword(Keyword::Float),
            Token::Keyword(Keyword::Null),
            Token::Comma,
            Token::Ident("c4".to_string()),
            Token::Keyword(Keyword::Double),
            Token::Comma,
            Token::Ident("c5".to_string()),
            Token::Keyword(Keyword::String),
            Token::Comma,
            Token::Ident("c6".to_string()),
            Token::Keyword(Keyword::Text),
            Token::Comma,
            Token::Ident("c7".to_string()),
            Token::Keyword(Keyword::Varchar),
            Token::Keyword(Keyword::Default),
            Token::String("foo".to_string()),
            Token::Comma,
            Token::Ident("c8".to_string()),
            Token::Keyword(Keyword::Int),
            Token::Keyword(Keyword::Default),
            Token::Number("100".to_string()),
            Token::Comma,
            Token::Ident("c9".to_string()),
            Token::Keyword(Keyword::Integer),
            Token::CloseParen,
            Token::Semicolon,
        ]
        );

        Ok(())
    }

    #[test]
    fn test_lexer_insert_into() -> Result<()> {
        let tokens1 = Lexer::new(
            "insert into tbl values (1, 2, '3', true, false, 4.55);"
        )
            .collect::<Result<Vec<_>>>()?;

        assert_eq!(tokens1, vec![
            Token::Keyword(Keyword::Insert),
            Token::Keyword(Keyword::Into),
            Token::Ident("tbl".to_string()),
            Token::Keyword(Keyword::Values),
            Token::OpenParen,
            Token::Number("1".to_string()),
            Token::Comma,
            Token::Number("2".to_string()),
            Token::Comma,
            Token::String("3".to_string()),
            Token::Comma,
            Token::Keyword(Keyword::True),
            Token::Comma,
            Token::Keyword(Keyword::False),
            Token::Comma,
            Token::Number("4.55".to_string()),
            Token::CloseParen,
            Token::Semicolon,
        ]);

        let tokens2 = Lexer::new(
            "INSERT INTO     tbl (id,name, age) values (100, 'db', 10);"
        )
            .collect::<Result<Vec<_>>>()?;

        assert_eq!(tokens2, vec![
            Token::Keyword(Keyword::Insert),
            Token::Keyword(Keyword::Into),
            Token::Ident("tbl".to_string()),
            Token::OpenParen,
            Token::Ident("id".to_string()),
            Token::Comma,
            Token::Ident("name".to_string()),
            Token::Comma,
            Token::Ident("age".to_string()),
            Token::CloseParen,
            Token::Keyword(Keyword::Values),
            Token::OpenParen,
            Token::Number("100".to_string()),
            Token::Comma,
            Token::String("db".to_string()),
            Token::Comma,
            Token::Number("10".to_string()),
            Token::CloseParen,
            Token::Semicolon,
        ]);

        Ok(())
    }

    #[test]
    fn test_lexer_select() -> Result<()> {
        let tokens1 = Lexer::new(
            "select * from tbl;"
        )
            .collect::<Result<Vec<_>>>()?;

        assert_eq!(tokens1, vec![
            Token::Keyword(Keyword::Select),
            Token::Asterisk,
            Token::Keyword(Keyword::From),
            Token::Ident("tbl".to_string()),
            Token::Semicolon,
        ]);

        Ok(())
    }
}