use std::{collections::HashMap, fmt::Display, iter::Peekable, str::Chars};

/// Every kind of lexeme the scanner can emit.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TokenType {
    // Single-character tokens.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    Comma,
    Dot,
    Minus,
    Plus,
    Semicolon,
    Slash,
    Star,

    // One or two character tokens.
    Bang,
    BangEqual,
    Equal,
    EqualEqual,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,

    // Literals. These carry their text in `Token::value`.
    Identifier,
    Str,
    Number,

    // Keywords.
    If,
    Else,
    For,
    And,
    Fn,
    False,
    Class,
    Nil,
    Or,
    Print,
    Return,
    Super,
    This,
    True,
    Let,
    While,

    // End-of-input marker; always the last token `Scanner::scan` produces.
    Eof,
}

/// A single lexical token: its kind, an optional literal lexeme, and the
/// source line (1-based) where it was found.
#[derive(Debug, PartialEq, Eq)]
pub struct Token {
    // What kind of token this is.
    token: TokenType,
    // todo: value use &str
    // NOTE(review): the current `Chars`-iterator-based scanner makes it hard
    // to hand out borrowed `&str` lexemes; switching would likely require
    // reworking the scanner's iteration strategy.
    value: Option<String>,
    // Source line the token appeared on (the scanner starts counting at 1).
    line: i32,
}

impl Token {
    fn new(token: TokenType, line: i32) -> Self {
        Self {
            token,
            value: None,
            line,
        }
    }

    fn with_value(token: TokenType, value: String, line: i32) -> Self {
        Self {
            token,
            value: Some(value),
            line,
        }
    }
}

impl Display for Token {
    /// Renders as `line: N, Kind` with `: value` appended when the token
    /// carries a literal — identical output to a single formatted `write!`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "line: {}, {:?}", self.line, self.token)?;
        if let Some(v) = &self.value {
            write!(f, ": {}", v)?;
        }
        Ok(())
    }
}

/// Hand-written lexer that turns a source string into a sequence of tokens.
pub struct Scanner<'a> {
    // Tokens accumulated by `scan`.
    tokens: Vec<Token>,
    // Current line number, starting at 1; bumped on every '\n'.
    line: i32,
    // The original input; not read by the visible methods after construction.
    source: &'a str,
    // Peekable character stream the scanner consumes.
    chars: Peekable<Chars<'a>>,
    // Reserved word -> token type lookup table.
    keywords: HashMap<&'static str, TokenType>,
    // Characters that terminate an identifier. Sorted at construction so it
    // can be searched with `binary_search`; must not be modified afterwards.
    ident_invaild_chars: Vec<char>,
}

use TokenType::*;
impl<'a> Scanner<'a> {
    pub fn new(source: &'a str) -> Self {
        let chars = source.chars().peekable();

        let mut ident_invaild_chars: Vec<char> = "{}()[];,.+-*/=!><\"".chars().collect();
        ident_invaild_chars.sort();

        let mut keywords = HashMap::with_capacity(30);

        {
            // add keyword
            keywords.insert("let", Let);
            keywords.insert("if", If);
            keywords.insert("for", For);
            keywords.insert("else", Else);
            keywords.insert("and", And);
            keywords.insert("fn", Fn);
            keywords.insert("false", False);
            keywords.insert("class", Class);
            keywords.insert("nil", Nil);
            keywords.insert("or", Or);
            keywords.insert("print", Print);
            keywords.insert("return", Return);
            keywords.insert("super", Super);
            keywords.insert("this", This);
            keywords.insert("true", True);
            keywords.insert("while", While);
        }

        Scanner {
            tokens: Vec::new(),
            line: 1,
            source,
            chars,
            keywords,
            ident_invaild_chars,
        }
    }

    pub fn tokens(&mut self) -> &[Token] {
        &self.tokens
    }

    pub fn scan(&mut self) {
        while let Some(_) = self.chars.peek() {
            self.get_token();
        }

        self.tokens.push(Token {
            token: Eof,
            value: None,
            line: self.line,
        });
    }

    fn add_token_if_next_char_is_or(
        &mut self,
        expect_next_char: char,
        if_token: TokenType,
        or_token: TokenType,
    ) {
        if let Some(_) = self.chars.next_if_eq(&expect_next_char) {
            self.add_token(if_token);
        } else {
            self.add_token(or_token);
        }
    }

    fn get_token(&mut self) {
        if let Some(c) = self.chars.next() {
            match c {
                '(' => self.add_token(LeftParen),
                ')' => self.add_token(RightParen),
                '{' => self.add_token(LeftBrace),
                '}' => self.add_token(RightBrace),
                ',' => self.add_token(Comma),
                '.' => self.add_token(Dot),
                '+' => self.add_token(Plus),
                '-' => self.add_token(Minus),
                ';' => self.add_token(Semicolon),
                '*' => self.add_token(Star),

                '!' => self.add_token_if_next_char_is_or('=', BangEqual, Bang),
                '=' => self.add_token_if_next_char_is_or('=', EqualEqual, Equal),
                '>' => self.add_token_if_next_char_is_or('=', GreaterEqual, Greater),
                '<' => self.add_token_if_next_char_is_or('=', LessEqual, Less),

                '/' => {
                    // 跳过注释
                    if let Some(_) = self.chars.next_if_eq(&'/') {
                        while let Some(_) = self.chars.next_if(|c| *c != '\n') {}
                    } else {
                        self.add_token(Slash);
                    }
                }
                '\n' => self.line += 1,

                '"' => self.string(),

                '_' => self.ident_or_keyword(c),
                c if c.is_ascii_alphabetic() => self.ident_or_keyword(c),
                c if c.is_whitespace() => {
                    while let Some(_) = self.chars.next_if(|c| c.is_whitespace() && *c != '\n') {}
                }
                c if c.is_ascii_digit() => self.number(c),

                _ => {
                    todo!("error handle")
                }
            }
        }
    }

    fn ident_or_keyword(&mut self, cur: char) {
        let mut value = cur.to_string();

        while let Some(c) = self.chars.peek() {
            if self.ident_invaild_chars.binary_search(c).is_ok() || c.is_whitespace() {
                break;
            }
            value.push(*c);
            self.chars.next();
        }

        let token_opt = self.keywords.get(&value[..]).cloned();
        if let Some(token) = token_opt {
            self.add_token(token);
        } else {
            self.add_token_with_value(Identifier, value);
        }
    }

    fn string(&mut self) {
        let mut value = "".to_string();
        while let Some(c) = self.chars.next() {
            if c == '"' {
                break;
                // todo: 考虑没有 配对的 " 的情况
            } else if c == '\n' {
                self.line += 1;
            }
            value.push(c);
        }
        self.add_token_with_value(Str, value);
    }

    fn number(&mut self, cur: char) {
        let mut value = cur.to_string();
        while let Some(c) = self.chars.peek() {
            if c.is_ascii_digit() || *c == '.' {
                // todo: 考虑多个小数点的情况
                value.push(*c);
                self.chars.next();
            } else {
                break;
            }
        }
        self.add_token_with_value(Number, value);
    }

    fn add_token(&mut self, token: TokenType) {
        self.tokens.push(Token::new(token, self.line))
    }

    fn add_token_with_value(&mut self, token: TokenType, value: String) {
        self.tokens.push(Token::with_value(token, value, self.line))
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_scanner() {
        // Scan a two-line snippet and compare the full token stream.
        let code = "let a = 10; \nif ( x > 0) { return 10; }";
        let mut scanner = Scanner::new(code);
        scanner.scan();

        // Short aliases: `plain` for value-less tokens, `lit` for literals.
        let plain = Token::new;
        let lit = Token::with_value;

        let expected = [
            plain(Let, 1),
            lit(Identifier, "a".to_string(), 1),
            plain(Equal, 1),
            lit(Number, "10".to_string(), 1),
            plain(Semicolon, 1),
            plain(If, 2),
            plain(LeftParen, 2),
            lit(Identifier, "x".to_string(), 2),
            plain(Greater, 2),
            lit(Number, "0".to_string(), 2),
            plain(RightParen, 2),
            plain(LeftBrace, 2),
            plain(Return, 2),
            lit(Number, "10".to_string(), 2),
            plain(Semicolon, 2),
            plain(RightBrace, 2),
            plain(Eof, 2),
        ];

        assert_eq!(scanner.tokens(), &expected);
    }

    #[test]
    fn test_whitespace() {
        // Sanity checks: '\n' counts as whitespace, and TokenType is Debug.
        assert!('\n'.is_whitespace());
        println!("{:?}", Let);
    }
}
