use std::{hash::Hash, collections::HashMap};


/// The kind of a lexical token.
///
/// Derives `Clone`/`Copy` (the enum is field-less, so copying is free) and
/// `PartialEq`/`Eq`/`Hash` so token types can be compared and used as map keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TokenType {
    // Single-character tokens
    None,
    // (
    LeftParen, 
    // )
    RightParen,
    // {
    LeftBrace,
    // }
    RightBrace,
    // ,
    Comma,
    // .
    Dot,
    // -
    Minus,
    // +
    Plus,
    // ;
    Semicolon,
    // *
    Star,

    // One- or two-character tokens
    // !
    Exclamation, 
    // !=
    ExclamationEquals,
    // =
    Equal,
    // ==
    EqualEqual,
    // =>
    FunLambda,
    // >
    Greater,
    // >=
    GreaterEqual,
    
    // <
    Less,
    // <=
    LessEqual,
    // <-
    FlatLambda,
    
    // /
    Slash,
    // //
    Comment,

    // Literals
    // identifier
    Identifier,
    // string literal
    StringX,
    // number literal
    NumberX,

    // Keywords
    // and
    AndX,
    // or
    OrX,
    // class
    ClassX,
    // if
    IfX,
    // else
    ElseX,
    // true
    TrueX,
    // false
    FalseX,
    // def
    DefX,
    // for
    ForX,
    // while
    WhileX,
    // print
    PrintX,
    // return
    ReturnX,
    // extends
    ExtendsX,
    // implements
    ImplementsX,
    // super
    SuperX,
    // this
    ThisX,
    // var
    VarX,
    // val
    ValX,
    // null
    NULL,

    // Special: end of input
    Eof,

    // Ignored characters (whitespace)
    Ignore
}

impl TokenType {
    /**
     * Returns the keyword token type for `word`, or `None` when the
     * word is not a reserved keyword (i.e. an ordinary identifier).
     */
    fn match_keywords(word: &str) -> Option<TokenType> {
        let keyword = match word {
            "and"        => TokenType::AndX,
            "or"         => TokenType::OrX,
            "class"      => TokenType::ClassX,
            "if"         => TokenType::IfX,
            "else"       => TokenType::ElseX,
            "true"       => TokenType::TrueX,
            "false"      => TokenType::FalseX,
            "def"        => TokenType::DefX,
            "for"        => TokenType::ForX,
            "while"      => TokenType::WhileX,
            "print"      => TokenType::PrintX,
            "return"     => TokenType::ReturnX,
            "extends"    => TokenType::ExtendsX,
            "implements" => TokenType::ImplementsX,
            "super"      => TokenType::SuperX,
            "this"       => TokenType::ThisX,
            "var"        => TokenType::VarX,
            "val"        => TokenType::ValX,
            "null"       => TokenType::NULL,
            _            => return None,
        };
        Some(keyword)
    }
}

/**
 * A single lexical token produced by the scanner.
 */
#[derive(Debug)]
pub struct Token {
    // The kind of this token (operator, literal, keyword, ...).
    pub t_type: TokenType,
    // The token's text value as it appears in the source.
    pub value: String,
    // Extra characters consumed beyond `value` when the scanner advances
    // its cursor (e.g. the two quotes around a string literal are consumed
    // but not part of the value). `None` means no extra characters.
    skip: Option<usize>,
}

impl Token {
    /**
     * Builds a token with no extra skip characters.
     */
    pub fn of(t_type: TokenType, value: String) -> Self {
        Self::of_t(t_type, value, None)
    }

    /**
     * Builds a token; `skip` is the number of extra characters the scanner
     * consumed beyond `value` (e.g. quotes around a string literal).
     */
    fn of_t(t_type: TokenType, value: String, skip: Option<usize>) -> Self {
        // Field-init shorthand instead of the redundant `field: field` form.
        Self { t_type, value, skip }
    }
}

pub struct Scanner {
    // All tokens parsed from one line of input.
    // NOTE(review): `scan_tokens` builds and returns its own Vec; this field
    // appears unused by the visible impl — confirm whether it is needed.
    pub tokens: Vec<Token>,
    // The raw source text, stored as a char array (indexed per character).
    pub source: Vec<char>,
    // Cursor: index of the next character to scan.
    pub current: usize
}

impl Scanner {

    /**
     * Scans the whole source and returns every token (whitespace is kept
     * as `Ignore` tokens), terminated by a single `Eof` token.
     */
    pub fn scan_tokens(&mut self) -> Vec<Token> {
        let mut tokens: Vec<Token> = vec![];
        while !self.is_at_end() {
            tokens.push(self.scan_token());
        }

        tokens.push(Token::of(TokenType::Eof, "".to_string()));

        tokens
    }

    /** True once every source character has been consumed. */
    fn is_at_end(&self) -> bool {
        self.current >= self.source.len()
    }

    /**
     * Scans one token starting at `self.current` and advances the cursor
     * past everything the token consumed.
     * Panics on a character that cannot start any token.
     */
    fn scan_token(&mut self) -> Token {
        let start = self.current;
        let remaining_sequence = &self.source[self.current..];
        let token = match remaining_sequence {
            ['(', ..]        => Token::of(TokenType::LeftParen, "(".to_string()),
            [')', ..]        => Token::of(TokenType::RightParen, ")".to_string()),
            ['{', ..]        => Token::of(TokenType::LeftBrace, "{".to_string()),
            ['}', ..]        => Token::of(TokenType::RightBrace, "}".to_string()),
            // BUG FIX: the value was the full-width comma "，" (3 UTF-8 bytes),
            // which both mislabeled the token and made the byte-based cursor
            // advance skip two extra characters.
            [',', ..]        => Token::of(TokenType::Comma, ",".to_string()),
            ['.', ..]        => Token::of(TokenType::Dot, ".".to_string()),
            ['-', ..]        => Token::of(TokenType::Minus, "-".to_string()),
            ['+', ..]        => Token::of(TokenType::Plus,  "+".to_string()),
            [';', ..]        => Token::of(TokenType::Semicolon, ";".to_string()),
            ['*', ..]        => Token::of(TokenType::Star, "*".to_string()),
            ['!', '=', ..]   => Token::of(TokenType::ExclamationEquals, "!=".to_string()),
            ['!', ..]        => Token::of(TokenType::Exclamation, "!".to_string()),
            ['=', '=', ..]   => Token::of(TokenType::EqualEqual, "==".to_string()),
            // BUG FIX: the value was ">=" although the matched lexeme is "=>".
            ['=', '>', ..]   => Token::of(TokenType::FunLambda, "=>".to_string()),
            ['=', ..]        => Token::of(TokenType::Equal, "=".to_string()),
            ['>', '=', ..]   => Token::of(TokenType::GreaterEqual, ">=".to_string()),
            ['>', ..]        => Token::of(TokenType::Greater, ">".to_string()),
            ['<', '=', ..]   => Token::of(TokenType::LessEqual, "<=".to_string()),
            // BUG FIX: the pattern was `['<', '-']` with no rest pattern, so
            // "<-" only matched when it ended the source; anywhere else it was
            // mis-lexed as Less followed by Minus.
            ['<', '-', ..]   => Token::of(TokenType::FlatLambda, "<-".to_string()),
            ['<', ..]        => Token::of(TokenType::Less, "<".to_string()),
            // A line comment swallows the rest of the source.
            ['/', '/', ..]   => Token::of(TokenType::Comment, String::from_iter(self.source[start..].iter())),
            ['/', ..]        => Token::of(TokenType::Slash, "/".to_string()),
            // String literal
            ['"', ..]                       => self.scan_token_string(),
            // Number literal
            ['0'..='9', ..]                 => self.scan_token_number(),
            // Identifier: starts with '_' or a letter
            ['_'|'a'..='z'|'A'..='Z', ..]   => self.scan_token_identifier(),
            // Ignored characters: ' ' \r \t \n
            [' '|'\r'|'\t'|'\n', ..]        => self.scan_token_ignore(),
            _                               => panic!("Unexpected token"),
        };

        // BUG FIX: advance by the number of *characters* consumed, not bytes.
        // `String::len` is a UTF-8 byte count and over-advances the
        // char-indexed cursor whenever the token value contains non-ASCII text
        // (e.g. a string literal holding CJK characters).
        let consumed = token.value.chars().count();
        self.current = start + token.skip.map_or(consumed, |extra| consumed + extra);
        token
    }

    /**
     * Scans a string literal; the cursor sits on the opening quote.
     * The token's `skip` of 2 accounts for the two quote characters that are
     * consumed but not part of the value. Panics on an unterminated string.
     */
    fn scan_token_string(&mut self) -> Token {
        let start: usize = self.current;
        // Step past the opening quote, then walk to the closing one.
        self.current += 1;
        while !self.is_at_end() && self.source[self.current] != '"' {
            self.current += 1;
        }

        if self.is_at_end() {
            panic!("不合符的字符串字面量");
        }
        
        let value = String::from_iter(self.source[start + 1..self.current].iter());
        Token::of_t(TokenType::StringX, value, Some(2))
    }

    /**
     * Scans a number literal: digits with an optional fractional part.
     * Panics when a '.' is not followed by a digit.
     */
    fn scan_token_number(&mut self) -> Token {
        let start: usize = self.current;

        // Integer part (the current char is already known to be a digit).
        self.current += 1;
        while !self.is_at_end() && is_digit(self.source[self.current]) {
            self.current += 1;
        }
        
        // Number ended exactly at end of input.
        if self.is_at_end() {
            let value = String::from_iter(self.source[start..self.current].iter());
            return Token::of(TokenType::NumberX, value);
        }

        // Optional fractional part: '.' must be followed by a digit.
        if self.source[self.current] == '.' {
            // BUG FIX: bounds-checked lookahead — a trailing '.' at the end of
            // input used to panic with an index-out-of-bounds error instead of
            // the intended diagnostic below.
            if self.source.get(self.current + 1).map_or(false, |&c| is_digit(c)) {
                self.current += 1;
            } else {
                panic!("{}", "数字字面量出现错误, . 后面必须有数字");
            }
        }

        // Fractional digits.
        while !self.is_at_end() && is_digit(self.source[self.current]) {
            self.current += 1;
        }

        let value = String::from_iter(self.source[start..self.current].iter());
        Token::of(TokenType::NumberX, value)
    }

    /**
     * Scans an identifier or keyword (starts with '_' or a letter).
     */
    fn scan_token_identifier(&mut self) -> Token {
        let start: usize = self.current;

        self.current += 1;
        while !self.is_at_end() && is_identifier(self.source[self.current]) {
            self.current += 1;
        }

        let value = String::from_iter(self.source[start..self.current].iter());
        // Keywords take precedence over plain identifiers.
        let token_type = TokenType::match_keywords(&value).unwrap_or(TokenType::Identifier);
        Token::of(token_type, value)
    }

    /**
     * Consumes one ignorable character (space, \r, \t or \n).
     */
    fn scan_token_ignore(&mut self) -> Token {
        let value = self.source[self.current].to_string();
        self.current += 1;
        Token::of(TokenType::Ignore, value)
    }
}

/// True for the ASCII digits '0'..='9'. Uses the stdlib predicate instead of
/// a hand-rolled range comparison.
fn is_digit(c: char) -> bool {
    c.is_ascii_digit()
}

/// True for characters allowed inside an identifier: '_', ASCII letters and
/// ASCII digits. `is_ascii_alphanumeric` covers exactly the letter+digit set
/// the previous `is_alpha(c) || is_digit(c)` combination matched.
fn is_identifier(c: char) -> bool {
    c == '_' || c.is_ascii_alphanumeric()
}

/// True for ASCII letters 'a'..='z' and 'A'..='Z'. Uses the stdlib predicate
/// instead of a hand-rolled pair of range comparisons.
fn is_alpha(c: char) -> bool {
    c.is_ascii_alphabetic()
}