use std::collections::VecDeque;
use regex::Regex;
use super::error::LexerError;

/// Classification of a JSON lexeme produced by the lexer.
// `Eq` is derived alongside `PartialEq`: every variant is fieldless, so
// equality is total (clippy: derive_partial_eq_without_eq).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TokenType
{
    LeftBrace,     // `{`
    RightBrace,    // `}`
    Number,         // numeric literal (integer or float)
    Bool,           // `true` / `false`
    String,         // quoted string (token text is stored without the quotes)
    TokenNull,     // `null`
    LeftBracket,   // `[`
    RightBracket,  // `]`
    Comma,          // `,`
    Colon,          // `:`
    TokenError,    // lexeme that could not be classified
    TokenEof,      // end of input / exhausted token queue
}

/// A single lexical token: the matched text plus its classification.
#[derive(Clone, Debug)]
pub struct Token
{
    token_str: String,     // lexeme text; string tokens are stored without their quotes
    token_type: TokenType, // classification assigned by the lexer
}

/// Tokenizer with a two-token window (current + one lookahead) backed by a
/// queue of pre-scanned tokens; `scan` fills the queue, `next` advances.
pub struct Lexer
{
    m_regex: Regex,          // compiled token pattern (REGEX_VALUE)
    m_que: VecDeque<Token>,  // tokens scanned but not yet consumed
    current_token: Token,    // token most recently promoted by next()
    next_token: Token,       // one-token lookahead
}


impl Token {
    /// Builds a token from its lexeme text and classification.
    pub fn new(token_str: &str, token_type: TokenType) -> Token {
        Token { token_str: token_str.to_string(), token_type }
    }

    /// The end-of-input sentinel: empty text, `TokenType::TokenEof`.
    pub fn eof() -> Token {
        Token { token_str: String::new(), token_type: TokenType::TokenEof }
    }

    /// Borrows the lexeme text.
    pub fn get_str(&self) -> &str {
        &self.token_str
    }

    /// Returns the token's classification (`TokenType` is `Copy`).
    pub fn get_type(&self) -> TokenType {
        self.token_type
    }

    /// True when this token is the end-of-input sentinel.
    pub fn is_eof(&self) -> bool {
        // matches! replaces the old `if let … { true } else { false }` form.
        matches!(self.token_type, TokenType::TokenEof)
    }
}
impl Lexer {
    /// Token pattern, alternatives tried left to right at each position:
    /// brackets, boolean literals (common casings), string literals (with
    /// backslash escapes — the old `".*?"` ended early at an escaped quote),
    /// null, punctuation, braces, then numbers (optionally negative; the
    /// float form precedes the integer form so the longer match wins — the
    /// old pattern silently dropped a leading `-`).
    // NOTE(review): exponent notation (1e5) is still not matched, and input
    // that matches no alternative is skipped rather than rejected.
    const REGEX_VALUE: &'static str =
        r#"[\[\]]|true|false|TRUE|FALSE|True|False|"(?:\\.|[^"\\])*"|null|,|:|[{}]|-?[0-9]+\.[0-9]+|-?[0-9]+"#;

    /// Creates a lexer with an empty queue; both window slots start as EOF.
    ///
    /// # Panics
    /// Only if `REGEX_VALUE` fails to compile, which would be a bug here.
    pub fn new() -> Lexer {
        Lexer {
            m_regex: Regex::new(Self::REGEX_VALUE).unwrap(),
            m_que: VecDeque::new(),
            current_token: Token::eof(),
            next_token: Token::eof(),
        }
    }

    /// Returns the token most recently promoted by `next()`.
    pub fn view_current(&self) -> &Token {
        &self.current_token
    }

    /// Returns the one-token lookahead.
    pub fn view_next(&self) -> &Token {
        &self.next_token
    }

    /// Tokenizes `json_str` into the internal queue and primes the window.
    ///
    /// After `scan` returns, `view_next()` holds the first token while
    /// `view_current()` is still EOF; call `next()` once to promote it.
    /// Tokens from a previous scan are NOT discarded — call `clear()` first
    /// when reusing the lexer.
    ///
    /// # Errors
    /// Returns `LexerError` when a matched lexeme cannot be classified.
    pub fn scan(&mut self, json_str: &str) -> Result<(), LexerError> {
        // find_iter suffices: no capture groups are consumed. Classifying the
        // original match text (instead of lowercasing every lexeme, as the
        // old code did) keeps the casing of string contents intact.
        for matched in self.m_regex.find_iter(json_str) {
            let token = Self::str_to_token(matched.as_str())?;
            self.m_que.push_back(token);
        }
        self.next();
        Ok(())
    }

    /// Advances the window: the lookahead becomes current, and the next
    /// queued token (or EOF when the queue is empty) becomes the lookahead.
    pub fn next(&mut self) {
        self.current_token = self.next_token.clone();
        self.next_token = self.m_que.pop_front().unwrap_or_else(Token::eof);
    }

    /// Classifies a single lexeme produced by the scanner regex.
    ///
    /// Boolean literals are compared case-insensitively and normalized to
    /// lowercase token text, so only they — not string contents — are
    /// case-folded. String tokens are stored without the surrounding quotes;
    /// escape sequences inside them are kept verbatim (not unescaped here).
    fn str_to_token(view: &str) -> Result<Token, LexerError> {
        match view {
            "{" => Ok(Token::new(view, TokenType::LeftBrace)),
            "}" => Ok(Token::new(view, TokenType::RightBrace)),
            "[" => Ok(Token::new(view, TokenType::LeftBracket)),
            "]" => Ok(Token::new(view, TokenType::RightBracket)),
            "," => Ok(Token::new(view, TokenType::Comma)),
            ":" => Ok(Token::new(view, TokenType::Colon)),
            "null" => Ok(Token::new(view, TokenType::TokenNull)),
            _ if view.eq_ignore_ascii_case("true") => Ok(Token::new("true", TokenType::Bool)),
            _ if view.eq_ignore_ascii_case("false") => Ok(Token::new("false", TokenType::Bool)),
            _ => {
                let bytes = view.as_bytes();
                match (bytes.first(), bytes.last()) {
                    // A digit or minus sign can only start a number match.
                    (Some(b'0'..=b'9'), _) | (Some(b'-'), _) => {
                        Ok(Token::new(view, TokenType::Number))
                    }
                    // Strip the surrounding quotes from string tokens.
                    (Some(b'"'), Some(b'"')) if view.len() >= 2 => {
                        Ok(Token::new(&view[1..view.len() - 1], TokenType::String))
                    }
                    _ => Err(LexerError::new(Token::new(view, TokenType::TokenError))),
                }
            }
        }
    }

    /// Drops any queued tokens and resets both window slots to EOF so the
    /// lexer can be reused for another `scan`.
    pub fn clear(&mut self) {
        self.m_que.clear();
        self.next_token = Token::eof();
        self.current_token = Token::eof();
    }
}

#[cfg(test)]
mod lexer_test {
    use super::{Lexer, TokenType};

    #[test]
    fn lexer_creat_test() {
        // new() must not panic: the token regex has to compile.
        let _ = Lexer::new();
    }

    #[test]
    fn lexer_scan_test() {
        let mut lexer = Lexer::new();
        lexer.scan("{\"hello\": [1.24,\"hello\",null,true,false,{\"key\":\"value\"}] }").unwrap();

        // scan() only primes the lookahead, so promote the first token before
        // walking. The old loop never advanced: as written the body never ran
        // (current was still EOF), and with a non-EOF current token it would
        // have looped forever.
        let mut types = Vec::new();
        lexer.next();
        while !lexer.view_current().is_eof() {
            types.push(lexer.view_current().get_type());
            lexer.next();
        }

        // 21 tokens: { "hello" : [ 1.24 , "hello" , null , true , false
        //             , { "key" : "value" } ] }
        assert_eq!(types.len(), 21);
        assert_eq!(types[0], TokenType::LeftBrace);
        assert_eq!(types[1], TokenType::String);
        assert_eq!(*types.last().unwrap(), TokenType::RightBrace);
    }
}