use self::{expression::CallExpression, expression_statement::ExpressionStatement};
use crate::parser::expression::{
    ArrayLiteral, EqualSign, HashLiteral, IndexExpression, Null, StringLiteral,
};
use crate::{
    lexer::{self, token::Token, Lexer},
    parser::{
        block_statement::BlockStatement,
        expression::{
            Boolean, Expression, FunctionLiteral, IfExpression, InfixExpression, Integer,
            PrefixExpression,
        },
        identifier::Identifier,
        letstatement::LetStatement,
        program::Program,
        return_statement::ReturnStatement,
        statement::Statement,
    },
    Error, Result,
};
use std::collections::HashMap;
mod block_statement;
pub mod evaluator;
mod expression;
mod expression_statement;
mod identifier;
mod letstatement;
mod node;
mod program;
mod return_statement;
mod statement;
/// Operator priorities (precedence levels), lowest to highest.
/// Used by `parse_expression` to decide how far an infix parse binds.
pub const LOWEST: i32 = 0;
/// Assignment: `=`
pub const EQUAL_SIGN: i32 = 1;
/// Equality: `==`, `!=`
pub const EQUALS: i32 = 2;
/// Comparison: `<`, `>`
pub const LESSGREATER: i32 = 3;
/// Additive: `+`, `-`
pub const SUM: i32 = 4;
/// Multiplicative: `*`, `/`
pub const PRODUCT: i32 = 5;
/// Prefix operators (e.g. `-x`)
pub const PREFIX: i32 = 6;
/// Function call: `f(x)`
pub const CALL: i32 = 7;
/// Index access: `a[i]`
pub const INDEX: i32 = 8;

/// Pratt-style parser over a fully pre-lexed token stream.
/// Prefix/infix parse functions are looked up per token type.
pub struct CompilerParser {
    // All tokens produced by the lexer, materialized up front.
    lexer: Vec<Token>,
    // Index into `lexer` of the token currently being examined.
    token_pos: usize,
    // The AST accumulated by `parse_program`.
    program: Program,
    // Prefix parse functions keyed by token type.
    prefix_parse_fns:
        HashMap<i32, fn(&mut CompilerParser) -> Result<Box<dyn Expression + 'static>>>,
    // Operator priority keyed by token type; absent tokens are LOWEST.
    priority_map: HashMap<i32, i32>,
    // Infix parse functions keyed by token type; each receives the
    // already-parsed left-hand expression.
    infix_parse_fns:
        HashMap<i32, fn(&mut CompilerParser, Box<dyn Expression>) -> Result<Box<dyn Expression>>>,
}

// Parsing is dispatched through the prefix/infix function tables
// registered in `new`, keyed by token type. Most parse functions
// leave `token_pos` on the last token they consumed; their callers
// advance past it.
impl CompilerParser {
    /// Creates a parser: runs the lexer to completion, then registers
    /// the prefix parse functions, the operator-priority table and the
    /// infix parse functions.
    pub fn new(mut lexer: Lexer) -> Result<Self> {
        let tokens = lexer.lexing()?;
        let hash_fns = Self::generate_parse_fn_table();
        let hash_priority = Self::generate_priority_table();
        let infix_hash = Self::generate_infix_map();
        let n = Self {
            token_pos: 0, // start at the first token (NOTE: original comment claimed -1 marks init, but the field is usize)
            lexer: tokens,
            program: Program::new(),
            prefix_parse_fns: hash_fns,
            priority_map: hash_priority,
            infix_parse_fns: infix_hash,
        };

        Ok(n)
    }

    /// Entry point: parses statements until EOF and returns the
    /// accumulated `Program`.
    pub fn parse_program(&mut self) -> Result<&Program> {
        loop {
            if lexer::is_eof(self.current_token()) {
                break;
            }
            let smt = self.parse_statement()?;
            self.program.add_statement(smt);
            self.current_token_pos_add(1);
        }
        return Ok(&self.program);
    }

    /// Parses one statement, dispatching on the current token type:
    /// `let`, `return`, or — by default — an expression statement.
    fn parse_statement(&mut self) -> Result<Box<dyn Statement>> {
        let cur_tok = self.current_token();
        match cur_tok.r#type() {
            lexer::LET => self.parse_let_statement(),
            lexer::RETURN => self.parse_return_statement(),
            _ => self.parse_expression_statement(),
        }
    }

    /// Parses a `let` statement: `let <identifier> = <expression>;`.
    fn parse_let_statement(&mut self) -> Result<Box<dyn Statement>> {
        // Reaching here means the current token is `let`.
        let cur_token = self.current_token().clone();
        self.expect_next(
            lexer::IDENTIFIER,
            "let语句后面必须是一个标识符, 比如let age = 4;",
        )?;

        let identifier_token = self.current_token().clone();
        let value = identifier_token.literal_ref().to_string();
        let identifier = Identifier::new(identifier_token, value);

        self.expect_next(
            lexer::EQUAL_SIGN,
            "let语句标识符后面必须是一个=, 比如let age = 4;",
        )?;

        // Advance so the current token is the first token of the
        // expression after `=`.
        self.current_token_pos_add(1);
        // `let identifier = expression;` — the right-hand side is an expression.
        let expr = self.parse_expression(LOWEST)?;
        // After parse_expression returns, the current token is the last
        // token it consumed; the statement must be terminated by `;`.
        self.expect_next(
            lexer::SEMICOLON,
            "let语句最后必须以分号结尾,比如let age = 4;",
        )?;

        let let_statement = LetStatement::new(cur_token, identifier, expr);
        Ok(Box::new(let_statement))
    }

    /// Parses a `return` statement: `return <expression>;`.
    pub fn parse_return_statement(&mut self) -> Result<Box<dyn Statement>> {
        let cur_token = self.current_token().clone();
        self.current_token_pos_add(1);
        let expr = self.parse_expression(LOWEST)?;
        self.expect_next(
            lexer::SEMICOLON,
            "return语句的最后必须以分号结尾, 比如return 10；",
        )?;
        let return_statement = ReturnStatement::new(cur_token, expr);
        Ok(Box::new(return_statement))
    }

    /// Parses a bare expression statement, e.g. `1 + 2 + 3;`.
    /// The trailing semicolon is required unless the expression ends
    /// at a `}` (e.g. the last expression inside a block).
    pub fn parse_expression_statement(&mut self) -> Result<Box<dyn Statement>> {
        let cur_token = self.current_token().clone();
        let expression = self.parse_expression(LOWEST)?;
        if self.current_token().r#type() != lexer::BRACE_RIGHT {
            self.expect_next(lexer::SEMICOLON, "表达式语句丢失分号")?;
        }

        let stmt = ExpressionStatement::new(cur_token, expression);
        Ok(Box::new(stmt))
    }

    /// Parses an expression starting at the current token.
    /// `priority` is the binding priority of the preceding operator:
    /// an infix parse only proceeds when the upcoming operator binds
    /// tighter than `priority`.
    pub fn parse_expression(&mut self, priority: i32) -> Result<Box<dyn Expression>> {
        let prefix = self.prefix_parse_fns.get(&self.current_token().r#type());
        if prefix.is_none() {
            return Err(Error::from(format!(
                "当前的token:{} 找不到对应的解析函数",
                self.current_token().literal_ref()
            )));
        }

        let prefix = prefix.unwrap();
        let left_exp = (*prefix)(self)?;
        if !self.peek_token_is(lexer::SEMICOLON) && priority < self.peek_priority() {
            let infix = self.infix_parse_fns.get(&self.peek_token().r#type());
            if let Some(infix) = infix {
                let infix = *infix;
                // After advancing, the current token is the operator
                // when the infix function starts running.
                self.current_token_pos_add(1);
                return infix(self, left_exp);
            }
        }
        Ok(left_exp)
    }

    /// Builds the prefix parse-function table (token type -> handler).
    fn generate_parse_fn_table(
    ) -> HashMap<i32, fn(&mut CompilerParser) -> Result<Box<dyn Expression>>> {
        let mut hash: HashMap<
            i32,
            fn(&mut CompilerParser) -> Result<Box<dyn Expression + 'static>>,
        > = HashMap::new();

        let _ = hash.insert(lexer::IDENTIFIER, Self::parse_identifier);
        let _ = hash.insert(lexer::NULL, Self::parse_null);
        let _ = hash.insert(lexer::INTEGER, Self::parse_integer_literal);
        let _ = hash.insert(lexer::BAND, Self::parse_prefix_expression);
        let _ = hash.insert(lexer::MINUS, Self::parse_prefix_expression);
        let _ = hash.insert(lexer::TRUE, Self::parse_boolean);
        let _ = hash.insert(lexer::FALSE, Self::parse_boolean);
        let _ = hash.insert(lexer::PARENTHESIS_LEFT, Self::parse_grouped_expression);
        let _ = hash.insert(lexer::IF, Self::parse_if_expression);
        let _ = hash.insert(lexer::FUNCTION, Self::parse_function_literal);
        let _ = hash.insert(lexer::STRING, Self::parse_string_literal);
        let _ = hash.insert(lexer::SQUARE_BRACKET_LEFT, Self::parse_array_literal);
        let _ = hash.insert(lexer::BRACE_LEFT, Self::parse_hash_literal);
        hash
    }

    /// Builds the operator-priority table (token type -> priority).
    fn generate_priority_table() -> HashMap<i32, i32> {
        let mut hash = HashMap::<i32, i32>::new();
        let _ = hash.insert(lexer::EQUAL_SIGN, EQUAL_SIGN);
        let _ = hash.insert(lexer::EQ, EQUALS);
        let _ = hash.insert(lexer::NOT_EQ, EQUALS);
        let _ = hash.insert(lexer::LT, LESSGREATER);
        let _ = hash.insert(lexer::GT, LESSGREATER);
        let _ = hash.insert(lexer::PLUS, SUM);
        let _ = hash.insert(lexer::MINUS, SUM);
        let _ = hash.insert(lexer::SLASH, PRODUCT);
        let _ = hash.insert(lexer::ASTERISK, PRODUCT);
        let _ = hash.insert(lexer::PARENTHESIS_LEFT, CALL);
        let _ = hash.insert(lexer::SQUARE_BRACKET_LEFT, INDEX);
        hash
    }

    /// Builds the infix parse-function table (token type -> handler).
    fn generate_infix_map(
    ) -> HashMap<i32, fn(&mut CompilerParser, Box<dyn Expression>) -> Result<Box<dyn Expression>>>
    {
        let mut hash: HashMap<
            i32,
            fn(&mut CompilerParser, Box<dyn Expression>) -> Result<Box<dyn Expression>>,
        > = HashMap::new();
        let _ = hash.insert(lexer::PLUS, Self::parse_infix_expression);
        let _ = hash.insert(lexer::MINUS, Self::parse_infix_expression);
        let _ = hash.insert(lexer::ASTERISK, Self::parse_infix_expression);
        let _ = hash.insert(lexer::SLASH, Self::parse_infix_expression);
        let _ = hash.insert(lexer::EQ, Self::parse_infix_expression);
        let _ = hash.insert(lexer::EQUAL_SIGN, Self::parse_equal_sign);
        let _ = hash.insert(lexer::NOT_EQ, Self::parse_infix_expression);
        let _ = hash.insert(lexer::LT, Self::parse_infix_expression);
        let _ = hash.insert(lexer::GT, Self::parse_infix_expression);
        let _ = hash.insert(lexer::PARENTHESIS_LEFT, Self::parse_call_expression);
        let _ = hash.insert(lexer::SQUARE_BRACKET_LEFT, Self::parse_index_expression);
        hash
    }

    /// Prefix handler: identifier expression.
    fn parse_identifier(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        Ok(Box::new(Self::create_identifier(caller.current_token())))
    }

    /// Prefix handler: `null` literal.
    fn parse_null(_caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        Ok(Box::new(Null::new()))
    }

    /// Prefix handler: integer literal. Note the literal is parsed as
    /// f64 even though the AST node is named `Integer`.
    fn parse_integer_literal(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token();
        let value = cur_token.literal_ref().parse::<f64>()?;
        Ok(Box::new(Integer::new(cur_token.clone(), value)))
    }

    /// Prefix handler: prefix operator expression (e.g. `-x`).
    fn parse_prefix_expression(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();
        let operator = cur_token.literal_ref().to_string();
        caller.current_token_pos_add(1);
        let express = caller.parse_expression(PREFIX)?;
        let prefix_express = PrefixExpression::new(cur_token, operator, express);

        Ok(Box::new(prefix_express))
    }

    /// Infix handler: binary operator expression.
    /// When called, the current token is the operator itself.
    fn parse_infix_expression(
        caller: &mut CompilerParser,
        left: Box<dyn Expression>,
    ) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();
        let operator = cur_token.literal_ref().to_string();
        let cur_pri = caller.current_priority();
        caller.current_token_pos_add(1);
        let right = caller.parse_expression(cur_pri)?;
        let infix = Box::new(InfixExpression::new(cur_token, left, operator, right));
        // If another, tighter-binding operator follows, fold the
        // expression built so far in as the left branch and recurse.
        if !caller.peek_token_is(lexer::SEMICOLON)
            && caller.current_priority() < caller.peek_priority()
        {
            caller.current_token_pos_add(1);
            Self::parse_infix_expression(caller, infix)
        } else {
            Ok(infix)
        }
    }

    /// Infix handler: assignment `x = expr`. Behaves like a statement
    /// even though it is parsed through the expression machinery.
    fn parse_equal_sign(
        caller: &mut CompilerParser,
        left: Box<dyn Expression>,
    ) -> Result<Box<dyn Expression>> {
        // `left` is expected to be an identifier; the downcast relies
        // on that (see node::cast_down_type).
        let left_ident = node::cast_down_type::<Identifier>(left.cast_to_node());
        caller.current_token_pos_add(1);
        let right = Self::parse_expression(caller, LOWEST)?;
        Ok(Box::new(EqualSign::new(left_ident.clone(), right)))
    }

    /// Prefix handler: boolean literal (`true` / `false`).
    fn parse_boolean(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();
        let value = cur_token.r#type() == lexer::TRUE;
        let boolean = Boolean::new(cur_token, value);
        Ok(Box::new(boolean))
    }

    /// Prefix handler: grouped (parenthesized) expression.
    fn parse_grouped_expression(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        caller.current_token_pos_add(1);
        let exp = caller.parse_expression(LOWEST)?;
        caller.expect_next(lexer::PARENTHESIS_RIGHT, "组表达式丢失右侧括号")?;
        Ok(exp)
    }

    /// Prefix handler: `if (cond) { … } [else { … }]` expression.
    fn parse_if_expression(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();
        caller.expect_next(
            lexer::PARENTHESIS_LEFT,
            "if表达式中的条件表达式丢失左侧括号",
        )?;
        caller.current_token_pos_add(1);
        let condition = caller.parse_expression(LOWEST)?;
        caller.expect_next(
            lexer::PARENTHESIS_RIGHT,
            "if表达式中的条件表达式丢失右侧括号",
        )?;
        caller.expect_next(lexer::BRACE_LEFT, "if表达式语句块丢失左大括号")?;
        let consequence = caller.parse_block_statement()?;
        let mut alternative: Option<BlockStatement> = None;
        if caller.peek_token_is(lexer::ELSE) {
            caller.current_token_pos_add(1);
            caller.expect_next(lexer::BRACE_LEFT, "if表达式else丢失左侧大括号")?;
            alternative = Some(caller.parse_block_statement()?);
        }
        let if_expression = IfExpression::new(cur_token, condition, consequence, alternative);
        Ok(Box::new(if_expression))
    }

    /// Prefix handler: function literal `fn(params) { body }`.
    fn parse_function_literal(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();

        // `fn` must be followed by a left parenthesis.
        caller.expect_next(lexer::PARENTHESIS_LEFT, "函数参数丢失左侧小括号")?;
        let parameters = caller.parse_function_parameters()?;

        // Then the left brace opening the function body.
        caller.expect_next(lexer::BRACE_LEFT, "函数主题丢失左侧大括号")?;

        let body = caller.parse_block_statement()?;
        Ok(Box::new(FunctionLiteral::new(cur_token, parameters, body)))
    }

    /// Prefix handler: string literal.
    fn parse_string_literal(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        Ok(Box::new(StringLiteral::new(caller.current_token().clone())))
    }

    /// Prefix handler: array literal `[a, b, …]`.
    fn parse_array_literal(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let token = caller.current_token().clone();
        let elements = caller.parse_expression_list(lexer::SQUARE_BRACKET_RIGHT)?;
        Ok(Box::new(ArrayLiteral::new(token, elements)))
    }

    /// Prefix handler: hash literal `{k: v, …}`. Keys and values are
    /// collected into two parallel vectors.
    fn parse_hash_literal(caller: &mut CompilerParser) -> Result<Box<dyn Expression>> {
        let token = caller.current_token().clone();
        let mut keys: Vec<Box<dyn Expression>> = vec![];
        let mut values: Vec<Box<dyn Expression>> = vec![];
        loop {
            if caller.peek_token_is(lexer::BRACE_RIGHT) {
                break;
            }
            caller.current_token_pos_add(1);
            let key = caller.parse_expression(LOWEST)?;
            caller.expect_next(lexer::COLON, "哈希类型关键值后面丢失冒号")?;
            caller.current_token_pos_add(1);
            let value = caller.parse_expression(LOWEST)?;
            keys.push(key);
            values.push(value);
            // Each entry must be followed by a comma or the closing brace.
            if !caller.peek_token_is(lexer::COMMA) && !caller.peek_token_is(lexer::BRACE_RIGHT) {
                return Err(Self::error(
                    &caller.current_token(),
                    "后面必须是一个逗号或者右大括号",
                ));
            }
            if caller.peek_token_is(lexer::COMMA) {
                caller.current_token_pos_add(1);
            }
        }
        caller.current_token_pos_add(1);
        Ok(Box::new(HashLiteral::new(token, keys, values)))
    }

    /// Parses a comma-separated expression list terminated by the
    /// token type `end` (e.g. `]` for arrays).
    fn parse_expression_list(&mut self, end: i32) -> Result<Vec<Box<dyn Expression>>> {
        let mut list = vec![];
        if self.peek_token_is(end) {
            self.current_token_pos_add(1);
            return Ok(list);
        }
        self.current_token_pos_add(1);
        list.push(self.parse_expression(LOWEST)?);
        loop {
            if self.peek_token_is(lexer::COMMA) {
                self.current_token_pos_add(2);
                list.push(self.parse_expression(LOWEST)?);
            } else {
                break;
            }
        }
        if !self.peek_token_is(end) {
            return Err(Error::from("数组丢失右侧的中括号"));
        }
        self.current_token_pos_add(1);
        Ok(list)
    }

    /// Parses the statements inside a brace-delimited block; the
    /// current token is the opening `{` on entry.
    fn parse_block_statement(&mut self) -> Result<BlockStatement> {
        // Current token: `{`.
        let cur_token = self.current_token().clone();
        let mut statements: Vec<Box<dyn Statement>> = vec![];
        self.current_token_pos_add(1);
        while !self.current_token_is(lexer::BRACE_RIGHT) {
            let smt = self.parse_statement()?;
            statements.push(smt);
            self.current_token_pos_add(1);
        }
        let block_statement = BlockStatement::new(cur_token, statements);
        Ok(block_statement)
    }

    /// Parses a function literal's parameter list; the current token
    /// is the opening `(` on entry.
    fn parse_function_parameters(&mut self) -> Result<Vec<Identifier>> {
        // Current token: `(`.
        let mut parameters: Vec<Identifier> = vec![];
        self.current_token_pos_add(1);
        let cur_token = self.current_token().clone();
        match cur_token.r#type() {
            lexer::PARENTHESIS_RIGHT => Ok(parameters),
            lexer::EOF => Err(Self::error(self.prev_token(), "函数参数丢失右侧括号")),
            lexer::IDENTIFIER => {
                parameters.push(Identifier::new(cur_token, "".to_string()));
                while self.peek_token_is(lexer::COMMA) {
                    self.current_token_pos_add(2);
                    let tok = self.current_token().clone();
                    if tok.r#type() != lexer::IDENTIFIER {
                        return Err(Self::error(self.prev_token(), "函数的参数必须是一个标识符"));
                    }
                    parameters.push(Identifier::new(tok, "".to_string()))
                }
                self.expect_next(lexer::PARENTHESIS_RIGHT, "函数参数丢失右侧小括号")?;
                Ok(parameters)
            }
            _ => Err(Self::error(self.prev_token(), "函数参数必须是标识符")),
        }
    }

    /// Infix handler: call expression `f(args…)`.
    fn parse_call_expression(
        caller: &mut CompilerParser,
        func: Box<dyn Expression>,
    ) -> Result<Box<dyn Expression>> {
        // Current token: `(`.
        let cur_token = caller.current_token().clone();
        let arguments = caller.parse_call_arguments()?;
        let func_obj = Box::new(CallExpression::new(cur_token, func, arguments));
        // Check whether a higher-priority operator follows the call
        // (e.g. `f(x)[0]`); if so, hand the call off as its left side.
        if !caller.peek_token_is(lexer::SEMICOLON) && caller.peek_priority() > CALL {
            caller.current_token_pos_add(1);
            let infix = caller.infix_parse_fns.get(&caller.current_token().r#type());
            if let Some(infix) = infix {
                let infix = *infix;
                // After the advance above, the current token is the
                // operator when the infix function runs.
                return infix(caller, func_obj);
            }
        }
        Ok(func_obj)
    }

    /// Infix handler: index expression `left[index]`.
    fn parse_index_expression(
        caller: &mut CompilerParser,
        left: Box<dyn Expression>,
    ) -> Result<Box<dyn Expression>> {
        let cur_token = caller.current_token().clone();
        caller.current_token_pos_add(1);
        let index = caller.parse_expression(LOWEST)?;
        if !caller.peek_token_is(lexer::SQUARE_BRACKET_RIGHT) {
            return Err(Error::from("丢失右侧中括号"));
        }
        caller.current_token_pos_add(1);
        Ok(Box::new(IndexExpression::new(cur_token, left, index)))
    }

    /// Builds an `Identifier` node from the given token, using the
    /// token's literal text as the identifier's value.
    fn create_identifier(tok: &Token) -> Identifier {
        let value = tok.literal_ref().to_string();
        Identifier::new(tok.clone(), value)
    }

    /// Parses the argument list of a call; the current token is the
    /// opening `(` on entry, and the closing `)` is consumed.
    fn parse_call_arguments(&mut self) -> Result<Vec<Box<dyn Expression>>> {
        let mut args: Vec<Box<dyn Expression>> = vec![];
        if self.peek_token_is(lexer::PARENTHESIS_RIGHT) {
            self.current_token_pos_add(1);
            return Ok(args);
        }
        self.current_token_pos_add(1);
        let ret = self.parse_expression(LOWEST)?;
        args.push(ret);
        while self.peek_token_is(lexer::COMMA) {
            self.current_token_pos_add(2);
            args.push(self.parse_expression(LOWEST)?);
        }
        self.expect_next(lexer::PARENTHESIS_RIGHT, "函数调用丢失右侧小括号")?;
        Ok(args)
    }

    /// Returns the token at the current position.
    /// Panics if the position is past the end of the stream.
    fn current_token(&self) -> &Token {
        self.lexer
            .get(self.token_pos)
            .expect("current_token is none")
    }

    /// Returns the token one past the current position.
    /// Panics if that position is past the end of the stream.
    pub fn peek_token(&self) -> &Token {
        self.lexer
            .get(self.token_pos + 1)
            .expect("peek_token is none")
    }

    /// True if the next token has type `ty`.
    fn peek_token_is(&self, ty: i32) -> bool {
        Self::token_is_type(self.peek_token(), ty)
    }

    /// Advances the current token position by `n`.
    pub fn current_token_pos_add(&mut self, n: usize) {
        self.token_pos += n;
    }

    /// Returns the token just before the current position.
    /// Panics (underflow) if called at position 0.
    pub fn prev_token(&self) -> &Token {
        self.lexer
            .get(self.token_pos - 1)
            .expect("prev_token is none")
    }

    /// True if `tok` has token type `ty`.
    fn token_is_type(tok: &Token, ty: i32) -> bool {
        tok.r#type() == ty
    }

    /// True if the current token has type `ty`.
    fn current_token_is(&self, ty: i32) -> bool {
        Self::token_is_type(self.current_token(), ty)
    }

    /// Builds an `Error` carrying `msg` plus the token's line number
    /// and literal text for diagnostics.
    fn error(tok: &Token, msg: &str) -> Error {
        Error::from(format!(
            "{} in line: {} token: {}",
            msg,
            tok.line_num(),
            tok.literal_ref()
        ))
    }

    /// Asserts the NEXT token has type `ty` and advances onto it;
    /// otherwise returns an error built from `msg`.
    fn expect_next(&mut self, ty: i32, msg: &str) -> Result<()> {
        let current_token = self.current_token();
        let peek_token = self.peek_token();
        if !Self::token_is_type(peek_token, ty) {
            return Err(Self::error(current_token, msg));
        }
        self.token_pos += 1;
        Ok(())
    }

    /// Priority of the next token; LOWEST when it has no entry in the
    /// priority table.
    fn peek_priority(&self) -> i32 {
        let peek_tok = self.peek_token();
        let peek_priority = self.priority_map.get(&peek_tok.r#type());
        if let Some(p) = peek_priority {
            return *p;
        }
        LOWEST
    }

    /// Priority of the current token; LOWEST when it has no entry in
    /// the priority table.
    fn current_priority(&self) -> i32 {
        let current_tok = self.current_token();
        let current_priority = self.priority_map.get(&current_tok.r#type());
        if let Some(p) = current_priority {
            return *p;
        }
        LOWEST
    }
}

#[cfg(test)]
mod tests {
    use crate::lexer;
    use crate::parser::evaluator;
    use crate::parser::evaluator::Evaluator;
    use crate::parser::CompilerParser;

    /// End-to-end smoke test: lexes and parses the local `tmp.sky`
    /// source file, evaluates the resulting program, and prints any
    /// evaluation error. NOTE(review): this depends on `./tmp.sky`
    /// existing in the working directory — it is a manual harness
    /// rather than a self-contained unit test.
    #[test]
    fn test_parse() {
        let code = std::fs::read_to_string("./tmp.sky").unwrap();
        let mut parse = CompilerParser::new(
            lexer::LexerBuilder::new()
                .set_source_code(&code)
                .set_line_count(1)
                .build(),
        )
        .unwrap();
        let p = parse.parse_program().unwrap();
        let mut evaluator = evaluator::Evaluator::new();
        let result = evaluator.eval(p.as_node());
        if Evaluator::is_error(result.as_ref()) {
            println!("发生错误: {}", result.inspect());
        }
    }
}
