use crate::lexer::{Lexer};
use crate::{parser, token};
use crate::token::{Token, TokenType};
use crate::ast::{BlockStatement, BoolLiteral, CallExpression, Expression, ExpressionStatement, FunctionLiteral, Identifier, IfExpression, InfixExpression, IntegerLiteral, LetStatement, PrefixExpression, Program, ReturnStatement, Statement};
use core::{fmt, panic};
use std::collections::HashMap;
use std::fmt::Debug;
use std::sync::Arc;
use tracing::{info, instrument,Level};
use tracing_subscriber;
/// Operator precedence levels for the Pratt parser.
/// The explicit numeric values matter: comparisons below cast `as u8`.
#[derive(Clone, Copy, Debug)]
enum PRIORITY {
    LOWEST = 1,
    EQUALS = 2,      // ==
    LESSGREATER = 3, // > or <
    SUM = 4,         // + or -
    PRODUCT = 5,     // * or /
    PREFIX = 6,      // -X or !X
    CALL = 7,        // myFunction(X)
}

/// Two precedences are equal exactly when their numeric discriminants match.
impl PartialEq for PRIORITY {
    fn eq(&self, other: &Self) -> bool {
        let lhs = *self as u8;
        let rhs = *other as u8;
        lhs == rhs
    }
}

/// Order precedences by their numeric discriminants (always comparable,
/// so this never returns `None`).
impl PartialOrd for PRIORITY {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        let lhs = *self as u8;
        let rhs = *other as u8;
        lhs.partial_cmp(&rhs)
    }
}

#[derive( Clone)]
pub struct Parser{
    // Owned lexer supplying the token stream (an earlier design borrowed it).
    // l: &'a mut Lexer,
    l: Lexer,
    // Token currently under examination.
    cur_token: Option<Token>,
    // One-token lookahead.
    peek_token: Option<Token>,
    // Human-readable parse errors accumulated during parsing.
    errors: Vec<String>,
    /**
        So that the parser can call the correct prefixParseFn or infixParseFn
        for the current token type, it carries these two maps.
        With them we can check whether the prefix or infix table has a parse
        function associated with curToken.Type.
	*/
    prefix_parse_fns: Option<HashMap<TokenType, Arc<dyn Fn(&mut Parser) -> Option<Arc<dyn Expression>> + Send + Sync >>>,
    infix_parse_fns: Option<HashMap<TokenType, Arc<dyn Fn(&mut Parser, Arc<dyn Expression>) -> Option<Arc<dyn Expression>> + Send + Sync  >>>,
    // infix_parse_fns: Option<HashMap<TokenType, Arc<fn(parser: &mut Parser, left: Box<dyn Expression>) -> Option<Box<dyn Expression>>>>,
}


/// Manual Debug impl: the function maps are not Debug, so they are
/// rendered as a placeholder string.
impl fmt::Debug for Parser {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut builder = f.debug_struct("Parser");
        builder.field("cur_token", &self.cur_token);
        builder.field("peek_token", &self.peek_token);
        builder.field("errors", &self.errors);
        builder.field("prefix_parse_fns", &"<function map>");
        builder.field("infix_parse_fns", &"<function map>");
        builder.finish()
    }
}

impl Parser{
    /// Advance the token window: `cur_token` takes the old `peek_token`,
    /// and `peek_token` is refilled from the lexer.
    fn next_token(&mut self){
        // `take` moves the token out instead of cloning it; the slot is
        // immediately refilled on the next line.
        self.cur_token = self.peek_token.take();
        self.peek_token = Some(self.l.next_token());
    }

    pub fn new(l: Lexer) -> Parser {
        // unimplemented!()
        let mut p = Parser{
            l,
            cur_token: None,
            peek_token: None,
            errors: Vec::new(),
            prefix_parse_fns: None,
            infix_parse_fns: None,
        };
        p.prefix_parse_fns = Some(HashMap::<TokenType, Arc<dyn Fn(&mut Parser) -> Option<Arc<dyn Expression>> + Send + Sync>>::new());
        // 注册标识符解析函数，与Go代码中的实现对应
        // TokenType::BANG代表!5中的!, TokenType::MINUS代表-5中的-
        // 注册前缀函数
        p.register_prefix(TokenType::IDENT, Arc::new(|parser| parser.parse_identifier()));
        p.register_prefix(TokenType::INT, Arc::new(|parser| parser.parse_integer_literal()));
        p.register_prefix(TokenType::BANG, Arc::new(|parser| parser.parse_prefix_expression()));
        p.register_prefix(TokenType::MINUS, Arc::new(|parser| parser.parse_prefix_expression()));
        p.register_prefix(TokenType::FALSE, Arc::new(|parser| parser.parse_bool_literal()));
        p.register_prefix(TokenType::TRUE, Arc::new(|parser| parser.parse_bool_literal()));
        p.register_prefix(TokenType::LPAREN, Arc::new(|parser| parser.parse_grouped_expression()));
        p.register_prefix(TokenType::IF, Arc::new(|parser| parser.parse_if_expression()));
        p.register_prefix(TokenType::FUNCTION, Arc::new(|parser| parser.parse_function_literal()));
        
        // 注册中缀函数
        p.infix_parse_fns = Some(HashMap::<TokenType, Arc<dyn Fn(&mut Parser, Arc<dyn Expression>) -> Option<Arc<dyn Expression>> + Send + Sync>>::new());
        p.register_infix(TokenType::PLUS, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::MINUS, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::ASTERISK, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::SLASH, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::EQ, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::NOT_EQ, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::GT, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::LT, Arc::new(|parser, left| parser.parse_infix_expression(left)));
        p.register_infix(TokenType::LPAREN, Arc::new(|parser, left| parser.parse_call_expression(left)));
        p.next_token();
        p.next_token();
        p
    }

    /// Parse statements until EOF, collecting them into a `Program`.
    pub fn parse_program(&mut self) -> Program {
        let mut statements = Vec::new();
        while !self.cur_token_is(&TokenType::EOF) {
            match self.parse_statement() {
                Some(stmt) => statements.push(stmt),
                // NOTE(review): a single failed statement aborts the whole
                // parse here, so errors recorded in `self.errors` are never
                // surfaced to the caller.
                None => panic!("parser error: failed to parse statement"),
            }
            self.next_token();
        }
        Program { statements }
    }

    /// Dispatch on the current token type to the matching statement parser.
    fn parse_statement(&mut self) -> Option<Arc<dyn Statement>> {
        match self.cur_token.as_ref().unwrap().token_type {
            TokenType::LET => self
                .parse_let_statement()
                .map(|stmt| Arc::new(stmt) as Arc<dyn Statement>),
            TokenType::RETURN => self
                .parse_return_statement()
                .map(|stmt| Arc::new(stmt) as Arc<dyn Statement>),
            // Anything else is treated as an expression statement.
            _ => self
                .parse_expression_statement()
                .map(|stmt| Arc::new(stmt) as Arc<dyn Statement>),
        }
    }

    /// Whether the current token has the given type.
    fn cur_token_is(&self, token: &TokenType) -> bool {
        let current = self.cur_token.as_ref().unwrap();
        current.token_type == *token
    }

    /// Whether the lookahead token has the given type.
    fn peek_token_is(&self, token: &TokenType) -> bool {
        let lookahead = self.peek_token.as_ref().unwrap();
        lookahead.token_type == *token
    }

    /// The parser's "assertion" helper: it checks the type of the next
    /// token to enforce token order. Only when the type matches does it
    /// call next_token to advance; otherwise it records a peek error.
    fn expect_peek(&mut self, token: &TokenType) -> bool {
        if !self.peek_token_is(token) {
            self.peek_error(token);
            return false;
        }
        self.next_token();
        true
    }

    /// Parse `let <identifier> = <expression>;`.
    fn parse_let_statement(&mut self) -> Option<LetStatement> {
        let let_token = self.cur_token.as_ref().unwrap().clone();
        // The identifier must come right after `let`.
        if !self.expect_peek(&TokenType::IDENT) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::IDENT, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        let ident_token = self.cur_token.as_ref().unwrap().clone();
        let name = Identifier {
            value: ident_token.literal.clone(),
            token: ident_token,
        };
        // `=` must follow the identifier.
        if !self.expect_peek(&TokenType::ASSIGN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::ASSIGN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        // Step past `=` and parse the right-hand side.
        self.next_token();
        let value = self.parse_expression(PRIORITY::LOWEST);
        // A trailing semicolon is optional.
        if self.peek_token_is(&TokenType::SEMICOLON) {
            self.next_token();
        }
        Some(LetStatement {
            token: let_token,
            name: Some(name),
            value,
        })
    }

    /// Parse `return <expression>;`.
    fn parse_return_statement(&mut self) -> Option<ReturnStatement> {
        let return_token = self.cur_token.as_ref().unwrap().clone();
        // Step past `return` and parse the value expression.
        self.next_token();
        let value = self.parse_expression(PRIORITY::LOWEST);
        // A trailing semicolon is optional.
        if self.peek_token_is(&TokenType::SEMICOLON) {
            self.next_token();
        }
        Some(ReturnStatement {
            token: return_token,
            value,
        })
    }

    // fn parse_if_statement(&mut self) -> Option<IfStatement>{
    //     let stmt = IfStatement{
    //         token: self.cur_token.as_ref().unwrap().clone(),
    //         value: Option::None,
    //     };
    //     while !self.cur_token_is(&TokenType::SEMICOLON){
    //         self.next_token();
    //     }
    //     // unimplemented!()
    //     Some(stmt)
    //     // unimplemented!()
    // }

    /// A copy of every error recorded so far.
    pub fn get_errors(&self) -> Vec<String> {
        self.errors.to_vec()
    }

    /// Record an error noting that `peek_token` was not the expected type.
    fn peek_error(&mut self, t: &TokenType) {
        let got = self.peek_token.as_ref().unwrap().token_type.to_string();
        let msg = format!("Expected next token to be {}, got {} instead", t.to_string(), got);
        self.errors.push(msg);
    }

    /// Associate `func` with `token_type` in the prefix table.
    /// Silently a no-op when the table has not been created yet.
    fn register_prefix(&mut self, token_type: TokenType, func: Arc<dyn Fn(&mut Parser) -> Option<Arc<dyn Expression>> + Send + Sync>){
        if let Some(table) = self.prefix_parse_fns.as_mut() {
            table.insert(token_type, func);
        }
    }
fn register_infix(&mut self, token_type: TokenType, func: Arc<dyn Fn(&mut Parser, Arc<dyn Expression>) -> Option<Arc<dyn Expression>> + Send + Sync>){
        // Associate `func` with `token_type` in the infix table.
        // Silently a no-op when the table has not been created yet.
        if let Some(table) = self.infix_parse_fns.as_mut() {
            table.insert(token_type, func);
        }
    }
    
    /// Small helper that appends a formatted "no prefix parse function"
    /// message to the parser's errors, giving failing tests a more
    /// specific error message. Also echoes it to stderr.
    fn no_prefix_parse_fn_error(&mut self, t: TokenType) {
        let msg = format!("no prefix parse function for {} found", t.to_string());
        eprintln!("{}", msg);
        self.errors.push(msg);
    }

    /**
     * First version of parse_expression (kept for reference).
     */
    // fn parse_expression(&mut self, precedence: PRIORITY) -> Option<Arc<dyn Expression>>{
    //     let prefix = self.prefix_parse_fns.as_mut();
    //     if let Some(pf) = prefix{
    //         let left_exp = pf.get(&self.cur_token.as_ref().unwrap().token_type);
    //         if let Some(le) = left_exp{
    //             le.clone()(self)
    //         }else{
    //             self.no_prefix_parse_fn_error(self.cur_token.as_ref().unwrap().token_type.clone());
    //             None
    //         }
    //     }else{
    //         None
    //     }
    // }

    /**
     * The core of the Pratt parser.
     * The unused `precedence` parameter in the first version was confusing.
     * Key idea covered earlier: the precedence value depends on what the
     * caller knows and on context. parse_expression_statement is the
     * top-level entry for expressions here; it knows nothing about operator
     * precedence and simply uses LOWEST. parse_prefix_expression, however,
     * passes PREFIX to parse_expression because it parses prefix operands.
     */
    /**
    * When calling parse_expression, `precedence` is the "right-binding
    * power" of the current call. The stronger it is, the more tokens,
    * operators and operands to the right the current expression (including
    * subsequent tokens) can bind — i.e. the more it can "absorb".
    */
    fn parse_expression(&mut self, precedence: PRIORITY) -> Option<Arc<dyn Expression>>{
        let prefix = self.prefix_parse_fns.as_mut();
        if let Some(pf) = prefix{
            let left_exp = pf.get(&self.cur_token.as_ref().unwrap().token_type);
            if let Some(le) = left_exp{
                // *le is the prefix parse function for the current token.
                 let mut lf_exp = le.clone()(self);
                 // Fold infix operators into `lf_exp` while the next operator
                 // binds more tightly than our right-binding power.
                 while !self.peek_token_is(&TokenType::SEMICOLON) && precedence < self.peek_precedence(){
                        // Clone the infix closure out of the map first, so the
                        // borrow of `self.infix_parse_fns` ends before we call
                        // it with `&mut self`.
                        let inf_closure = {
                            let infix = self.infix_parse_fns.as_mut();
                            if let Some(ifx) = infix {
                                let ix = ifx.get(&self.peek_token.as_ref().unwrap().token_type);
                                if let Some(inf) = ix {
                                    inf.clone()
                                } else {
                                    // No infix parser: hand back what we have.
                                    return lf_exp;
                                }
                            } else {
                                return lf_exp;
                            }
                        };
                        self.next_token();
                        // Invokes e.g. parser.parse_infix_expression(left).
                        lf_exp = inf_closure(&mut *self, lf_exp.as_ref().unwrap().clone());
                 }
                 lf_exp
            }else{
                self.no_prefix_parse_fn_error(self.cur_token.as_ref().unwrap().token_type.clone());
                None
            }
        }else{
            None
        }
    }

    /// Wrap the current token as an `Identifier` expression.
    fn parse_identifier(&mut self) -> Option<Arc<dyn Expression>> {
        let token = self.cur_token.as_ref().unwrap().clone();
        let value = token.literal.clone();
        Some(Arc::new(Identifier { token, value }))
    }

    /// Parse a bare expression used as a statement. Any trailing
    /// semicolons are consumed but optional.
    fn parse_expression_statement(&mut self) -> Option<ExpressionStatement> {
        let token = self.cur_token.as_ref().unwrap().clone();
        let expression = self.parse_expression(PRIORITY::LOWEST);
        while self.peek_token_is(&TokenType::SEMICOLON) {
            self.next_token();
        }
        Some(ExpressionStatement { token, expression })
    }

    /// Parse the current token's literal as an `i64` integer literal.
    /// On failure, records one formatted message in `errors` and returns None.
    fn parse_integer_literal(&mut self) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_integer_literal:{}", self.cur_token.as_ref().unwrap().literal);
        });
        let token = self.cur_token.as_ref().unwrap().clone();
        match token.literal.parse::<i64>() {
            Ok(value) => Some(Arc::new(IntegerLiteral { token, value })),
            Err(_) => {
                // Record ONE complete message; the old code pushed three
                // separate fragments into `errors`.
                self.errors.push(format!("could not parse {} as integer", token.literal));
                None
            }
        }
    }

    /// Wrap the current `true`/`false` token as a `BoolLiteral`.
    fn parse_bool_literal(&self) -> Option<Arc<dyn Expression>> {
        let token = self.cur_token.as_ref().unwrap().clone();
        let value = self.cur_token_is(&TokenType::TRUE);
        Some(Arc::new(BoolLiteral { token, value }))
    }

    /// Parse `<op><operand>` (e.g. `!5`, `-x`): capture the operator token,
    /// then parse the operand with PREFIX precedence.
    fn parse_prefix_expression(&mut self) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_prefix_expression");
        });
        let token = self.cur_token.as_ref().unwrap().clone();
        let operator = token.literal.clone();
        // Step past the operator and parse its operand.
        self.next_token();
        let right = self.parse_expression(PRIORITY::PREFIX);
        Some(Arc::new(PrefixExpression { token, operator, right }))
    }

    /// Parse `<left> <op> <right>`: snapshot the operator token and its
    /// precedence, advance past it, and parse the right operand with that
    /// precedence.
    fn parse_infix_expression(&mut self, left: Arc<dyn Expression>) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        let token = self.cur_token.as_ref().unwrap().clone();
        let operator = token.literal.clone();
        let precedence = self.cur_precedence();
        self.next_token();
        let right = self.parse_expression(precedence);
        let expression = InfixExpression {
            token,
            left: Some(left),
            operator,
            right,
        };
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_infix_expression:{:?}", expression);
        });
        Some(Arc::new(expression))
    }

    /// Parse `( <expression> )`, returning the inner expression.
    fn parse_grouped_expression(&mut self) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        self.next_token();
        let exp = self.parse_expression(PRIORITY::LOWEST);
        // Log only when there is something to log: `exp` may be None when
        // the inner expression failed to parse, and the previous
        // unconditional unwrap panicked here instead of reporting an error.
        if let Some(e) = exp.as_ref() {
            tracing::subscriber::with_default(subscriber, || {
                info!("parse_grouped_expression:{}", e.to_string());
            });
        }
        if !self.expect_peek(&TokenType::RPAREN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::RPAREN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        exp
    }

    /// Parse `if (<condition>) { <consequence> } [else { <alternative> }]`.
    fn parse_if_expression(&mut self) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        let mut expression = IfExpression {
            token: self.cur_token.as_ref().unwrap().clone(),
            condition: None,
            consequence: None,
            alternative: None,
        };
        if !self.expect_peek(&TokenType::LPAREN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::LPAREN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        self.next_token();
        expression.condition = self.parse_expression(PRIORITY::LOWEST);
        if !self.expect_peek(&TokenType::RPAREN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::RPAREN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        if !self.expect_peek(&TokenType::LBRACE) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::LBRACE, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        expression.consequence = self.parse_block_statement();
        /*
         * peek_token_is + next_token is used here instead of expect_peek:
         * the former has no error side effect when the token is absent,
         * which is what we want because the else branch is optional,
         * whereas expect_peek would record an error for a missing else.
        */
        if self.peek_token_is(&TokenType::ELSE) {
            self.next_token();
            if !self.expect_peek(&TokenType::LBRACE) {
                eprintln!("expected next token to be {:?},got {:?} instead",TokenType::LBRACE, self.peek_token.as_ref().unwrap().token_type);
                return None;
            }
            expression.alternative = self.parse_block_statement();
        }
        // Fixed: this log line previously carried the copy-pasted label
        // "parse_grouped_expression".
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_if_expression:{:?}", expression);
        });
        Some(Arc::new(expression))
    }

    /// Parse `{ <statements> }` until the matching `}` or EOF.
    fn parse_block_statement(&mut self) -> Option<BlockStatement> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        let mut block = BlockStatement {
            token: self.cur_token.as_ref().unwrap().clone(),
            statements: vec![],
        };
        self.next_token();
        while !self.cur_token_is(&TokenType::RBRACE) && !self.cur_token_is(&TokenType::EOF) {
            // NOTE(review): this unwrap panics when a statement fails to
            // parse, matching parse_program's behavior.
            let stmt = self.parse_statement().unwrap();
            block.statements.push(stmt.into());
            self.next_token();
        }
        // Fixed: this log line previously carried the copy-pasted label
        // "parse_grouped_expression".
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_block_statement:{:?}", block);
        });
        Some(block)
    }
    /// Precedence associated with the type of `peek_token`.
    fn peek_precedence(&self) -> PRIORITY {
        let t = self.peek_token.as_ref().unwrap().token_type.clone();
        get_precedences(t)
    }

    /// Precedence associated with the type of `cur_token`.
    fn cur_precedence(&self) -> PRIORITY {
        let t = self.cur_token.as_ref().unwrap().token_type.clone();
        get_precedences(t)
    }

    /// Parse a comma-separated identifier list up to the closing `)`.
    fn parse_function_parameters(&mut self) -> Option<Vec<Identifier>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        // `()` — empty parameter list.
        if self.peek_token_is(&TokenType::RPAREN) {
            self.next_token();
            return Some(Vec::new());
        }
        let mut identifiers = Vec::new();
        self.next_token();
        loop {
            let token = self.cur_token.as_ref().unwrap().clone();
            let value = token.literal.clone();
            identifiers.push(Identifier { token, value });
            if !self.peek_token_is(&TokenType::COMMA) {
                break;
            }
            // Skip the comma and land on the next identifier.
            self.next_token();
            self.next_token();
        }
        if !self.expect_peek(&TokenType::RPAREN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::RPAREN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_function_parameters:{:?}", identifiers);
        });
        Some(identifiers)
    }

    /// Parse `fn (<params>) { <body> }`.
    fn parse_function_literal(&mut self) -> Option<Arc<dyn Expression>> {
        let subscriber = tracing_subscriber::fmt()
            // filter spans/events with level TRACE or higher.
            .with_max_level(Level::TRACE)
            // build but do not install the subscriber.
            .finish();
        let token = self.cur_token.as_ref().unwrap().clone();
        if !self.expect_peek(&TokenType::LPAREN) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::LPAREN, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        let params = self.parse_function_parameters();
        if !self.expect_peek(&TokenType::LBRACE) {
            eprintln!("expected next token to be {:?},got {:?} instead",TokenType::LBRACE, self.peek_token.as_ref().unwrap().token_type);
            return None;
        }
        let body = self.parse_block_statement();
        let lit = FunctionLiteral { token, params, body };
        tracing::subscriber::with_default(subscriber, || {
            info!("parse_function_literal:{:?}", lit);
        });
        Some(Arc::new(lit))
    }

    /// Parse the comma-separated argument list of a call, up to `)`.
    fn parse_call_arguments(&mut self) -> Vec<Arc<dyn Expression>> {
        // `()` — no arguments.
        if self.peek_token_is(&TokenType::RPAREN) {
            self.next_token();
            return Vec::new();
        }
        let mut args = Vec::new();
        self.next_token();
        args.push(self.parse_expression(PRIORITY::LOWEST).unwrap());
        while self.peek_token_is(&TokenType::COMMA) {
            // Skip the comma and land on the next argument expression.
            self.next_token();
            self.next_token();
            args.push(self.parse_expression(PRIORITY::LOWEST).unwrap());
        }
        // NOTE(review): a missing `)` is recorded via peek_error, but the
        // caller still receives an empty list, indistinguishable from `()`.
        if !self.expect_peek(&TokenType::RPAREN) {
            return Vec::new();
        }
        args
    }
    /// Registered as an infix function for `(`, so `function` is the
    /// callee expression already parsed to the left of the parenthesis.
    fn parse_call_expression(&mut self, function: Arc<dyn Expression>) -> Option<Arc<dyn Expression>> {
        let token = self.cur_token.as_ref().unwrap().clone();
        let arguments = self.parse_call_arguments();
        Some(Arc::new(CallExpression { token, function, arguments }))
    }
}

/// Precedence table: map a token type to its operator precedence.
/// Unknown token types fall back to LOWEST.
fn get_precedences(token_type: TokenType) -> PRIORITY {
    match token_type {
        TokenType::EQ => PRIORITY::EQUALS,
        TokenType::NOT_EQ => PRIORITY::EQUALS,
        TokenType::LT => PRIORITY::LESSGREATER,
        TokenType::GT => PRIORITY::LESSGREATER,
        TokenType::PLUS => PRIORITY::SUM,
        TokenType::MINUS => PRIORITY::SUM,
        TokenType::SLASH => PRIORITY::PRODUCT,
        TokenType::ASTERISK => PRIORITY::PRODUCT,
        TokenType::LPAREN => PRIORITY::CALL,
        _ => PRIORITY::LOWEST,
    }
}