use crate::{
    lexer::{self, BraceType, OpType},
    parser::{
        dfa::{ASTLink, DFA, DFAError, NonTerm},
        pd::{BracePd, EofPd, IdentiferPd, KeywordPd, NonTermPd, OpPd, PD, SemicolonPd, UIntLPd},
        production::{get_production},
    },
};
use std::{
    collections::{HashMap, VecDeque},
};
pub mod dfa;
pub mod pd;
pub mod production;

// Convenience aliases for the lexer's token type and per-token output item.
type Token = lexer::Token;
type LexOutput = lexer::LexerOutputItem;

/// Error report emitted by the parser: the error kind plus, when known,
/// the span of lexer-output tokens the error covers.
#[derive(Debug)]
pub struct ParserErrOutput {
    err: ParserError,
    range: Option<(usize, usize)>, // inclusive index range into the lexer output Vec
}

/// Everything that can go wrong while driving the DFA or analyzing the AST.
#[derive(Debug, Clone, PartialEq)]
pub enum ParserError {
    CannotTurnToDigit,        // a char in a numeric literal is not a valid digit for its base
    NumOverflow(u64),         // literal exceeded u64; carries the wrapped value
    NotANumber,               // literal has a base prefix (e.g. "0x") but no digits after it
    NoInputTokens,            // look_token called with an empty token deque
    ParseOver,                // work called after this parser already accepted or errored
    ASTNotConsistent,         // AST shape contradicts the production / symbol kind
    ASTUnknownToken,          // lexer produced Token::Unknown
    ASTNoProducer,            // no production registered for the node's production id
    ASTNoPD,                  // NOTE(review): not produced in this file — presumably a later pass; confirm
    ASTNoDC,                  // NOTE(review): not produced in this file — presumably a later pass; confirm
    ProducerKeyConflict(u32), // duplicate RHS symbol name within one production (carries prod id)
    DFAErr(DFAError),         // error bubbled up from the DFA
}

/// Shift-reduce parser driving a table-based `DFA` over the lexer output.
#[derive(Debug)]
pub struct Parser {
    token_size: usize,          // total number of tokens in the current input
    deque: VecDeque<LexOutput>, // tokens not yet consumed (front = next token)
    dfa: DFA,                   // automaton that builds the AST shape
    over: bool,                 // set once the DFA accepts or an error occurs
}

impl Parser {
    /// Builds a parser over a snapshot of the lexer output.
    pub fn new(tokens: &Vec<LexOutput>) -> Self {
        Self {
            token_size: tokens.len(),
            deque: VecDeque::from(tokens.clone()),
            dfa: DFA::new(),
            over: false,
        }
    }

    /// Resets this parser onto a new token stream so `work` can run again.
    ///
    /// Mirrors `new`: the DFA is rebuilt and `over` is cleared.
    pub fn redirect(&mut self, tokens: &Vec<LexOutput>) {
        self.token_size = tokens.len();
        self.deque = VecDeque::from(tokens.clone());
        self.dfa = DFA::new();
        // Bug fix: this was `true`, which made every `work` call after a
        // redirect fail immediately with ParserError::ParseOver, defeating
        // the documented purpose of redirect.
        self.over = false;
    }

    /// Whether this parser has already accepted or errored out.
    pub fn is_over(&self) -> bool {
        self.over
    }

    /**
     * Consumes all tokens buffered in this Parser and builds an AST; the AST
     * carries the analyzed file information and is handed over via the return
     * value. The DFA is responsible for the tree's shape, while the Parser
     * supplies the more detailed analysis data.
     * After Accept/Error, this Parser must be reset via `redirect` to another
     * Vec<LexOutput> before it can work again.
     */
    pub fn work(&mut self) -> Result<ASTLink, ParserErrOutput> {
        if self.is_over() {
            return Err(ParserErrOutput {
                err: ParserError::ParseOver,
                range: None,
            });
        }

        // Feed tokens to the DFA until it accepts; errors propagate out.
        while !self.look_token()? {}

        match self.dfa.ast_clone() {
            Ok(mut ast) => {
                parse_ast(&mut ast, None)?;
                Ok(ast)
            }
            Err(e) => Err(ParserErrOutput {
                err: ParserError::DFAErr(e),
                range: None,
            }),
        }
    }

    /**
     * Looks at one token, consults the Action table, tries Reduce, tries Goto.
     * @return whether the input was Accepted
     */
    pub fn look_token(&mut self) -> Result<bool, ParserErrOutput> {
        let lo = self.deque.front().ok_or(ParserErrOutput {
            err: ParserError::NoInputTokens,
            range: None,
        })?;

        // Index of the current token within the original lexer output.
        let idx = self.token_size - self.deque.len();
        let ret = self.dfa.step(&lo.token, idx);

        match ret {
            Ok(act) => match act.action {
                dfa::Action::Shift(_) => {
                    // Shift consumes the token.
                    self.deque
                        .pop_front()
                        .expect("front() returned Some above");
                    Ok(false)
                }
                // Reduce keeps the token; it will be examined again.
                dfa::Action::Reduce(_) => Ok(false),
                dfa::Action::Accept => {
                    self.over = true;
                    Ok(true)
                }
            },
            Err(e) => {
                // Any DFA error terminates this parser permanently.
                self.over = true;
                Err(ParserErrOutput {
                    err: ParserError::DFAErr(e),
                    range: Some((idx, idx)),
                })
            }
        }
    }
}

pub fn parse_ast(x: &mut ASTLink, parent: Option<&ASTLink>) -> Result<(), ParserErrOutput> {
    let (mn, mx) = parse_token_range(x)?;

    for y in &mut x.adj {
        parse_ast(y, None)?;
    }

    let ret = match (x.prod_id, &x.sym) {
        (None, dfa::Symbol::Term(token)) => parse_directly(token).or_else(|e| {
            Err(ParserErrOutput {
                err: e,
                range: Some((mn, mx)),
            })
        }),
        (Some(p), dfa::Symbol::NonTerm(non_term)) => parse_by_producer(p, x, &non_term, parent)
            .or_else(|e| {
                Err(ParserErrOutput {
                    err: e,
                    range: Some((mn, mx)),
                })
            }),
        _ => Err(ParserErrOutput {
            err: ParserError::ASTNotConsistent,
            range: Some((mn, mx)),
        }),
    }?;

    // Parser 检查 OK 并把 PD 加入 AST
    x.parser_ok = true;
    x.pd = Some(ret);

    Ok(())
}

/// Analyzes a non-terminal node against its production `p`.
///
/// Validates that the node's child count and result symbol match the
/// production, then builds a `NonTermPd` listing the RHS symbol names and
/// mapping each name to its child index. `parent` is currently unused.
///
/// # Errors
/// - `ASTNoProducer` when no production is registered for `p`.
/// - `ASTNotConsistent` when the child count or result symbol mismatches.
/// - `ProducerKeyConflict` when the production has duplicate RHS names.
pub fn parse_by_producer(
    p: u32,
    x: &ASTLink,
    non_term: &NonTerm,
    parent: Option<&ASTLink>,
) -> Result<PD, ParserError> {
    let prod = get_production(&p).or(Err(ParserError::ASTNoProducer))?;
    if (prod.n_rhs != x.adj.len() as u32) || (prod.ret_sym != *non_term) {
        return Err(ParserError::ASTNotConsistent);
    }

    // RHS symbol names in production order. No need to clone the symbol
    // list itself just to stringify its elements.
    let components = prod.symbol.iter().map(|s| s.to_string()).collect();

    // Map each symbol name to its position; a duplicate name would make
    // property lookup ambiguous, so reject it.
    let mut prop: HashMap<String, usize> = HashMap::new();
    for (i, sym) in prod.symbol.iter().enumerate() {
        if prop.insert(sym.to_string(), i).is_some() {
            return Err(ParserError::ProducerKeyConflict(p));
        }
    }

    Ok(PD::NonTerm(NonTermPd { components, prop }))
}

pub fn parse_directly(token: &Token) -> Result<PD, ParserError> {
    match token {
        lexer::Token::Keyword(kw) => Ok(PD::Keyword(KeywordPd {
            symbol: "Keyword".to_string(),
            name: kw.to_string(),
        })),
        lexer::Token::UIntLiteral(st) => {
            let (u, warn) = parse_to_u64_d(&st)?;

            Ok(PD::UIntL(UIntLPd {
                symbol: "UIntLiteral".to_string(),
                data_type: "unsigned".to_string(),
                src: st.to_string(),
                val: u,
                warn: warn,
            }))
        }
        lexer::Token::Brace(brace_type) => Ok(PD::Brace(BracePd {
            symbol: parse_brace_to_char(brace_type),
        })),
        lexer::Token::Semicolon => Ok(PD::Semicolon(SemicolonPd { symbol: ';' })),
        lexer::Token::Identifier(id) => Ok(PD::Identifer(IdentiferPd {
            symbol: "Identifer".to_string(),
            name: id.to_string(),
        })),
        lexer::Token::Eof => Ok(PD::Eof(EofPd {
            symbol: "EOF".to_string(),
        })),
        lexer::Token::Operation(op) => Ok(PD::Operation(OpPd {
            symbol: parse_operation_to_char(op),
            n_operand: 0,   //后面推断
        })),
        lexer::Token::Unknown(_) => Err(ParserError::ASTUnknownToken),
    }
}

/// Computes the inclusive lexer-index range covered by an AST node.
///
/// A terminal covers exactly its own index; a non-terminal spans from its
/// first child's index to its last child's.
///
/// # Errors
/// A childless non-terminal is reported as `ASTNotConsistent`.
pub fn parse_token_range(x: &ASTLink) -> Result<(usize, usize), ParserErrOutput> {
    match &x.sym {
        dfa::Symbol::Term(_) => Ok((x.index, x.index)),
        dfa::Symbol::NonTerm(_) => match (x.adj.first(), x.adj.last()) {
            (Some(first), Some(last)) => Ok((first.index, last.index)),
            // No children: the tree is malformed at this node.
            _ => Err(ParserErrOutput {
                err: ParserError::ASTNotConsistent,
                range: Some((x.index, x.index)),
            }),
        },
    }
}

/// Parses a numeric literal to `u64`, demoting overflow to a warning.
///
/// On success the warning string is empty. On overflow the wrapped value is
/// kept and a human-readable warning is returned alongside it. Every other
/// parse failure is propagated unchanged.
pub fn parse_to_u64_d(st: &str) -> Result<(u64, String), ParserError> {
    // Direct match replaces the previous deferred-init `u` variable that was
    // mutated from inside a Result-producing match (including a dead `u = 0`
    // assignment on the error path).
    match parse_to_u64(st) {
        Ok(u) => Ok((u, String::new())),
        Err(ParserError::NumOverflow(u)) => {
            Ok((u, format!("Number {} turns to {} because overflow.", st, u)))
        }
        Err(e) => Err(e),
    }
}

/// Converts a numeric literal to `u64`, honoring C-style base prefixes:
/// `0x`/`0X` hexadecimal, `0b`/`0B` binary, a leading `0` octal, otherwise
/// decimal.
///
/// # Errors
/// - `NotANumber` for a bare prefix with no digits (e.g. `"0x"`).
/// - `CannotTurnToDigit` when a character is invalid for the chosen base.
/// - `NumOverflow` (carrying the wrapped value) when the literal exceeds u64.
pub fn parse_to_u64(s: &str) -> Result<u64, ParserError> {
    let digits: Vec<char> = s.chars().collect();

    // Determine the radix and where the actual digits begin.
    let (radix, first_digit) = match digits.get(0..2) {
        Some(['0', 'x' | 'X']) => (16, 2), // hexadecimal
        Some(['0', 'b' | 'B']) => (2, 2),  // binary
        Some(['0', _]) => (8, 1),          // octal (skip only the leading 0)
        _ => (10, 0),                      // decimal
    };

    if first_digit >= digits.len() {
        return Err(ParserError::NotANumber);
    }

    // Accumulate with wrapping arithmetic, remembering whether any step
    // overflowed so the wrapped value can still be reported.
    let mut overflowed = false;
    let value = digits[first_digit..].iter().try_fold(0u64, |acc, ch| {
        let digit = u64::from(ch.to_digit(radix).ok_or(ParserError::CannotTurnToDigit)?);

        let (shifted, mul_of) = acc.overflowing_mul(radix.into());
        let (next, add_of) = shifted.overflowing_add(digit);
        overflowed |= mul_of || add_of;

        Ok(next)
    })?;

    if overflowed {
        Err(ParserError::NumOverflow(value))
    } else {
        Ok(value)
    }
}

/// Maps a brace token back to its source character.
pub fn parse_brace_to_char(bt: &BraceType) -> char {
    match bt {
        BraceType::OpenRound => '(',
        BraceType::CloseRound => ')',
        BraceType::OpenSquare => '[',
        BraceType::CloseSquare => ']',
        BraceType::OpenCurly => '{',
        BraceType::CloseCurly => '}',
    }
}

/// Maps an operator token back to its source character.
pub fn parse_operation_to_char(op: &OpType) -> char {
    match op {
        OpType::Negation => '-',
        OpType::BitwiseComplement => '~',
        OpType::LogicalNegation => '!',
        OpType::Add => '+',
        // Bug fix: Mul previously mapped to '-', colliding with Negation and
        // misprinting every multiplication operator.
        OpType::Mul => '*',
        OpType::Div => '/',
    }
}
