use pest::iterators::Pairs;
use pest_derive::Parser;
use pest::Parser;
use std::fs;
use std::env;


/// pest-generated lexer. The token rules (keywords, operators, IDENT,
/// INTEGER_CONST, UNKNOWN, ...) live in the grammar file `ident.pest`;
/// pest_derive expands this into the `Rule` enum used throughout this file.
#[derive(Parser)]
#[grammar = "ident.pest"]
pub struct LexerParser;

/// One lexical token of the source language.
///
/// Variants mirror the grammar rules in `ident.pest` one-to-one; only
/// `Ident` and `IntegerConst` carry a payload (the identifier text and
/// the parsed integer value, respectively).
#[derive(Debug, PartialEq)]
pub enum Token {
    // Keywords.
    Const,
    Int,
    Void,
    If,
    Else,
    While,
    Break,
    Continue,
    Return,
    // Arithmetic operators.
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    // Assignment and comparison operators.
    Assign,
    Eq,
    Neq,
    Lt,
    Gt,
    Le,
    Ge,
    // Logical operators.
    Not,
    And,
    Or,
    // Delimiters.
    LParen,
    RParen,
    LBrace,
    RBrace,
    // NOTE(review): "Brackt" (not "Bracket") matches the grammar's
    // L_BRACKT/R_BRACKT rule names; kept as-is for consistency.
    LBrackt,
    RBrackt,
    Comma,
    Semicolon,
    // Identifier with its source text.
    Ident(String),
    // Integer literal with its parsed value (decimal/octal/hex).
    IntegerConst(i64),
}

/// Lex `input` into a stream of `(Token, line)` pairs, where `line` is the
/// 1-based source line the token starts on.
///
/// Returns an empty vector in two failure cases, both reported on stderr:
/// the grammar fails to parse, or any `UNKNOWN` character was matched.
pub fn tokenize(input: &str) -> Vec<(Token, usize)> {
    let pairs = match LexerParser::parse(Rule::program, input) {
        Ok(parsed) => parsed,
        Err(err) => {
            eprintln!("Parse error: {}", err);
            return Vec::new();
        }
    };

    // Refuse to emit any tokens if a mysterious character was lexed
    // anywhere in the input (all of them get reported first).
    if check_have_unknown(pairs.clone()) {
        return Vec::new();
    }

    let mut result = Vec::new();
    pairs.for_each(|pair| collect_tokens(pair, &mut result));
    result
}

/// Recursively scan `pairs` for `UNKNOWN` rules, printing an error line for
/// each occurrence. Returns `true` if at least one was found.
///
/// Deliberately does NOT short-circuit: every unknown character in the
/// input gets reported, not just the first.
fn check_have_unknown(pairs: pest::iterators::Pairs<'_, Rule>) -> bool {
    let mut found = false;
    for pair in pairs {
        if pair.as_rule() == Rule::UNKNOWN {
            let span = pair.as_span();
            eprintln!(
                "Error type A at Line {}: Mysterious character \"{}\".",
                span.start_pos().line_col().0,
                span.as_str()
            );
            found = true;
        }
        // Descend into child pairs; `|=` keeps scanning unconditionally.
        found |= check_have_unknown(pair.into_inner());
    }
    found
}

/// Translate one pest pair into `Token`s appended to `tokens`, each paired
/// with the 1-based line number the pair starts on. Leaf rules map directly
/// to a token; any other rule is treated as composite and recursed into.
fn collect_tokens(pair: pest::iterators::Pair<Rule>, tokens: &mut Vec<(Token, usize)>) {
    use Token::*;
    let line = pair.as_span().start_pos().line_col().0;
    match pair.as_rule() {
        Rule::CONST => tokens.push((Const, line)),
        Rule::INT => tokens.push((Int, line)),
        Rule::VOID => tokens.push((Void, line)),
        Rule::IF => tokens.push((If, line)),
        Rule::ELSE => tokens.push((Else, line)),
        Rule::WHILE => tokens.push((While, line)),
        Rule::BREAK => tokens.push((Break, line)),
        Rule::CONTINUE => tokens.push((Continue, line)),
        Rule::RETURN => tokens.push((Return, line)),
        Rule::PLUS => tokens.push((Plus, line)),
        Rule::MINUS => tokens.push((Minus, line)),
        Rule::MUL => tokens.push((Mul, line)),
        Rule::DIV => tokens.push((Div, line)),
        Rule::MOD => tokens.push((Mod, line)),
        Rule::ASSIGN => tokens.push((Assign, line)),
        Rule::EQ => tokens.push((Eq, line)),
        Rule::NEQ => tokens.push((Neq, line)),
        Rule::LT => tokens.push((Lt, line)),
        Rule::GT => tokens.push((Gt, line)),
        Rule::LE => tokens.push((Le, line)),
        Rule::GE => tokens.push((Ge, line)),
        Rule::NOT => tokens.push((Not, line)),
        Rule::AND => tokens.push((And, line)),
        Rule::OR => tokens.push((Or, line)),
        Rule::L_PAREN => tokens.push((LParen, line)),
        Rule::R_PAREN => tokens.push((RParen, line)),
        Rule::L_BRACE => tokens.push((LBrace, line)),
        Rule::R_BRACE => tokens.push((RBrace, line)),
        Rule::L_BRACKT => tokens.push((LBrackt, line)),
        Rule::R_BRACKT => tokens.push((RBrackt, line)),
        Rule::COMMA => tokens.push((Comma, line)),
        Rule::SEMICOLON => tokens.push((Semicolon, line)),
        Rule::IDENT => tokens.push((Ident(pair.as_str().to_string()), line)),
        Rule::INTEGER_CONST => {
            tokens.push((IntegerConst(parse_int_literal(pair.as_str())), line));
        }
        // Composite rules (program, statements, ...) carry no token of
        // their own; collect from their children instead.
        _ => {
            for inner in pair.into_inner() {
                collect_tokens(inner, tokens);
            }
        }
    }
}

/// Parse an integer literal's text into its value.
///
/// Rules, applied in order:
/// - `0x`/`0X` prefix: hexadecimal.
/// - a leading `0` followed only by octal digits: octal.
/// - otherwise: decimal (this also covers malformed "octals" such as
///   `08` or `0778`, which the original lexer likewise parsed as decimal).
///
/// This collapses the previous duplicated branching (a redundant
/// `len == 2` special case alongside the general `len >= 2` path) into a
/// single rule that produces identical values for every input class.
///
/// NOTE(review): `unwrap` still panics on out-of-range literals; the
/// grammar is presumed to keep values within `i64` — confirm.
fn parse_int_literal(s: &str) -> i64 {
    if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16).unwrap()
    } else if s.len() > 1 && s.starts_with('0') && s[1..].bytes().all(|b| (b'0'..=b'7').contains(&b)) {
        i64::from_str_radix(&s[1..], 8).unwrap()
    } else {
        s.parse().unwrap()
    }
}

/// Entry point: read the source file named by the first CLI argument,
/// tokenize it, and print one description line per token to stderr.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let code = match fs::read_to_string(&args[1]) {
        Ok(src) => src,
        Err(err) => {
            eprintln!("File read error: {}", err);
            std::process::exit(1);
        }
    };

    // Tokens are only printed when the whole input lexed cleanly
    // (`tokenize` returns an empty vector on any error).
    for (token, line) in tokenize(&code) {
        // Map each token to its rule name and lexeme, then print both
        // through one shared format string.
        let (kind, lexeme): (&str, String) = match token {
            Token::Ident(name) => ("IDENT", name),
            Token::IntegerConst(value) => ("INTEGER_CONST", value.to_string()),
            Token::Const => ("CONST", "const".to_string()),
            Token::Int => ("INT", "int".to_string()),
            Token::Void => ("VOID", "void".to_string()),
            Token::If => ("IF", "if".to_string()),
            Token::Else => ("ELSE", "else".to_string()),
            Token::While => ("WHILE", "while".to_string()),
            Token::Break => ("BREAK", "break".to_string()),
            Token::Continue => ("CONTINUE", "continue".to_string()),
            Token::Return => ("RETURN", "return".to_string()),
            Token::Plus => ("PLUS", "+".to_string()),
            Token::Minus => ("MINUS", "-".to_string()),
            Token::Mul => ("MUL", "*".to_string()),
            Token::Div => ("DIV", "/".to_string()),
            Token::Mod => ("MOD", "%".to_string()),
            Token::Assign => ("ASSIGN", "=".to_string()),
            Token::Eq => ("EQ", "==".to_string()),
            Token::Neq => ("NEQ", "!=".to_string()),
            Token::Lt => ("LT", "<".to_string()),
            Token::Gt => ("GT", ">".to_string()),
            Token::Le => ("LE", "<=".to_string()),
            Token::Ge => ("GE", ">=".to_string()),
            Token::Not => ("NOT", "!".to_string()),
            Token::And => ("AND", "&&".to_string()),
            Token::Or => ("OR", "||".to_string()),
            Token::LParen => ("L_PAREN", "(".to_string()),
            Token::RParen => ("R_PAREN", ")".to_string()),
            Token::LBrace => ("L_BRACE", "{".to_string()),
            Token::RBrace => ("R_BRACE", "}".to_string()),
            Token::LBrackt => ("L_BRACKT", "[".to_string()),
            Token::RBrackt => ("R_BRACKT", "]".to_string()),
            Token::Comma => ("COMMA", ",".to_string()),
            Token::Semicolon => ("SEMICOLON", ";".to_string()),
        };
        eprintln!("{} {} at Line {}.", kind, lexeme, line);
    }
}
