use pest::Parser;
use pest_derive::Parser;

/// Pest-generated lexer for SysY; token rules are defined in `lexer.pest`,
/// which the derive macro compiles into the `Rule` enum used below.
#[derive(Parser)]
#[grammar = "lexer.pest"]
struct SysYLexer;

// Names intentionally mirror the SCREAMING_CASE rule names in `lexer.pest`,
// hence the lint allowance.
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq)]
enum TokenType {
    // Keywords.
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,
    // Arithmetic operators.
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    // Assignment and comparison operators.
    ASSIGN,
    EQ,
    NEQ,
    LT,
    GT,
    LE,
    GE,
    // Logical operators.
    NOT,
    AND,
    OR,
    // Delimiters. (BRACKT spelling matches the grammar's rule names.)
    L_PAREN,
    R_PAREN,
    L_BRACE,
    R_BRACE,
    L_BRACKT,
    R_BRACKT,
    COMMA,
    SEMICOLON,
    // Literals and identifiers.
    IDENT,
    INTEGER_CONST,
    // Comments are tokenized but filtered out before printing in `main`.
    LINE_COMMENT,
    MULTILINE_COMMENT,
    // Anything the grammar could not match; triggers an "Error type A" report.
    ERR,
}

/// One lexed token with its source position (1-based line/column).
struct Token {
    token_type: TokenType,
    // Source text, except INTEGER_CONST, whose lexeme is normalized to the
    // literal's decimal value by `tokenize`.
    lexeme: String,
    line: usize,
    // Stored for completeness; `main` currently only reports `line`.
    column: usize,
}

/// Entry point: lex the file named on the command line.
///
/// If any unrecognized token is found, report each one as an
/// "Error type A" diagnostic and exit with status 1. Otherwise print every
/// token except comments. All output goes to stderr, matching the expected
/// judge format.
fn main() {
    let args: Vec<String> = std::env::args().collect();
    if args.len() != 2 {
        eprintln!("Usage: {} <source-file>", args[0]);
        std::process::exit(1);
    }

    let source = std::fs::read_to_string(&args[1]).expect("Failed to read the source file");
    let tokens = tokenize(source.as_str());

    // Error phase: if anything failed to lex, report all failures and stop.
    let has_errors = tokens
        .iter()
        .any(|t| matches!(t.token_type, TokenType::ERR));
    if has_errors {
        for t in tokens.iter().filter(|t| matches!(t.token_type, TokenType::ERR)) {
            eprintln!(
                "Error type A at Line {}: unrecognized token '{}'.",
                t.line, t.lexeme
            );
        }
        std::process::exit(1);
    }

    // Success phase: print every token, skipping comments.
    for t in &tokens {
        match t.token_type {
            TokenType::LINE_COMMENT | TokenType::MULTILINE_COMMENT => {}
            _ => eprintln!("{:?} {} at Line {}.", t.token_type, t.lexeme, t.line),
        }
    }
}

/// Splits `input` into SysY tokens using the pest grammar in `lexer.pest`.
///
/// Integer literals have their lexeme normalized to the decimal value
/// (hex `0x…`/`0X…` and octal `0…` forms are converted); every other token
/// keeps its source text verbatim. Unrecognized input is returned as
/// `TokenType::ERR` tokens rather than aborting the scan.
///
/// Panics if pest rejects the input entirely; the grammar's ERR_TOKEN rule
/// is presumably a catch-all, so this should not happen — TODO confirm.
fn tokenize(input: &str) -> Vec<Token> {
    let pairs = SysYLexer::parse(Rule::TOKEN_LIST, input)
        .expect("Failed to parse input")
        .next();
    let mut tokens = Vec::new();
    if let Some(pairs) = pairs {
        for token in pairs.into_inner() {
            let token = token.into_inner().next().unwrap();
            match token.as_rule() {
                Rule::OK_TOKEN => {
                    // An OK_TOKEN with no inner pair matched whitespace;
                    // it carries no token and is dropped immediately.
                    let token = match token.into_inner().next() {
                        Some(inner) => inner,
                        None => continue,
                    };
                    // Compute the position once: line_col() rescans the
                    // input on every call.
                    let (line, column) = token.line_col();
                    let token_type = match token.as_rule() {
                        Rule::CONST => TokenType::CONST,
                        Rule::INT => TokenType::INT,
                        Rule::VOID => TokenType::VOID,
                        Rule::IF => TokenType::IF,
                        Rule::ELSE => TokenType::ELSE,
                        Rule::WHILE => TokenType::WHILE,
                        Rule::BREAK => TokenType::BREAK,
                        Rule::CONTINUE => TokenType::CONTINUE,
                        Rule::RETURN => TokenType::RETURN,
                        Rule::PLUS => TokenType::PLUS,
                        Rule::MINUS => TokenType::MINUS,
                        Rule::MUL => TokenType::MUL,
                        Rule::DIV => TokenType::DIV,
                        Rule::MOD => TokenType::MOD,
                        Rule::ASSIGN => TokenType::ASSIGN,
                        Rule::EQ => TokenType::EQ,
                        Rule::NEQ => TokenType::NEQ,
                        Rule::LT => TokenType::LT,
                        Rule::GT => TokenType::GT,
                        Rule::LE => TokenType::LE,
                        Rule::GE => TokenType::GE,
                        Rule::NOT => TokenType::NOT,
                        Rule::AND => TokenType::AND,
                        Rule::OR => TokenType::OR,
                        Rule::L_PAREN => TokenType::L_PAREN,
                        Rule::R_PAREN => TokenType::R_PAREN,
                        Rule::L_BRACE => TokenType::L_BRACE,
                        Rule::R_BRACE => TokenType::R_BRACE,
                        Rule::L_BRACKT => TokenType::L_BRACKT,
                        Rule::R_BRACKT => TokenType::R_BRACKT,
                        Rule::COMMA => TokenType::COMMA,
                        Rule::SEMICOLON => TokenType::SEMICOLON,
                        Rule::IDENT => TokenType::IDENT,
                        Rule::INTEGER_CONST => TokenType::INTEGER_CONST,
                        Rule::LINE_COMMENT => TokenType::LINE_COMMENT,
                        Rule::MULTILINE_COMMENT => TokenType::MULTILINE_COMMENT,
                        _ => unreachable!(),
                    };
                    let lexeme = if token_type == TokenType::INTEGER_CONST {
                        normalize_integer(token.as_str())
                    } else {
                        token.as_str().to_string()
                    };
                    tokens.push(Token {
                        token_type,
                        lexeme,
                        line,
                        column,
                    });
                }
                Rule::ERR_TOKEN => {
                    let (line, column) = token.line_col();
                    tokens.push(Token {
                        token_type: TokenType::ERR,
                        lexeme: token.as_str().to_string(),
                        line,
                        column,
                    });
                }
                _ => unreachable!(),
            }
        }
    }
    tokens
}

/// Converts a SysY integer literal to its decimal string form.
///
/// Handles hex (`0x`/`0X` prefix), octal (leading `0` with more digits),
/// and plain decimal — including the lone literal `"0"`, which previously
/// fell into the octal branch and only produced the right answer through
/// the `Err` fallback. Unparseable or overflowing literals fall back to
/// `"0"`, preserving the original behavior.
fn normalize_integer(text: &str) -> String {
    let parsed = if let Some(hex) = text
        .strip_prefix("0x")
        .or_else(|| text.strip_prefix("0X"))
    {
        u64::from_str_radix(hex, 16)
    } else if text.len() > 1 && text.starts_with('0') {
        u64::from_str_radix(&text[1..], 8)
    } else {
        text.parse::<u64>()
    };
    match parsed {
        Ok(val) => val.to_string(),
        Err(_) => "0".to_string(),
    }
}
