use std::{env, fs};

use pest::Parser;
use pest_derive::Parser;

/// Pest-generated lexer for the SysY language.
///
/// `pest_derive` reads the grammar file and generates the `Rule` enum plus the
/// `Parser` impl used by `tokenize`. No fields are needed; the type is only a
/// namespace for the generated parser.
#[derive(Parser)]
#[grammar = "lexer.pest"] // path to the .pest grammar file (relative to src/)
struct SysYLexer;

/// Entry point: read the source file named on the command line and run the
/// lexer over it. `tokenize` prints the token/error listing as a side effect.
fn main() {
    // Collect command-line arguments; args[0] is the program name.
    let args: Vec<String> = env::args().collect();

    // Require the input filename as the first argument.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];

    // Read the whole file up front; the lexer operates on a single string.
    let input = fs::read_to_string(filename).expect("Failed to read file");

    // Called for its side effect (printing the listing); the returned token
    // vector is not consumed further yet, so bind it as `_tokens` to avoid
    // an unused-variable warning.
    let _tokens = tokenize(&input);
}

/// Lexical token kinds produced by `tokenize` for the SysY language.
///
/// Variant names deliberately mirror the SCREAMING_CASE rule names of the
/// pest grammar (and `Token::type_name`), so the camel-case lint is silenced
/// here rather than renaming them.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    // Keywords
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,

    // Operators
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    ASSIGN,
    EQ,
    NEQ,
    LT,
    GT,
    LE,
    GE,
    NOT,
    AND,
    OR,

    // Delimiters
    L_PAREN,
    R_PAREN,
    L_BRACE,
    R_BRACE,
    L_BRACKT,
    R_BRACKT,
    COMMA,
    SEMICOLON,

    // Identifier carries its source spelling; integer carries its parsed value.
    IDENT(String),
    INTEGER_CONST(u64),
}

impl Token {
    pub fn type_name(&self) -> &'static str {
        match self {
            Token::CONST => "CONST",
            Token::INT => "INT",
            Token::VOID => "VOID",
            Token::IF => "IF",
            Token::ELSE => "ELSE",
            Token::WHILE => "WHILE",
            Token::BREAK => "BREAK",
            Token::CONTINUE => "CONTINUE",
            Token::RETURN => "RETURN",
            Token::PLUS => "PLUS",
            Token::MINUS => "MINUS",
            Token::MUL => "MUL",
            Token::DIV => "DIV",
            Token::MOD => "MOD",
            Token::ASSIGN => "ASSIGN",
            Token::EQ => "EQ",
            Token::NEQ => "NEQ",
            Token::LT => "LT",
            Token::GT => "GT",
            Token::LE => "LE",
            Token::GE => "GE",
            Token::NOT => "NOT",
            Token::AND => "AND",
            Token::OR => "OR",
            Token::L_PAREN => "L_PAREN",
            Token::R_PAREN => "R_PAREN",
            Token::L_BRACE => "L_BRACE",
            Token::R_BRACE => "R_BRACE",
            Token::L_BRACKT => "L_BRACKT",
            Token::R_BRACKT => "R_BRACKT",
            Token::COMMA => "COMMA",
            Token::SEMICOLON => "SEMICOLON",
            Token::IDENT(_) => "IDENT",
            Token::INTEGER_CONST(_) => "INTEGER_CONST",
        }
    }
}

/// Parse a SysY integer literal: hexadecimal (`0x`/`0X` prefix), octal
/// (leading `0` followed by more digits), or decimal.
///
/// # Errors
/// Returns the underlying `ParseIntError` when the digits are invalid for the
/// detected base (e.g. `"09"` as octal, or an empty digit string).
pub fn parse_integer(s: &str) -> Result<u64, std::num::ParseIntError> {
    // `strip_prefix` replaces the starts_with + `&s[2..]` byte-slice pair:
    // it is panic-free and yields the remainder directly.
    if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        u64::from_str_radix(hex, 16)
    } else if s.len() > 1 && s.starts_with('0') {
        // Leading zero with more digits marks an octal literal; slicing off
        // one ASCII '0' is always on a char boundary.
        u64::from_str_radix(&s[1..], 8)
    } else {
        // Plain decimal; a lone "0" also lands here.
        s.parse::<u64>()
    }
}


/// Run the pest lexer over `input` and convert the matched pairs into `Token`s.
///
/// Side effects: builds a per-token listing (`TYPE text at Line N.`) and a
/// lexical-error listing; if any `ERROR` pair was matched, ONLY the error
/// listing is printed to stderr, otherwise the token listing is printed.
///
/// # Panics
/// Panics if the grammar fails to match `input` at all. The grammar is
/// presumably expected to absorb unknown characters via the `ERROR` rule —
/// TODO confirm against lexer.pest — so a panic here indicates a grammar bug
/// rather than bad user input.
pub fn tokenize(input: &str) -> Vec<Token> {
    // Accumulators: normal token listing and lexical-error lines.
    let mut output = String::from("");
    let mut error_out = String::from("");
    let program_pair = SysYLexer::parse(Rule::PAIRS, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e))
        .next()
        .unwrap(); // take the first (top-level PAIRS) match
    
    let mut tokens = Vec::new();

    // Line numbers are 1-based and advanced manually by counting newlines in
    // WHITESPACE/COMMENT pairs (individual tokens never span lines).
    let mut line = 1;
    for pair in program_pair.into_inner() {
        // println!("token: {:?} ", pair);
        match pair.as_rule() {
            Rule::TOKEN => {
                // Each TOKEN pair wraps exactly one concrete token rule.
                let _inner = pair.into_inner().next().unwrap();
                // println!("  t: {:?} ", _inner);
                let token = match _inner.as_rule() {
                    // Keywords
                    Rule::CONST => Token::CONST,
                    Rule::INT => Token::INT,
                    Rule::VOID => Token::VOID,
                    Rule::IF => Token::IF,
                    Rule::ELSE => Token::ELSE,
                    Rule::WHILE => Token::WHILE,
                    Rule::BREAK => Token::BREAK,
                    Rule::CONTINUE => Token::CONTINUE,
                    Rule::RETURN => Token::RETURN,
                    
                    // Operators
                    Rule::PLUS => Token::PLUS,
                    Rule::MINUS => Token::MINUS,
                    Rule::MUL => Token::MUL,
                    Rule::DIV => Token::DIV,
                    Rule::MOD => Token::MOD,
                    Rule::ASSIGN => Token::ASSIGN,
                    Rule::EQ => Token::EQ,
                    Rule::NEQ => Token::NEQ,
                    Rule::LT => Token::LT,
                    Rule::GT => Token::GT,
                    Rule::LE => Token::LE,
                    Rule::GE => Token::GE,
                    Rule::NOT => Token::NOT,
                    Rule::AND => Token::AND,
                    Rule::OR => Token::OR,
                    
                    // Delimiters
                    Rule::L_PAREN => Token::L_PAREN,
                    Rule::R_PAREN => Token::R_PAREN,
                    Rule::L_BRACE => Token::L_BRACE,
                    Rule::R_BRACE => Token::R_BRACE,
                    Rule::L_BRACKT => Token::L_BRACKT,
                    Rule::R_BRACKT => Token::R_BRACKT,
                    Rule::COMMA => Token::COMMA,
                    Rule::SEMICOLON => Token::SEMICOLON,
                    
                    // Identifiers and literals: keep spelling / parsed value.
                    Rule::IDENT => Token::IDENT(_inner.as_str().to_string()),
                    Rule::INTEGER_CONST => Token::INTEGER_CONST( parse_integer(_inner.as_str()).unwrap() ),
                    
                    _ => continue, // skip rules we don't recognize
                };

                // Integer constants are listed with their parsed value (so
                // 0x10 prints as 16); everything else prints its source text.
                if let Token::INTEGER_CONST(val) = token {
                    output.push_str(&format!("{} {} at Line {}.\n", token.type_name(), val, line));
                    // eprintln!("{} {} at line {}.", token.type_name(), val, line);
                } else {
                    output.push_str(&format!("{} {} at Line {}.\n", token.type_name(), _inner.as_str(), line));
                    // eprintln!("{} {} at line {}.", token.type_name(), token_inner.as_str(), line);
                }
                tokens.push(token);
            },
            Rule::WHITESPACE => {
                // Whitespace carries the newlines; advance the line counter.
                let ws_text = pair.as_str();
                let new_lines = ws_text.matches('\n').count();
                line += new_lines;
            },
            Rule::COMMENT => {
                // Block comments may span lines too.
                line += pair.as_str().matches('\n').count();
            },
            Rule::ERROR => {
                // Catch-all for characters no token rule matched; `{:?}`
                // deliberately wraps the text in quotes for the report.
                error_out.push_str(&format!("Error type A at Line {}: Mysterious character {:?}.\n", line, pair.as_str()));
            },
            Rule::EOI => {},
            _ => {}, 
        }
    }

    // Any lexical error suppresses the normal token listing entirely.
    if !error_out.is_empty() {
        eprintln!("{}", error_out);
    } else {
        eprintln!("{}", output);
    }

    tokens
}