 use std::{env, fs};

use pest_derive::Parser;
use pest::Parser;
fn main() {
    // Collect the command-line arguments.
    let args: Vec<String> = env::args().collect();

    // An input filename is required; bail out with usage otherwise.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    // Read the whole source file into memory.
    let filename = &args[1];
    let input = fs::read_to_string(filename).expect("Failed to read file");

    // Run lexical analysis and report either tokens or errors
    // (output goes to stderr in the format the checker expects).
    let result = tokenize(&input);
    if result.err == 0 {
        for token in &result.tokens {
            eprintln!("{} {} at Line {}.", token.type_, token.token, token.line);
        }
    } else {
        for err in &result.errs {
            eprintln!("Error type A at Line {}: [errorMessage].", err.line);
        }
    }
}


// Pest-generated lexer. The grammar file `lexer.pest` defines the `Rule`
// enum used by `tokenize` (keywords, operators, punctuation, IDENT,
// INTEGER_CONST, trivia rules, and an UNKNOWN catch-all).
#[derive(Parser)]
#[grammar = "lexer.pest"] 
pub struct ExpressionParser;

/// Location of a single lexical error.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ErrInfo {
    /// 1-based source line the error occurred on.
    pub line: i64,
}
/// A single recognized token.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TokenInfo {
    /// Token text as matched (INTEGER_CONST is normalized to decimal).
    pub token: String,
    /// 1-based source line the token starts on.
    pub line: i64,
    /// Rule name of the token, e.g. "IDENT", "PLUS".
    pub type_: String,
}
/// Accumulated result of lexical analysis: tokens in source order plus
/// any errors encountered.
#[derive(Default)]
pub struct Lexer {
    /// Number of lexical errors encountered (0 means `tokens` is complete).
    pub err: i64,
    /// Recognized tokens, in source order.
    pub tokens: Vec<TokenInfo>,
    /// One entry per lexical error, in source order.
    pub errs: Vec<ErrInfo>,
}

impl Lexer {
    /// Create an empty result with no tokens and no errors.
    pub fn new() -> Self {
        Self::default()
    }
}
pub fn tokenize(input: &str) -> Lexer {
    let mut lexer = Lexer::new();
    let mut pairs = ExpressionParser::parse(Rule::tokens, &input).unwrap();
    // println!("Lexical analysis result:{}", pairs.len());
    for pair in pairs.next().unwrap().into_inner() {
        let rule = pair.as_rule();
        let text = pair.as_str();
        let line = pair.as_span().start_pos().line_col().0 as i64;
        let (kind, out_text, err) = match rule {
            Rule::CONST => ("CONST", text.to_string(), 0),
            Rule::INT => ("INT", text.to_string(), 0),
            Rule::VOID => ("VOID", text.to_string(), 0),
            Rule::IF => ("IF", text.to_string(), 0),
            Rule::ELSE => ("ELSE", text.to_string(), 0),
            Rule::WHILE => ("WHILE", text.to_string(), 0),
            Rule::BREAK => ("BREAK", text.to_string(), 0),
            Rule::CONTINUE => ("CONTINUE", text.to_string(), 0),
            Rule::RETURN => ("RETURN", text.to_string(), 0),

            Rule::PLUS => ("PLUS", text.to_string(), 0),
            Rule::MINUS => ("MINUS", text.to_string(), 0),
            Rule::MUL => ("MUL", text.to_string(), 0),
            Rule::DIV => ("DIV", text.to_string(), 0),
            Rule::MOD => ("MOD", text.to_string(), 0),
            Rule::ASSIGN => ("ASSIGN", text.to_string(), 0),
            Rule::EQ => ("EQ", text.to_string(), 0),
            Rule::NEQ => ("NEQ", text.to_string(), 0),
            Rule::LT => ("LT", text.to_string(), 0),
            Rule::GT => ("GT", text.to_string(), 0),
            Rule::LE => ("LE", text.to_string(), 0),
            Rule::GE => ("GE", text.to_string(), 0),
            Rule::NOT => ("NOT", text.to_string(), 0),
            Rule::AND => ("AND", text.to_string(), 0),
            Rule::OR => ("OR", text.to_string(), 0),

            Rule::L_PAREN => ("L_PAREN", text.to_string(), 0),
            Rule::R_PAREN => ("R_PAREN", text.to_string(), 0),
            Rule::L_BRACE => ("L_BRACE", text.to_string(), 0),
            Rule::R_BRACE => ("R_BRACE", text.to_string(), 0),
            Rule::L_BRACKT => ("L_BRACKT", text.to_string(), 0),
            Rule::R_BRACKT => ("R_BRACKT", text.to_string(), 0),
            Rule::COMMA => ("COMMA", text.to_string(), 0),
            Rule::SEMICOLON => ("SEMICOLON", text.to_string(), 0),

            Rule::IDENT => ("IDENT", text.to_string(), 0),
            Rule::INTEGER_CONST => ("INTEGER_CONST", parse_int_const(text).to_string(), 0),

            Rule::LINE_COMMENT => continue,
            Rule::MULTILINE_COMMENT => continue,
            Rule::WHITESPACE => continue,
            Rule::COMMENT => continue,
            Rule::UNKNOWN => ("UNKNOWN", text.to_string(), 1),
            _ => ("UNKNOWN", text.to_string(), 1),
        };
        if err != 0 {
            lexer.err += 1;
            lexer.errs.push(ErrInfo { line });
            // println!("{}", out_text);
        }
        else{
            // println!("{} {}", out_text, kind.to_string());
            lexer.tokens.push(TokenInfo { token: out_text, line, type_: kind.to_string() });
        }
    }

    return lexer;
    
}

/// Parse an integer literal in C-style notation: `0x`/`0X` prefix for
/// hexadecimal, a leading `0` (with more digits) for octal, otherwise
/// decimal. Panics if the digits are invalid for the detected base or the
/// value overflows `i32` — callers pass text already matched by the
/// grammar's INTEGER_CONST rule.
fn parse_int_const(s: &str) -> i32 {
    // Shadowing to drop surrounding whitespace is intentional.
    let s = s.trim();
    match s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        // Hexadecimal: digits after the 0x/0X prefix.
        Some(hex) => i32::from_str_radix(hex, 16).unwrap(),
        // Octal: leading zero followed by at least one more digit
        // (a lone "0" falls through to the decimal branch).
        None if s.starts_with('0') && s.len() > 1 => i32::from_str_radix(&s[1..], 8).unwrap(),
        // Decimal.
        None => s.parse().unwrap(),
    }
}