use pest::Parser;
use std::env;
use std::fs;
use pest_derive::Parser;    

/// Pest-generated lexer entry point; the token rules are defined in `lexer.pest`.
#[derive(Parser)]
#[grammar = "lexer.pest"]   // grammar rule file (compiled in by pest_derive)
pub struct ExpressionParser;

/// A single lexical token together with its source position
/// (line/column are 1-based, as reported by pest's `line_col`).
#[derive(Debug, PartialEq)]
pub struct Token {
    value: String,      // lexeme text; numeric tokens store the value rendered in decimal
    token_type: String, // category label, e.g. "IDENT", "INTEGER_CONST", "ILLEGAL", "None"
    line: usize,
    col: usize,
}

/// Build a [`Token`] from a pest pair, automatically recording the
/// line/column where the match starts.
fn make_token(p: pest::iterators::Pair<Rule>, typ: &str) -> Token {
    let start = p.as_span().start_pos();
    let (line, col) = start.line_col();
    let value = p.as_str().trim().to_string();
    Token {
        value,
        token_type: typ.to_string(),
        line,
        col,
    }
}
/// Build a numeric [`Token`], decoding octal (`0…`), hexadecimal
/// (`0x…`/`0X…`) or decimal literals and storing the value in decimal form.
///
/// Panics if the literal overflows `i64` or the grammar produced a
/// malformed number (both indicate a grammar bug, not user error).
fn make_num_token(p: pest::iterators::Pair<Rule>, typ: &str) -> Token {
    let (line, col) = p.as_span().start_pos().line_col();
    // INTEGER_CONST always wraps exactly one concrete literal rule.
    let inner = p
        .into_inner()
        .next()
        .expect("INTEGER_CONST must contain a literal sub-rule");
    let text = inner.as_str().trim();
    let num = match inner.as_rule() {
        Rule::OCT_INTEGER => {
            // Drop leading zeros; an all-zero literal (e.g. "00") is just 0.
            let digits = text.trim_start_matches('0');
            if digits.is_empty() {
                0
            } else {
                i64::from_str_radix(digits, 8).expect("grammar guarantees octal digits")
            }
        }
        Rule::HEX_INTEGER => {
            // strip_prefix removes the marker exactly once (trim_start_matches
            // would also eat a repeated "0x0x" prefix).
            let digits = text
                .strip_prefix("0x")
                .or_else(|| text.strip_prefix("0X"))
                .unwrap_or(text);
            i64::from_str_radix(digits, 16).expect("grammar guarantees hex digits")
        }
        _ => text.parse::<i64>().expect("grammar guarantees decimal digits"),
    };
    Token {
        value: num.to_string(),
        token_type: typ.to_string(),
        line,
        col,
    }
}
///// Construct a keyword/identifier token (unfinished; intentionally left commented out)
//fn make_ident_keyword_token(p: pest::iterators::Pair<Rule>, typ:&str) {
//    let mut token = make_token(p, typ);
//
//    
//}
/// 生成Token序列
pub fn tokenize(input: &str) -> Vec<Token> {
    let pair = ExpressionParser::parse(Rule::program, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e))
        .next().unwrap();

    pair.into_inner()
        .map(|p| match p.as_rule() {
            // 关键字
            Rule::CONST     => make_token(p, "CONST"),
            Rule::INT       => make_token(p, "INT"),
            Rule::VOID      => make_token(p, "VOID"),
            Rule::IF        => make_token(p, "IF"),
            Rule::ELSE      => make_token(p, "ELSE"),
            Rule::WHILE     => make_token(p, "WHILE"),
            Rule::BREAK     => make_token(p, "BREAK"),
            Rule::CONTINUE  => make_token(p, "CONTINUE"),
            Rule::RETURN    => make_token(p, "RETURN"),

            // 运算符
            Rule::EQ        => make_token(p, "EQ"),
            Rule::NEQ       => make_token(p, "NEQ"),
            Rule::LE        => make_token(p, "LE"),
            Rule::GE        => make_token(p, "GE"),
            Rule::AND       => make_token(p, "AND"),
            Rule::OR        => make_token(p, "OR"),
            Rule::PLUS      => make_token(p, "PLUS"),
            Rule::MINUS     => make_token(p, "MINUS"),
            Rule::MUL       => make_token(p, "MUL"),
            Rule::DIV       => make_token(p, "DIV"),
            Rule::MOD       => make_token(p, "MOD"),
            Rule::ASSIGN    => make_token(p, "ASSIGN"),
            Rule::LT        => make_token(p, "LT"),
            Rule::GT        => make_token(p, "GT"),
            Rule::NOT       => make_token(p, "NOT"),

            // 符号
            Rule::L_PAREN   => make_token(p, "L_PAREN"),
            Rule::R_PAREN   => make_token(p, "R_PAREN"),
            Rule::L_BRACE   => make_token(p, "L_BRACE"),
            Rule::R_BRACE   => make_token(p, "R_BRACE"),
            Rule::L_BRACKT  => make_token(p, "L_BRACKT"),
            Rule::R_BRACKT  => make_token(p, "R_BRACKT"),
            Rule::COMMA     => make_token(p, "COMMA"),
            Rule::SEMICOLON => make_token(p, "SEMICOLON"),

            Rule::IDENT     => make_token(p, "IDENT"),
            Rule::ILLEGAL   => make_token(p, "ILLEGAL"),
            Rule::INTEGER_CONST => make_num_token(p, "INTEGER_CONST"),
            // 兜底
            _ => {
                //println!("12345{:#?}",p.as_rule());
                let (line, col) = p.as_span().start_pos().line_col();
                Token { value: "None".to_string(), token_type: "None".to_string(), line, col }
                //unreachable!()
            }
        })
    .collect()
}
/// Print the token stream to stderr, one token per line; if any ILLEGAL
/// token was produced, report every such character instead and emit
/// nothing else.
fn print_tokens(tokens: Vec<Token>) {
    // First pass: report all lexing errors. Any hit suppresses normal output.
    let mut clean = true;
    for t in tokens.iter().filter(|t| t.token_type == "ILLEGAL") {
        eprintln!(
            "Error type A at Line {}: Mysterious character \"{}\".",
            t.line, t.value
        );
        clean = false;
    }
    if !clean {
        return;
    }
    // Second pass: dump real tokens, skipping "None" placeholders (e.g. EOI).
    for t in tokens.iter().filter(|t| t.token_type != "None") {
        eprintln!("{} {} at Line {}.", t.token_type, t.value, t.line);
    }
}
/// CLI entry point: lex the file named by the first argument and print
/// the resulting tokens (or lexing errors) to stderr.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }
    let filename = &args[1];
    // Fail gracefully (message + nonzero exit) instead of panicking when
    // the file cannot be read — consistent with the usage-error path above.
    let input = fs::read_to_string(filename).unwrap_or_else(|e| {
        eprintln!("Failed to read file {}: {}", filename, e);
        std::process::exit(1);
    });

    let tokens = tokenize(&input);

    print_tokens(tokens);
}

