use pest::Parser;
use pest_derive::Parser;
use std::env;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::process;
use std::collections::HashMap;
use pest::iterators::Pairs;

// Kind of a lexical token: literal, identifier, keyword, operator, or punctuation.
#[derive(Debug, PartialEq, Clone, Copy)]
enum ItemKind {
    // Integer literal (decimal, octal, or hex — normalized to decimal in `Lexer::run`).
    Number,
    // Identifier.
    Ident,
    // Keywords.
    KeywordInt,
    KeywordVoid,
    KeywordConst,
    KeywordIf,
    KeywordWhile,
    KeywordElse,
    KeywordBreak,
    KeywordReturn,
    KeywordContinue,
    // Operators.
    OpPlus,
    OpMinus,
    OpStar,
    OpNot,
    OpOr,
    OpAnd,
    OpGreaterEqual,
    OpGreaterThan,
    OpLessEqual,
    OpLessThan,
    OpSlash,
    OpPercent,
    OpAssign,
    OpEqual,
    OpNotEqual,
    // Punctuation.
    LParen,
    RParen,
    LBracket,
    RBracket,
    LBrace,
    RBrace,
    Semicolon,
    Comma,
}
impl std::fmt::Display for ItemKind {
    /// Formats the token kind with the same uppercase labels this file's
    /// `Lexer::print_result` emits (e.g. `PLUS`, `L_PAREN`), so the two can
    /// never disagree. The old catch-all fell back to the `Debug` name
    /// ("OpPlus"), which contradicted those labels; the match is now
    /// exhaustive on purpose — adding a variant forces a compile error here.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            ItemKind::Number => "INTEGER_CONST",
            ItemKind::Ident => "IDENT",
            ItemKind::KeywordInt => "INT",
            ItemKind::KeywordVoid => "VOID",
            ItemKind::KeywordConst => "CONST",
            ItemKind::KeywordIf => "IF",
            ItemKind::KeywordWhile => "WHILE",
            ItemKind::KeywordElse => "ELSE",
            ItemKind::KeywordBreak => "BREAK",
            ItemKind::KeywordReturn => "RETURN",
            ItemKind::KeywordContinue => "CONTINUE",
            ItemKind::OpPlus => "PLUS",
            ItemKind::OpMinus => "MINUS",
            ItemKind::OpStar => "MUL",
            ItemKind::OpNot => "NOT",
            ItemKind::OpOr => "OR",
            ItemKind::OpAnd => "AND",
            ItemKind::OpGreaterEqual => "GEQ",
            ItemKind::OpGreaterThan => "GT",
            ItemKind::OpLessEqual => "LEQ",
            ItemKind::OpLessThan => "LT",
            ItemKind::OpSlash => "DIV",
            ItemKind::OpPercent => "MOD",
            ItemKind::OpAssign => "ASSIGN",
            ItemKind::OpEqual => "EQ",
            ItemKind::OpNotEqual => "NEQ",
            ItemKind::LParen => "L_PAREN",
            ItemKind::RParen => "R_PAREN",
            ItemKind::LBracket => "L_BRACKET",
            ItemKind::RBracket => "R_BRACKET",
            ItemKind::LBrace => "L_BRACE",
            ItemKind::RBrace => "R_BRACE",
            ItemKind::Semicolon => "SEMICOLON",
            ItemKind::Comma => "COMMA",
        };
        f.write_str(label)
    }
}

// pest parser generated from the grammar in `sysy.pest`; the derive also
// produces the `Rule` enum referenced throughout this file.
#[derive(Parser)]
#[grammar = "sysy.pest"]
struct SysYParser;

// A single lexical token.
#[derive(Debug)]
struct Token {
    kind: ItemKind, // category of the token
    text: String,   // lexeme text (for numbers: the decimal value string, see `Lexer::run`)
    line: usize,    // 1-based source line on which the token starts
}

// Lexer engine: owns the source text and the token stream produced from it.
struct Lexer {
    source: String,     // full source text, line endings normalized to '\n'
    tokens: Vec<Token>, // tokens in source order, filled in by `run`
    has_error: bool,    // set when the pest parse fails; suppresses printing
}

impl Lexer {
    //构造函数：读取源文件
    fn new(file_path: &str) -> Result<Self, io::Error> {
        let mut source = String::new();
        let file = File::open(file_path)?;
        let reader = BufReader::new(file);
        for line in reader.lines() {
            source.push_str(&line?);
            source.push('\n');
        }
        Ok(Self { source, tokens: Vec::new(), has_error: false })
    }

    //运行词法分析
    fn run(&mut self) {
        //解析（这里会借用 self.source）
        let parse_result = SysYParser::parse(Rule::main_program, &self.source);

        match parse_result {
            Ok(pairs) => {
                let mut keyword_map = HashMap::new();
                keyword_map.insert("int", ItemKind::KeywordInt);
                keyword_map.insert("void", ItemKind::KeywordVoid);
                keyword_map.insert("const", ItemKind::KeywordConst);
                keyword_map.insert("if", ItemKind::KeywordIf);
                keyword_map.insert("while", ItemKind::KeywordWhile);
                keyword_map.insert("else", ItemKind::KeywordElse);
                keyword_map.insert("break", ItemKind::KeywordBreak);
                keyword_map.insert("return", ItemKind::KeywordReturn);
                keyword_map.insert("continue", ItemKind::KeywordContinue);

                //关键：把对 self 的借用拆分为对不同字段的借用，避免冲突
                let source = &self.source;           //不可变借用source
                let tokens = &mut self.tokens;       //可变借用tokens

                //内部递归函数：只接收所需字段，避免&mut self与&self.source同时存在
                fn process_pairs(
                    pairs: Pairs<Rule>,
                    source: &str,
                    tokens: &mut Vec<Token>,
                    keyword_map: &HashMap<&str, ItemKind>,
                ) {
                    for pair in pairs {
                        let inner_pairs = pair.clone().into_inner();
                        if inner_pairs.peek().is_some() {
                            process_pairs(inner_pairs, source, tokens, keyword_map);
                            continue;
                        }

                        let start = pair.as_span().start();
                        let line_num = source[0..start].chars().filter(|&c| c == '\n').count() + 1;
                        let token_text = pair.as_str();

                        match pair.as_rule() {
                            Rule::number => {
                                let value = if token_text.starts_with("0x") {
                                    i64::from_str_radix(&token_text[2..], 16).unwrap()
                                } else if token_text.starts_with('0')
                                    && token_text.len() > 1
                                    && token_text.chars().all(|c| c >= '0' && c <= '7')
                                {
                                    i64::from_str_radix(&token_text[1..], 8).unwrap()
                                } else {
                                    token_text.parse::<i64>().unwrap()
                                };
                                tokens.push(Token { kind: ItemKind::Number, text: value.to_string(), line: line_num });
                            }
                            Rule::ident => {
                                let key = token_text.to_lowercase();
                                if let Some(kind) = keyword_map.get(key.as_str()) {
                                    tokens.push(Token { kind: *kind, text: token_text.to_string(), line: line_num });
                                } else {
                                    tokens.push(Token { kind: ItemKind::Ident, text: token_text.to_string(), line: line_num });
                                }
                            }
                            Rule::keyword_int => {
                                let token_text = pair.as_str();
                                tokens.push(Token { kind: ItemKind::KeywordInt, text: token_text.to_string(), line: line_num });
                            }
                            Rule::keyword_void => {
                                let token_text = pair.as_str();
                                tokens.push(Token { kind: ItemKind::KeywordVoid, text: token_text.to_string(), line: line_num });
                            }
                            // 其它规则保持不变
                            Rule::op_equal => tokens.push(Token { kind: ItemKind::OpEqual, text: token_text.to_string(), line: line_num }),
                            Rule::op_not_equal => tokens.push(Token { kind: ItemKind::OpNotEqual, text: token_text.to_string(), line: line_num }),
                            Rule::op_less_equal => tokens.push(Token { kind: ItemKind::OpLessEqual, text: token_text.to_string(), line: line_num }),
                            Rule::op_greater_equal => tokens.push(Token { kind: ItemKind::OpGreaterEqual, text: token_text.to_string(), line: line_num }),
                            Rule::op_and => tokens.push(Token { kind: ItemKind::OpAnd, text: token_text.to_string(), line: line_num }),
                            Rule::op_or => tokens.push(Token { kind: ItemKind::OpOr, text: token_text.to_string(), line: line_num }),
                            Rule::op_plus => tokens.push(Token { kind: ItemKind::OpPlus, text: token_text.to_string(), line: line_num }),
                            Rule::op_minus => tokens.push(Token { kind: ItemKind::OpMinus, text: token_text.to_string(), line: line_num }),
                            Rule::op_star => tokens.push(Token { kind: ItemKind::OpStar, text: token_text.to_string(), line: line_num }),
                            Rule::op_not => tokens.push(Token { kind: ItemKind::OpNot, text: token_text.to_string(), line: line_num }),
                            Rule::op_slash => tokens.push(Token { kind: ItemKind::OpSlash, text: token_text.to_string(), line: line_num }),
                            Rule::op_percent => tokens.push(Token { kind: ItemKind::OpPercent, text: token_text.to_string(), line: line_num }),
                            Rule::op_assign => tokens.push(Token { kind: ItemKind::OpAssign, text: token_text.to_string(), line: line_num }),
                            Rule::op_less_than => tokens.push(Token { kind: ItemKind::OpLessThan, text: token_text.to_string(), line: line_num }),
                            Rule::op_greater_than => tokens.push(Token { kind: ItemKind::OpGreaterThan, text: token_text.to_string(), line: line_num }),
                            Rule::paren_l => tokens.push(Token { kind: ItemKind::LParen, text: token_text.to_string(), line: line_num }),
                            Rule::paren_r => tokens.push(Token { kind: ItemKind::RParen, text: token_text.to_string(), line: line_num }),
                            Rule::bracket_l => tokens.push(Token { kind: ItemKind::LBracket, text: token_text.to_string(), line: line_num }),
                            Rule::bracket_r => tokens.push(Token { kind: ItemKind::RBracket, text: token_text.to_string(), line: line_num }),
                            Rule::brace_l => tokens.push(Token { kind: ItemKind::LBrace, text: token_text.to_string(), line: line_num }),
                            Rule::brace_r => tokens.push(Token { kind: ItemKind::RBrace, text: token_text.to_string(), line: line_num }),
                            Rule::semicolon => tokens.push(Token { kind: ItemKind::Semicolon, text: token_text.to_string(), line: line_num }),
                            Rule::comma => tokens.push(Token { kind: ItemKind::Comma, text: token_text.to_string(), line: line_num }),
                            _ => {}
                        }
                    }
                }

                // 开始处理解析树
                process_pairs(pairs, source, tokens, &keyword_map);
            }
            Err(e) => {
                eprintln!("{}", e);
                self.has_error = true;
            }
        }
    }

    //打印结果
    fn print_result(&self) {
        if self.has_error {
            return;
        }
        for token in &self.tokens {
            match token.kind {
                ItemKind::KeywordInt => eprintln!("INT {} at Line {}.", token.text, token.line),
                ItemKind::KeywordVoid => eprintln!("VOID {} at Line {}.", token.text, token.line),
                ItemKind::KeywordConst => eprintln!("CONST {} at Line {}.", token.text, token.line),
                ItemKind::KeywordIf => eprintln!("IF {} at Line {}.", token.text, token.line),
                ItemKind::KeywordWhile => eprintln!("WHILE {} at Line {}.", token.text, token.line),
                ItemKind::KeywordElse => eprintln!("ELSE {} at Line {}.", token.text, token.line),
                ItemKind::KeywordBreak => eprintln!("BREAK {} at Line {}.", token.text, token.line),
                ItemKind::KeywordReturn => eprintln!("RETURN {} at Line {}.", token.text, token.line),
                ItemKind::KeywordContinue => eprintln!("CONTINUE {} at Line {}.", token.text, token.line),
                ItemKind::Ident => eprintln!("IDENT {} at Line {}.", token.text, token.line),
                ItemKind::Number => eprintln!("INTEGER_CONST {} at Line {}.", token.text, token.line),
                ItemKind::OpPlus => eprintln!("PLUS {} at Line {}.", token.text, token.line),
                ItemKind::OpMinus => eprintln!("MINUS {} at Line {}.", token.text, token.line),
                ItemKind::OpStar => eprintln!("MUL {} at Line {}.", token.text, token.line),
                ItemKind::OpNot => eprintln!("NOT {} at Line {}.", token.text, token.line),
                ItemKind::OpOr => eprintln!("OR {} at Line {}.", token.text, token.line),
                ItemKind::OpAnd => eprintln!("AND {} at Line {}.", token.text, token.line),
                ItemKind::OpGreaterEqual => eprintln!("GEQ {} at Line {}.", token.text, token.line),
                ItemKind::OpGreaterThan => eprintln!("GT {} at Line {}.", token.text, token.line),
                ItemKind::OpLessEqual => eprintln!("LEQ {} at Line {}.", token.text, token.line),
                ItemKind::OpLessThan => eprintln!("LT {} at Line {}.", token.text, token.line),
                ItemKind::OpSlash => eprintln!("DIV {} at Line {}.", token.text, token.line),
                ItemKind::OpPercent => eprintln!("MOD {} at Line {}.", token.text, token.line),
                ItemKind::OpAssign => eprintln!("ASSIGN {} at Line {}.", token.text, token.line),
                ItemKind::OpEqual => eprintln!("EQ {} at Line {}.", token.text, token.line),
                ItemKind::OpNotEqual => eprintln!("NEQ {} at Line {}.", token.text, token.line),
                ItemKind::LParen => eprintln!("L_PAREN {} at Line {}.", token.text, token.line),
                ItemKind::RParen => eprintln!("R_PAREN {} at Line {}.", token.text, token.line),
                ItemKind::LBracket => eprintln!("L_BRACKET {} at Line {}.", token.text, token.line),
                ItemKind::RBracket => eprintln!("R_BRACKET {} at Line {}.", token.text, token.line),
                ItemKind::LBrace => eprintln!("L_BRACE {} at Line {}.", token.text, token.line),
                ItemKind::RBrace => eprintln!("R_BRACE {} at Line {}.", token.text, token.line),
                ItemKind::Semicolon => eprintln!("SEMICOLON {} at Line {}.", token.text, token.line),
                ItemKind::Comma => eprintln!("COMMA {} at Line {}.", token.text, token.line),
            }
        }
    }
}

// Program entry point: lex the SysY file named on the command line.
// On a syntax error, prints the course-style diagnostic to stdout and
// exits with status 1; on I/O errors, prints to stderr and exits 1.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <source_file_path>", args[0]);
        process::exit(1);
    }
    let file_path = &args[1];

    // Read the file once up front so a syntax error can be located precisely.
    let source = match std::fs::read_to_string(file_path) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("Error: Failed to read file {}: {}", file_path, e);
            process::exit(1);
        }
    };

    // Pre-parse to intercept syntax errors with our own message format.
    // NOTE(review): the file is read and parsed a second time inside
    // `Lexer::new`/`Lexer::run`; deduplicating would require changing the
    // `Lexer` interface, so the existing flow is kept.
    match SysYParser::parse(Rule::main_program, &source) {
        Ok(_) => match Lexer::new(file_path) {
            Ok(mut lexer) => {
                lexer.run();
                lexer.print_result();
            }
            Err(e) => {
                eprintln!("Error: Failed to read file {}: {}", file_path, e);
                process::exit(1);
            }
        },
        Err(err) => {
            use pest::error::InputLocation;
            // pest reports a byte offset; for a span, take its start.
            let pos = match err.location {
                InputLocation::Pos(p) => p,
                InputLocation::Span((p, _)) => p,
            };
            // 1-based line number: '\n' is ASCII, so byte counting is exact.
            let line = source[..pos].bytes().filter(|&b| b == b'\n').count() + 1;
            // Checked slicing: the old `source[pos..]` would panic if `pos`
            // landed at EOF edge cases or off a char boundary; `get` degrades
            // to the '?' placeholder instead.
            let ch = source
                .get(pos..)
                .and_then(|rest| rest.chars().next())
                .unwrap_or('?');
            println!("Error type A at Line {}: Mysterious character \"{}\".", line, ch);
            process::exit(1);
        }
    }
}



