use pest_derive::Parser;
use pest::Parser;
use crate::model::{Token,TokenKind};

/// Pest-generated lexer for SysY. The actual token rules are defined in
/// `lexer.pest` (relative to the crate's `src/` directory); `pest_derive`
/// generates the `Rule` enum and the `Parser` impl from that grammar file.
#[derive(Parser)]
#[grammar = "lexer.pest"]
struct SysYLexer;

/// Returns `true` if `c` is part of the SysY source alphabet:
/// ASCII letters and digits, underscore, whitespace (space, tab, LF, CR),
/// and the operator/punctuation characters the language uses.
fn is_valid_char(c: char) -> bool {
    if c.is_ascii_alphanumeric() || c == '_' {
        return true;
    }
    // Whitespace, operators, and delimiters accepted by the lexer.
    " \t\n\r+-*/%=!<>&|()[]{},;".contains(c)
}

/// Scans `src` and returns the sorted, de-duplicated 1-based line numbers
/// that contain characters outside the SysY alphabet.
///
/// Characters inside `//` line comments and `/* ... */` block comments are
/// not reported, and UTF-8 BOM characters are skipped entirely. Newlines
/// still advance the line counter while inside a block comment.
fn collect_illegal_lines(src: &str) -> Vec<usize> {
    let mut illegal_lines: Vec<usize> = Vec::new();
    let mut current_line = 1usize;
    let mut in_line_comment = false;
    let mut in_block_comment = false;
    let mut it = src.chars().peekable();

    while let Some(ch) = it.next() {
        // A byte-order mark is never reported and never affects state.
        if ch == '\u{FEFF}' {
            continue;
        }
        // A newline always terminates a line comment and bumps the counter,
        // even when it occurs inside a block comment.
        if ch == '\n' {
            in_line_comment = false;
            current_line += 1;
        }
        // Comment delimiters are two-character sequences; the second
        // character is consumed together with the first so it cannot be
        // re-interpreted (e.g. the `*` of `/*` never closes the comment).
        if !in_line_comment && !in_block_comment {
            if ch == '/' {
                match it.peek() {
                    Some('/') => {
                        in_line_comment = true;
                        it.next();
                        continue;
                    }
                    Some('*') => {
                        in_block_comment = true;
                        it.next();
                        continue;
                    }
                    _ => {}
                }
            }
        } else if in_block_comment && !in_line_comment && ch == '*' && it.peek() == Some(&'/') {
            in_block_comment = false;
            it.next();
            continue;
        }
        // Anything inside a comment is exempt from the character check.
        if in_line_comment || in_block_comment {
            continue;
        }
        if !is_valid_char(ch) {
            illegal_lines.push(current_line);
        }
    }

    illegal_lines.sort();
    illegal_lines.dedup();
    illegal_lines
}

/// Sole public entry point of the lexer: turns SysY source text into a
/// flat list of [`Token`]s.
///
/// Pipeline:
/// 1. Pre-scan for characters outside the SysY alphabet so that *all*
///    offending lines can be reported at once (pest would stop at the first).
/// 2. Run the pest-generated lexer over the whole input.
/// 3. Map each matched rule onto a [`TokenKind`], recording the source line
///    and the matched lexeme; structural rules (SOI/EOI/whitespace/comments)
///    are skipped.
///
/// # Panics
/// Panics with an "Error type A ... Mysterious character." diagnostic on any
/// illegal character or lexer failure — this crate reports lexical errors by
/// panicking rather than returning a `Result`.
pub fn run(src: &str) -> Vec<Token> {
    // Step 1: illegal-character check (no `&src` — that would pass `&&str`).
    let bad_lines = collect_illegal_lines(src);
    if !bad_lines.is_empty() {
        panic!("Error type A at Lines {:?}: Mysterious character.", bad_lines);
    }

    // Step 2: pest tokenization. On failure, extract the 1-based line from
    // either error-location shape and report it in the same diagnostic form.
    let pairs = SysYLexer::parse(Rule::file, src).unwrap_or_else(|e| {
        let line = match e.line_col {
            pest::error::LineColLocation::Pos((l, _)) => l,
            pest::error::LineColLocation::Span((l, _), _) => l,
        };
        panic!("Error type A at Line {}: Mysterious character.", line);
    });

    // Step 3: build the token list. The glob import is hoisted out of the
    // loop; `use` is scope-level, not per-iteration, so it belongs here.
    use TokenKind::*;
    let mut tokens = Vec::new();
    for pair in pairs {
        for tok in pair.into_inner() {
            let rule = tok.as_rule();
            let text = tok.as_str();
            let (line, _) = tok.line_col();

            let kind = match rule {
                Rule::CONST         => Const,
                Rule::INT           => Int,
                Rule::VOID          => Void,
                Rule::IF            => If,
                Rule::ELSE          => Else,
                Rule::WHILE         => While,
                Rule::BREAK         => Break,
                Rule::CONTINUE      => Continue,
                Rule::RETURN        => Return,
                Rule::IDENT         => Ident,
                Rule::INTEGER_CONST => IntegerConst,
                Rule::PLUS          => Plus,
                Rule::MINUS         => Minus,
                Rule::MUL           => Mul,
                Rule::DIV           => Div,
                Rule::MOD           => Mod,
                Rule::ASSIGN        => Assign,
                Rule::EQ            => Eq,
                Rule::NEQ           => Neq,
                Rule::LT            => Lt,
                Rule::LE            => Le,
                Rule::GT            => Gt,
                Rule::GE            => Ge,
                Rule::NOT           => Not,
                Rule::AND           => And,
                Rule::OR            => Or,
                Rule::L_PAREN       => LParen,
                Rule::R_PAREN       => RParen,
                Rule::L_BRACKT      => LBrack,
                Rule::R_BRACKT      => RBrack,
                Rule::L_BRACE       => LBrace,
                Rule::R_BRACE       => RBrace,
                Rule::COMMA         => Comma,
                Rule::SEMICOLON     => Semicolon,
                // Skip SOI/EOI/WHITESPACE/COMMENT and any other silent rules.
                _ => continue,
            };
            tokens.push(Token { line, kind, text: text.to_string() });
        }
    }
    tokens
}

/// Consumes a token list and returns just the `TokenKind` of each entry,
/// preserving order. Useful for kind-only comparisons in tests.
pub fn tokens_to_kind_vec(tokens: Vec<Token>) -> Vec<TokenKind> {
    let mut kinds = Vec::with_capacity(tokens.len());
    for tok in tokens {
        kinds.push(tok.kind);
    }
    kinds
}
