use std::{env, fs};
use pest::Parser;
use pest_derive::Parser;

/// pest-generated lexer for SysY; the token rules (keywords, operators,
/// punctuation, IDENT, INTEGER_CONST, UNKNOWN, EOI) live in `lexer.pest`
/// at the crate root. The derive also generates the `Rule` enum used below.
#[derive(Parser)]
#[grammar = "lexer.pest"]
struct SysYLexer;

/// A single lexical token together with its source position.
#[derive(Debug, PartialEq)]
pub struct Token {
    /// What the token is: keyword, identifier, literal, operator, etc.
    pub kind: TokenKind,
    /// 1-based line of the token's first character (from pest's `line_col`).
    pub line: usize,
    /// 1-based column of the token's first character.
    pub col: usize,
}

/// The category (and payload) of a lexed token.
///
/// NOTE(review): variant names `INTegerConst` and `UNKNOWN` do not follow
/// UpperCamelCase, but they are public and matched on elsewhere, so they are
/// kept as-is.
#[derive(Debug, PartialEq)]
pub enum TokenKind {
    /// A user-defined name; carries the identifier's text.
    Identifier(String),
    /// An integer literal, already converted to its numeric value.
    INTegerConst(i64),
    /// A reserved word.
    Keyword(Keyword),
    /// An arithmetic, relational, or logical operator.
    Operator(Operator),
    /// Brackets, comma, or semicolon.
    Punctuation(Punctuation),
    /// A character the grammar could not classify; carries the raw text.
    UNKNOWN(String),
    /// End of input (pest's EOI rule).
    Eof,
}

/// SysY reserved words.
///
/// Variant names are the uppercase spelling used as the token tag in the
/// lexer's diagnostic output (see [`Keyword::to_str_upgrade`]); they are
/// public and therefore kept as-is despite not being UpperCamelCase.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Keyword {
    CONTINUE,
    BREAK,
    RETURN,
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
}

impl Keyword {
    /// Source-level spelling of the keyword, e.g. `Keyword::IF` => `"if"`.
    pub fn to_str(&self) -> &'static str {
        match self {
            Keyword::CONTINUE => "continue",
            Keyword::BREAK => "break",
            Keyword::RETURN => "return",
            Keyword::CONST => "const",
            Keyword::INT => "int",
            Keyword::VOID => "void",
            Keyword::IF => "if",
            Keyword::ELSE => "else",
            Keyword::WHILE => "while",
        }
    }

    /// Uppercase token tag for output, e.g. `Keyword::IF` => `"IF"`.
    pub fn to_str_upgrade(&self) -> String {
        self.to_str().to_uppercase()
    }
}

/// SysY operators: relational, logical, assignment, and arithmetic.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Operator {
    Ge,
    Le,
    Eq,
    Neq,
    And,
    Or,
    Gt,
    Lt,
    Assign,
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    Not,
}

impl Operator {
    /// Diagnostic label: the token tag followed by the operator's spelling,
    /// e.g. `Operator::Ge` => `"GE >="`.
    pub fn printout(&self) -> &'static str {
        match self {
            Operator::Ge => "GE >=",
            Operator::Le => "LE <=",
            Operator::Eq => "EQ ==",
            Operator::Neq => "NEQ !=",
            Operator::And => "AND &&",
            Operator::Or => "OR ||",
            Operator::Gt => "GT >",
            Operator::Lt => "LT <",
            Operator::Assign => "ASSIGN =",
            Operator::Plus => "PLUS +",
            Operator::Minus => "MINUS -",
            Operator::Mul => "MUL *",
            Operator::Div => "DIV /",
            Operator::Mod => "MOD %",
            Operator::Not => "NOT !",
        }
    }
}

/// SysY punctuation: bracket pairs, comma, and semicolon.
///
/// NOTE(review): `LBrackt`/`RBrackt` are misspellings of "bracket", but the
/// names are public (matched in `tokenize`), so they are kept unchanged.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Punctuation {
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBrackt,
    RBrackt,
    Comma,
    Semicolon,
}

impl Punctuation {
    /// Diagnostic label: the token tag followed by the punctuation character,
    /// e.g. `Punctuation::Comma` => `"COMMA ,"`.
    pub fn printout(&self) -> &'static str {
        match self {
            Punctuation::LParen => "L_PAREN (",
            Punctuation::RParen => "R_PAREN )",
            Punctuation::LBrace => "L_BRACE {",
            Punctuation::RBrace => "R_BRACE }",
            Punctuation::LBrackt => "L_BRACKT [",
            Punctuation::RBrackt => "R_BRACKT ]",
            Punctuation::Comma => "COMMA ,",
            Punctuation::Semicolon => "SEMICOLON ;",
        }
    }
}

/// Tokenize a SysY source string into a flat list of [`Token`]s.
///
/// Positions are pest's 1-based `(line, col)` of each lexeme's start.
/// Structural rules with no token meaning (e.g. the `program` wrapper) are
/// skipped; an `EOI` pair becomes a trailing [`TokenKind::Eof`] token.
///
/// # Panics
/// Panics if pest fails to match `Rule::program` against `input`, or if an
/// `INTEGER_CONST` lexeme is not valid in its detected base (e.g. `08`) or
/// overflows `i64`.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut tokens = Vec::new();

    let pairs = SysYLexer::parse(Rule::program, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e));

    // `flatten` visits every pair in the parse tree, so token rules nested
    // inside `program` are seen individually.
    for pair in pairs.flatten() {
        let (line, col) = pair.as_span().start_pos().line_col();

        let kind = match pair.as_rule() {
            Rule::CONTINUE => TokenKind::Keyword(Keyword::CONTINUE),
            Rule::BREAK    => TokenKind::Keyword(Keyword::BREAK),
            Rule::RETURN   => TokenKind::Keyword(Keyword::RETURN),
            Rule::CONST    => TokenKind::Keyword(Keyword::CONST),
            Rule::INT      => TokenKind::Keyword(Keyword::INT),
            Rule::VOID     => TokenKind::Keyword(Keyword::VOID),
            Rule::IF       => TokenKind::Keyword(Keyword::IF),
            Rule::ELSE     => TokenKind::Keyword(Keyword::ELSE),
            Rule::WHILE    => TokenKind::Keyword(Keyword::WHILE),

            Rule::IDENT => TokenKind::Identifier(pair.as_str().to_string()),

            Rule::INTEGER_CONST => TokenKind::INTegerConst(parse_int_const(pair.as_str())),

            Rule::GE     => TokenKind::Operator(Operator::Ge),
            Rule::LE     => TokenKind::Operator(Operator::Le),
            Rule::EQ     => TokenKind::Operator(Operator::Eq),
            Rule::NEQ    => TokenKind::Operator(Operator::Neq),
            Rule::AND    => TokenKind::Operator(Operator::And),
            Rule::OR     => TokenKind::Operator(Operator::Or),
            Rule::GT     => TokenKind::Operator(Operator::Gt),
            Rule::LT     => TokenKind::Operator(Operator::Lt),
            Rule::ASSIGN => TokenKind::Operator(Operator::Assign),
            Rule::PLUS   => TokenKind::Operator(Operator::Plus),
            Rule::MINUS  => TokenKind::Operator(Operator::Minus),
            Rule::MUL    => TokenKind::Operator(Operator::Mul),
            Rule::DIV    => TokenKind::Operator(Operator::Div),
            Rule::MOD    => TokenKind::Operator(Operator::Mod),
            Rule::NOT    => TokenKind::Operator(Operator::Not),

            Rule::L_PAREN   => TokenKind::Punctuation(Punctuation::LParen),
            Rule::R_PAREN   => TokenKind::Punctuation(Punctuation::RParen),
            Rule::L_BRACE   => TokenKind::Punctuation(Punctuation::LBrace),
            Rule::R_BRACE   => TokenKind::Punctuation(Punctuation::RBrace),
            Rule::L_BRACKT  => TokenKind::Punctuation(Punctuation::LBrackt),
            Rule::R_BRACKT  => TokenKind::Punctuation(Punctuation::RBrackt),
            Rule::COMMA     => TokenKind::Punctuation(Punctuation::Comma),
            Rule::SEMICOLON => TokenKind::Punctuation(Punctuation::Semicolon),

            Rule::EOI => TokenKind::Eof,

            Rule::UNKNOWN => TokenKind::UNKNOWN(pair.as_str().to_string()),

            // Structural rules (program, whitespace, ...) carry no token.
            _ => continue,
        };

        tokens.push(Token { kind, line, col });
    }

    tokens
}

/// Convert an `INTEGER_CONST` lexeme to its value, dispatching on the base
/// prefix: `0x`/`0X` => hexadecimal, a leading `0` followed by more digits
/// => octal, otherwise decimal. A lone `0` is parsed as decimal.
///
/// # Panics
/// Panics with the offending lexeme if the digits are invalid for the
/// detected base or the value overflows `i64`.
fn parse_int_const(value: &str) -> i64 {
    if let Some(hex) = value.strip_prefix("0x").or_else(|| value.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16)
            .unwrap_or_else(|e| panic!("invalid hex literal {:?}: {}", value, e))
    } else if value.len() > 1 && value.starts_with('0') {
        i64::from_str_radix(&value[1..], 8)
            .unwrap_or_else(|e| panic!("invalid octal literal {:?}: {}", value, e))
    } else {
        value
            .parse()
            .unwrap_or_else(|e| panic!("invalid decimal literal {:?}: {}", value, e))
    }
}

/// CLI entry point: lex the file named by the first argument and print one
/// diagnostic line per token to stderr. If any UNKNOWN token was produced,
/// only "Error type A" lines are printed and the token listing is skipped.
fn main() {
    let args: Vec<String> = env::args().collect();

    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    // Exit cleanly on an unreadable file instead of panicking with a backtrace.
    let input = fs::read_to_string(filename).unwrap_or_else(|e| {
        eprintln!("Failed to read file {}: {}", filename, e);
        std::process::exit(1);
    });

    let tokens = tokenize(&input);

    // Lexical errors take priority over the normal token listing.
    let errors: Vec<&Token> = tokens
        .iter()
        .filter(|t| matches!(t.kind, TokenKind::UNKNOWN(_)))
        .collect();

    if !errors.is_empty() {
        for token in errors {
            if let TokenKind::UNKNOWN(string) = &token.kind {
                eprintln!("Error type A at Line {}: Mysterious character \"{}\".", token.line, string);
            }
        }
    } else {
        for token in tokens {
            match token.kind {
                TokenKind::Keyword(keyword) => eprintln!(
                    "{} {} at Line {}.",
                    keyword.to_str_upgrade(),
                    keyword.to_str(),
                    token.line
                ),
                TokenKind::Identifier(string) => {
                    eprintln!("IDENT {} at Line {}.", string, token.line)
                }
                TokenKind::Punctuation(punctuation) => {
                    eprintln!("{} at Line {}.", punctuation.printout(), token.line)
                }
                TokenKind::Operator(operator) => {
                    eprintln!("{} at Line {}.", operator.printout(), token.line)
                }
                TokenKind::INTegerConst(num) => {
                    eprintln!("INTEGER_CONST {} at Line {}.", num, token.line)
                }
                // EOF prints nothing; UNKNOWN cannot reach this branch because
                // the error path above handles it. Listing both explicitly
                // keeps the match exhaustive (no `_` catch-all), so adding a
                // new TokenKind variant becomes a compile error here.
                TokenKind::Eof | TokenKind::UNKNOWN(_) => {}
            }
        }
    }
}
