use std::{env, fmt, fs, num::ParseIntError};

use pest::{Parser, iterators::Pair};
use pest_derive::Parser;

/// Pest-generated lexer; grammar rules live in `src/lexer.pest`.
/// The derive expands into the `Rule` enum used throughout this file.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct YParser;

/// Lexical token kinds produced by `tokenize`.
///
/// `Identifier` and `IntegerConst` carry their source payload; every other
/// variant is a fixed keyword, operator, or punctuation mark.
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    Int,
    Return,
    Define,
    NewLine,
    Char,
    If,
    Else,
    While,
    Break,
    Continue,
    Const,
    Void,
    Identifier(String),
    IntegerConst(i64),
    Plus,
    Minus,
    Mult,
    Div,
    Mod,
    Not,
    Dot,
    Quest,
    And,
    Or,
    Eq,
    Neq,
    Gt,
    Lt,
    Ge,
    Le,
    BitXor,
    BitOr,
    BitAnd,
    Assign,
    Semicolon,
    Pound,
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBracket,
    RBracket,
    SigQuot,
    DouQuot,
    BackSlash,
    Slash,
    Amp,
    At,
    UnderScore,
    Colon,
    Comma,
    Unknown,
}

impl fmt::Display for Token {
    /// Renders the token in the upper-case tag format the `.out` reference
    /// files expect (e.g. `INT`, `L_PAREN`, `IDENT foo`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The two payload-carrying variants need formatting; every other
        // variant maps to a fixed tag string written out at the end.
        let name = match self {
            Token::Identifier(s) => return write!(f, "IDENT {}", s),
            Token::IntegerConst(v) => return write!(f, "INTEGER_CONST {}", v),
            Token::Int => "INT",
            Token::Return => "RETURN",
            Token::Define => "DEFINE",
            Token::NewLine => "NEWLINE",
            Token::Char => "CHAR",
            Token::If => "IF",
            Token::Else => "ELSE",
            Token::While => "WHILE",
            Token::Break => "BREAK",
            Token::Continue => "CONTINUE",
            Token::Const => "CONST",
            Token::Void => "VOID",
            Token::Plus => "PLUS",
            Token::Minus => "MINUS",
            Token::Mult => "MUL",
            Token::Div => "DIV",
            Token::Mod => "MOD",
            Token::Not => "NOT",
            Token::Dot => "DOT",
            Token::Quest => "QUESTION",
            Token::And => "AND",
            Token::Or => "OR",
            Token::Eq => "EQ",
            Token::Neq => "NEQ",
            Token::Gt => "GT",
            Token::Lt => "LT",
            Token::Ge => "GE",
            Token::Le => "LE",
            Token::BitXor => "BIT_XOR",
            Token::BitOr => "BIT_OR",
            Token::BitAnd => "BIT_AND",
            Token::Assign => "ASSIGN",
            Token::Semicolon => "SEMICOLON",
            Token::Pound => "POUND",
            Token::LParen => "L_PAREN",
            Token::RParen => "R_PAREN",
            Token::LBrace => "L_BRACE",
            Token::RBrace => "R_BRACE",
            // Note: the BRACKT spelling matches the reference output files.
            Token::LBracket => "L_BRACKT",
            Token::RBracket => "R_BRACKT",
            // BUG FIX: previously emitted the garbled tag "SINGle_QUOTATION".
            Token::SigQuot => "SINGLE_QUOTATION",
            Token::DouQuot => "DOUBLE_QUOTATION",
            Token::BackSlash => "BACK_SLASH",
            Token::Slash => "SLASH",
            Token::Amp => "AMP",
            Token::At => "AT",
            Token::UnderScore => "UNDERSCORE",
            Token::Colon => "COLON",
            Token::Comma => "COMMA",
            Token::Unknown => "UNKNOWN",
        };
        f.write_str(name)
    }
}

/// CLI entry point: lexes the file named by the first argument and prints
/// the formatted token listing (or diagnostics) to stderr.
fn main() {
    let args: Vec<String> = env::args().collect();
    match args.get(1) {
        None => eprintln!("Usage: {} <filename>", args[0]),
        Some(filename) => match fs::read_to_string(filename) {
            Err(e) => eprintln!("Error reading file: {}", e),
            Ok(content) => match token_print(&content) {
                Ok(r) => eprintln!("{}", r),
                Err(e) => eprintln!("{}", e),
            },
        },
    }
}
fn token_print(content: &str) -> Result<String, String> {
    if content.is_empty() {
        return Ok("".to_string());
    }
    match tokenize(content) {
        Ok(tokens) => {
            if tokens.get(0).expect("get first token error").0 == Token::Unknown {
                let ret_str: Vec<String> = tokens
                    .iter()
                    .map(|(_, s, l)| {
                        format!(
                            "Error type A at Line {}: Mysterious character \"{}\".",
                            l, s
                        )
                    })
                    .collect();
                Err(format!("{}", ret_str.join("\n")))
            } else {
                let ret_str: Vec<String> = tokens
                    .iter()
                    .map(|(t, s, l)| match *t {
                        Token::IntegerConst(_) | Token::Identifier(_) => {
                            format!("{} at Line {}.", t, l)
                        }
                        _ => {
                            format!("{} {} at Line {}.", t, s, l)
                        }
                    })
                    .collect();
                Ok(ret_str.join("\n"))
            }
        }
        Err(e) => Err(format!("Panic! parse error \"{}\".", e)),
    }
}

/// Maps a lexer rule that carries no payload to its `Token` variant.
/// Returns `None` for payload rules (`IDENT`, `INTEGER_CONST`) and for any
/// rule that has no token mapping (reported upstream as `Token::Unknown`).
fn simple_token(rule: Rule) -> Option<Token> {
    Some(match rule {
        Rule::CONST => Token::Const,
        Rule::DEFINE => Token::Define,
        Rule::CHAR => Token::Char,
        Rule::COMMA => Token::Comma,
        Rule::LT => Token::Lt,
        Rule::GT => Token::Gt,
        Rule::LE => Token::Le,
        Rule::GE => Token::Ge,
        Rule::EQ => Token::Eq,
        Rule::NEQ => Token::Neq,
        Rule::NOT => Token::Not,
        Rule::BIT_XOR => Token::BitXor,
        Rule::BIT_OR => Token::BitOr,
        Rule::BIT_AND => Token::BitAnd,
        Rule::VOID => Token::Void,
        Rule::INT => Token::Int,
        Rule::RETURN => Token::Return,
        Rule::IF => Token::If,
        Rule::ELSE => Token::Else,
        Rule::WHILE => Token::While,
        Rule::BREAK => Token::Break,
        Rule::CONTINUE => Token::Continue,
        Rule::PLUS => Token::Plus,
        Rule::MINUS => Token::Minus,
        Rule::MULT => Token::Mult,
        Rule::DIV => Token::Div,
        Rule::MOD => Token::Mod,
        Rule::AND => Token::And,
        Rule::OR => Token::Or,
        Rule::ASSIGN => Token::Assign,
        Rule::SEMICOLON => Token::Semicolon,
        Rule::L_PAREN => Token::LParen,
        Rule::R_PAREN => Token::RParen,
        Rule::L_BRACE => Token::LBrace,
        Rule::R_BRACE => Token::RBrace,
        Rule::L_BRACKT => Token::LBracket,
        Rule::R_BRACKT => Token::RBracket,
        // BUG FIX: both quote rules previously produced Token::RBracket.
        Rule::SIG_QUOT => Token::SigQuot,
        Rule::DOU_QUOT => Token::DouQuot,
        Rule::POUND => Token::Pound,
        Rule::AT => Token::At,
        Rule::AMP => Token::Amp,
        Rule::DOT => Token::Dot,
        Rule::QUEST => Token::Quest,
        Rule::BACK_SLASH => Token::BackSlash,
        Rule::COLON => Token::Colon,
        Rule::UNDERSCORE => Token::UnderScore,
        _ => return None,
    })
}

/// Lexes `input` into `(token, lexeme, line_number)` triples.
///
/// If any unmappable characters are found, the returned `Ok` vector instead
/// contains one `Token::Unknown` entry per offending lexeme (the caller
/// distinguishes the two cases by inspecting the first element). A pest
/// grammar failure is returned as `Err`.
pub fn tokenize(input: &str) -> Result<Vec<(Token, String, usize)>, pest::error::Error<Rule>> {
    let mut exp = YParser::parse(Rule::lexer, input)?;
    let pairs = exp.next().unwrap();
    let mut tokens = Vec::<(Token, String, usize)>::new();
    let mut errs = Vec::<(Token, String, usize)>::new();

    for pair in pairs.into_inner() {
        // The `lexer` rule contains TOKEN and WHITESPACE, but WHITESPACE is
        // silenced by the grammar, so only TOKEN pairs appear here.
        if pair.as_rule() != Rule::TOKEN {
            continue;
        }
        // Every TOKEN wraps exactly one concrete sub-rule (int, return, ...),
        // so this unwrap cannot fail.
        let inner = pair.into_inner().next().unwrap();
        let text = inner.as_str().to_string();
        let line = inner.line_col().0;
        match inner.as_rule() {
            Rule::IDENT => tokens.push((Token::Identifier(text.clone()), text, line)),
            Rule::INTEGER_CONST => match handle_integer(&inner) {
                Ok(v) => tokens.push((Token::IntegerConst(v), text, line)),
                Err(e) => {
                    // Malformed literal (e.g. "08"): report and skip it.
                    eprintln!("Parse integer const  Error at Line [{}]:[{}].", line, e);
                }
            },
            rule => match simple_token(rule) {
                Some(tok) => tokens.push((tok, text, line)),
                None => errs.push((Token::Unknown, text, line)),
            },
        }
    }
    if errs.is_empty() { Ok(tokens) } else { Ok(errs) }
}

#[cfg(test)]
mod test {

    use std::{
        fs::File,
        io::{BufRead, BufReader},
    };

    use super::*;

    /// Runs the lexer over every `test_cases/*.in` fixture and compares the
    /// output line-by-line against the matching `*.out` reference file.
    ///
    /// NOTE(review): mismatches are only reported via `eprintln!`, so this
    /// test never fails on wrong output — consider asserting instead.
    #[test]
    // #[ignore]
    fn test_in_out() {
        let in_files = [
            "test_cases/arrays_and_radix.in",
            "test_cases/comments_and_hex.in",
            "test_cases/complex_errors_test.in",
            "test_cases/complex_expressions.in",
            "test_cases/comprehensive.in",
            "test_cases/edge_case_test.in",
            "test_cases/empty.in",
            "test_cases/error_invalid_char.in",
            "test_cases/invalid_character_error.in",
            "test_cases/keywords.in",
            "test_cases/leading_zeros_test.in",
            "test_cases/numbers.in",
            "test_cases/octal_edge_case.in",
            "test_cases/operators.in",
            "test_cases/sample1.in",
            "test_cases/sample2.in",
            "test_cases/sample3.in",
            "test_cases/simple.in",
            "test_cases/single_ampersand_test.in",
        ];

        for in_file in in_files {
            // The reference output sits next to the input with an `.out` suffix.
            let out_file = format!("{}.out", in_file.strip_suffix(".in").unwrap());

            dbg!(out_file.as_str());

            // Diagnostics from error fixtures are printed and the comparison
            // then runs against an empty string (original behavior kept).
            let input = match token_print(
                fs::read_to_string(in_file)
                    .expect("read in_file error")
                    .as_str(),
            ) {
                Ok(r) => r,
                Err(e) => {
                    eprintln!("{}", e);
                    String::new()
                }
            };

            let input_lines = input.split("\n");
            // BUG FIX: the missing-file message previously named the `.in`
            // file even though it is the `.out` file being opened; also use
            // the lazy `unwrap_or_else` so the message is built only on failure.
            let reader_out = BufReader::new(
                File::open(&out_file).unwrap_or_else(|_| panic!("{} is not found", out_file)),
            )
            .lines();
            for (line_num, (content_in, out_line)) in input_lines.zip(reader_out).enumerate() {
                let content_out = out_line.unwrap();
                if content_in != content_out {
                    eprintln!(
                        "{} Not match    #in :{}    #out :{}",
                        line_num, content_in, content_out
                    );
                }
            }
        }
    }

    /// Smoke test: comments and a hex literal lex without a parse error.
    #[test]
    #[ignore]
    fn test_1() {
        let test_src: &'static str = r"int main()
{
    // line comment
    /*
    block comment
    */
    int i = 0x1;
}
        ";

        println!("test 1 src: {}", test_src);

        match token_print(&test_src) {
            Ok(r) => println!("{}", r),
            Err(e) => eprintln!("{}", e),
        };
    }

    /// Smoke test: `~` is not in the grammar and should surface as an error.
    #[test]
    #[ignore]
    fn test_2() {
        let test_src = r"int main(){
    int i = 1;
    int j = ~i;
}";
        println!("src: {}", test_src);

        match token_print(&test_src) {
            Ok(r) => println!("{}", r),
            Err(e) => eprintln!("{}", e),
        }
    }

    /// NOTE(review): this test only prints the source; it never invokes the
    /// lexer. Presumably a `token_print` call was intended — confirm.
    #[test]
    #[ignore]
    fn test_3() {
        let test_src: &'static str = r#"int func(int arg) {
            int l;
            l = - - - arg;
            return l;
        }

        int main() {
            int x, y;
            x = 02;
            y = 0x1;
            x = x - 1 + y;
            if (+-!!!x) {
                x = - - -2;
            }
            else {
                x = 1 + + y;
            }
            func(x);
            return 0;
        }
        "#;

        println!("src: {}", test_src);
    }

    /// Smoke test: all operators lex to their expected tags.
    #[test]
    #[ignore]
    fn test_4() {
        // let test_src: &'static str = r#"123 0x1a 0XFF 077 0 0x0 00"#;

        let test_src: &'static str = "+ - * / % = == != < > <= >= ! && ||";
        // let test_src: &'static str = "% && ||";
        println!("src: {}", test_src);

        match token_print(&test_src) {
            Ok(r) => println!("{}", r),
            Err(e) => eprintln!("{}", e),
        };
    }

    /// Mixed identifier/integer input.
    /// Note: `2i` should be recognized as INTEGER_CONST(2) followed by
    /// IDENT(i), and `08` is an invalid octal literal.
    #[test]
    #[ignore]
    fn test_mixed_identifiers_integers() {
        let test_mixed = r"
        int 2i = 08;
    ";
        println!("Testing mixed identifiers and integers:");

        match token_print(&test_mixed) {
            Ok(r) => println!("{}", r),
            Err(e) => eprintln!("{}", e),
        };
    }
}

/// Parses an INTEGER_CONST lexeme into an `i64`, honoring C-style radix
/// prefixes: `0x`/`0X` for hexadecimal, a leading `0` for octal, otherwise
/// decimal.
///
/// Invalid literals (e.g. `08`, which is not valid octal) are returned as
/// `Err` for the caller to report.
fn handle_integer(pair: &Pair<Rule>) -> Result<i64, ParseIntError> {
    let num_str = pair.as_str().trim();
    if let Some(hex) = num_str
        .strip_prefix("0x")
        .or_else(|| num_str.strip_prefix("0X"))
    {
        i64::from_str_radix(hex, 16)
    } else if num_str.len() > 1 && num_str.starts_with('0') {
        // A multi-digit literal with a leading zero is octal ("0" alone is
        // plain decimal zero).
        i64::from_str_radix(&num_str[1..], 8)
    } else {
        num_str.parse()
    }
}
