// src/lexer.rs
use crate::error::{Error, Result};
use pest::Parser;
use pest::error::LineColLocation;
use pest::iterators::Pair;
use pest_derive::Parser;

// Import the pest parser
/// pest-generated parser for the grammar in `grammar.pest`.
/// The `#[derive(Parser)]` also generates the `Rule` enum used
/// throughout this module (one variant per grammar rule).
#[derive(Parser)]
#[grammar = "grammar.pest"]
pub struct SysYParser;

/// One lexical token produced by [`parse_tokens`].
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    // Token category (keyword, identifier, operator, ...).
    pub kind: TokenKind,
    // Source text of the token. Integer constants are normalized to
    // their decimal value, not the original spelling (e.g. "0x1" -> "1").
    pub lexeme: String,
    // 1-based source line of the token's first character.
    pub line: usize,
}

/// Categories of tokens recognized by the lexer.
///
/// NOTE(review): `IDent`, `LBrackt`, and `RBrackt` are misspellings of
/// the conventional `Ident`, `LBracket`, `RBracket`. Renaming them
/// would touch every use site (including tests), so it is only flagged
/// here rather than changed.
#[derive(Debug, Clone, PartialEq)]
pub enum TokenKind {
    Int,           // `int` keyword
    IDent,         // identifier
    IntegerConst,  // integer literal (decimal, octal, or hex in source)
    Assign,        // `=`
    Semicolon,     // `;`
    LParen,        // `(`
    RParen,        // `)`
    LBrace,        // `{`
    RBrace,        // `}`
    Plus,          // `+`
    Minus,         // `-`
    Not,           // `!`
    If,            // `if` keyword
    Else,          // `else` keyword
    Return,        // `return` keyword
    Comma,         // `,`
    LBrackt,       // `[`
    RBrackt,       // `]`
}

// =========================
// Public API: Parse and Print
// =========================

/// Lexes the file at `file_path` and reports the result on stderr.
///
/// The only hard failures are a missing file (`Error::FileNotFound`)
/// or an unreadable one (propagated I/O error). A lexical error in the
/// file's *contents* is printed in the required diagnostic format and
/// the function still returns `Ok(())`.
pub fn lex_file(file_path: &str) -> Result<()> {
    // Distinguish "file does not exist" from other read failures.
    if !std::path::Path::new(file_path).exists() {
        return Err(Error::FileNotFound(file_path.to_string()));
    }

    let content = std::fs::read_to_string(file_path)?;

    // Lexical errors are reported, not propagated, so that bad input
    // is treated differently from a missing/unreadable file.
    match parse_tokens(&content) {
        Ok(tokens) => print_tokens(&tokens),
        Err(e) => eprintln!("{}", e),
    }

    Ok(())
}

pub fn parse_tokens(input: &str) -> Result<Vec<Token>> {
    let parse_result = SysYParser::parse(Rule::program, input);

    match parse_result {
        Ok(pairs) => {
            let mut tokens = Vec::new();
            for pair in pairs {
                collect_tokens_from_pair(pair, &mut tokens)?;
            }
            Ok(tokens)
        }
        Err(e) => {
            // ✅ Extract line number from LineColLocation enum
            let line_start = match &e.line_col {
                LineColLocation::Pos((line, _col)) => *line,
                LineColLocation::Span((line_start, _col_start), _) => *line_start,
            };

            // ✅ Extract byte position from InputLocation
            let pos = match &e.location {
                pest::error::InputLocation::Pos(p) => *p,
                pest::error::InputLocation::Span(span) => span.0,
            };

            // ✅ Get the character at that byte position
            let ch = input
                .chars()
                .nth(pos)
                .map(|c| c.to_string())
                .unwrap_or_else(|| "unknown".to_string());

            // ✅ Check if it's a visible, non-whitespace character
            let is_non_whitespace_char = ch != "unknown"
                && ch
                    .chars()
                    .next()
                    .map(|c| !c.is_whitespace())
                    .unwrap_or(false);

            if is_non_whitespace_char {
                return Err(Error::LexicalError {
                    message: format!("Mysterious character \"{}\"", ch),
                    line: line_start,
                });
            } else {
                return Err(Error::LexicalError {
                    message: "Invalid token".to_string(),
                    line: line_start,
                });
            }
        }
    }
}

/// Writes every token to stderr, one per line, in the format
/// `KIND lexeme at Line N.`.
///
/// The kind names are part of the expected output format and must not
/// be changed (note the brackets deliberately print as `LBRACKT` /
/// `RBRACKT`, without the underscore used by `L_PAREN` etc.).
pub fn print_tokens(tokens: &[Token]) {
    for tok in tokens {
        let name = match &tok.kind {
            TokenKind::Int => "INT",
            TokenKind::IDent => "IDENT",
            TokenKind::IntegerConst => "INTEGER_CONST",
            TokenKind::Assign => "ASSIGN",
            TokenKind::Semicolon => "SEMICOLON",
            TokenKind::LParen => "L_PAREN",
            TokenKind::RParen => "R_PAREN",
            TokenKind::LBrace => "L_BRACE",
            TokenKind::RBrace => "R_BRACE",
            TokenKind::Plus => "PLUS",
            TokenKind::Minus => "MINUS",
            TokenKind::Not => "NOT",
            TokenKind::If => "IF",
            TokenKind::Else => "ELSE",
            TokenKind::Return => "RETURN",
            TokenKind::Comma => "COMMA",
            TokenKind::LBrackt => "LBRACKT",
            TokenKind::RBrackt => "RBRACKT",
        };
        eprintln!("{} {} at Line {}.", name, tok.lexeme, tok.line);
    }
}

// =========================
// Internal: Token Collection
// =========================

fn collect_tokens_from_pair(pair: Pair<Rule>, tokens: &mut Vec<Token>) -> Result<()> {
    // eprintln!("DEBUG: Entering rule {:?}", pair.as_rule()); // 🔍 Debug line
    match pair.as_rule() {
        Rule::INT => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Int,
                lexeme: "int".to_string(),
                line,
            });
        }
        Rule::IF => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::If,
                lexeme: "if".to_string(),
                line,
            });
        }
        Rule::ELSE => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Else,
                lexeme: "else".to_string(),
                line,
            });
        }
        Rule::RETURN => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Return,
                lexeme: "return".to_string(),
                line,
            });
        }
        Rule::IDENT => {
            let line = pair.as_span().start_pos().line_col().0;
            let ident = pair.as_str().to_string();
            tokens.push(Token {
                kind: TokenKind::IDent,
                lexeme: ident,
                line,
            });
        }
        Rule::INTEGER_CONST => {
            let line = pair.as_span().start_pos().line_col().0;
            let text = pair.as_str();

            let value = if text.starts_with("0x") || text.starts_with("0X") {
                u64::from_str_radix(&text[2..], 16)
                    .map_err(|_| Error::ParseError("Invalid hex constant".to_string()))?
            } else if text.starts_with('0') && text.len() > 1 {
                u64::from_str_radix(&text[1..], 8)
                    .map_err(|_| Error::ParseError("Invalid octal constant".to_string()))?
            } else {
                text.parse::<u64>()
                    .map_err(|_| Error::ParseError("Invalid integer constant".to_string()))?
            };

            tokens.push(Token {
                kind: TokenKind::IntegerConst,
                lexeme: value.to_string(),
                line,
            });
        }
        Rule::ASSIGN => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Assign,
                lexeme: "=".to_string(),
                line,
            });
        }
        Rule::SEMICOLON => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Semicolon,
                lexeme: ";".to_string(),
                line,
            });
        }
        Rule::L_PAREN => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::LParen,
                lexeme: "(".to_string(),
                line,
            });
        }
        Rule::R_PAREN => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::RParen,
                lexeme: ")".to_string(),
                line,
            });
        }
        Rule::L_BRACE => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::LBrace,
                lexeme: "{".to_string(),
                line,
            });
        }
        Rule::R_BRACE => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::RBrace,
                lexeme: "}".to_string(),
                line,
            });
        }
        Rule::PLUS => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Plus,
                lexeme: "+".to_string(),
                line,
            });
        }
        Rule::MINUS => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Minus,
                lexeme: "-".to_string(),
                line,
            });
        }
        Rule::NOT => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Not,
                lexeme: "!".to_string(),
                line,
            });
        }
        Rule::COMMA => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::Comma,
                lexeme: ",".to_string(),
                line,
            });
        }
        Rule::L_BRACKT => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::LBrackt,
                lexeme: "[".to_string(),
                line,
            });
        }

        Rule::R_BRACKT => {
            let line = pair.as_span().start_pos().line_col().0;
            tokens.push(Token {
                kind: TokenKind::RBrackt,
                lexeme: "]".to_string(),
                line,
            });
        }

        // Recurse into composite rules
        Rule::program
        | Rule::function
        | Rule::type_specifier
        | Rule::compound_stmt
        | Rule::declaration
        | Rule::expression_stmt
        | Rule::expression
        | Rule::assignment_expr
        | Rule::logical_or_expr
        | Rule::logical_and_expr
        | Rule::equality_expr
        | Rule::relational_expr
        | Rule::additive_expr
        | Rule::multiplicative_expr
        | Rule::unary_expr
        | Rule::primary_expr
        | Rule::statement
        | Rule::if_stmt
        | Rule::return_stmt
        | Rule::function_call
        | Rule::arguments
        | Rule::initializer
        | Rule::else_clause
        | Rule::parameter => {
            for inner in pair.into_inner() {
                collect_tokens_from_pair(inner, tokens)?;
            }
        }

        _ => {} // Ignore comments, whitespace
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_example_01() {
        // Line and block comments must be skipped, and the hex literal
        // `0x1` must be normalized to decimal "1".
        let input = r#"int main()
{
   // line comment
   /*
      block comment
   */
   int i = 0x1;
}"#;

        let tokens = parse_tokens(input).expect("Parsing failed");

        use TokenKind::*;

        // Compact constructor keeps the expected-token table readable.
        let tok = |kind: TokenKind, lexeme: &str, line: usize| Token {
            kind,
            lexeme: lexeme.to_string(),
            line,
        };

        let expected = vec![
            tok(Int, "int", 1),
            tok(IDent, "main", 1),
            tok(LParen, "(", 1),
            tok(RParen, ")", 1),
            tok(LBrace, "{", 2),
            tok(Int, "int", 7),
            tok(IDent, "i", 7),
            tok(Assign, "=", 7),
            tok(IntegerConst, "1", 7),
            tok(Semicolon, ";", 7),
            tok(RBrace, "}", 8),
        ];

        assert_eq!(
            tokens, expected,
            "\n\nExpected:\n{:#?}\n\nGot:\n{:#?}\n",
            expected, tokens
        );
    }

    #[test]
    fn test_parse_example_02() {
        // `~` is not in the grammar: lexing must fail on line 3 with
        // the "Mysterious character" diagnostic.
        let input = r#"int main(){
    int i = 1;
    int j = ~i;
    }"#;

        let err = parse_tokens(input).unwrap_err();
        assert_eq!(
            err,
            Error::LexicalError {
                message: "Mysterious character \"~\"".to_string(),
                line: 3,
            }
        );
    }

    #[test]
    fn test_parse_example_03() {
        // Exercises unary operator chains, octal/hex normalization,
        // if/else, comma-separated declarations, and a function call.
        let input = r#"int func(int arg) {
    int l;
    l = - - - arg;
    return l;
}

int main() {
    int x, y;
    x = 02;
    y = 0x1;
    x = x - 1 + y;
    if (+-!!!x) {
        x = - - -2;
    }
    else {
        x = 1 + + y;
    }
    func(x);
    return 0;
}"#;

        let tokens = parse_tokens(input).expect("Parsing failed");

        use TokenKind::*;

        // Compact constructor keeps the expected-token table readable.
        let tok = |kind: TokenKind, lexeme: &str, line: usize| Token {
            kind,
            lexeme: lexeme.to_string(),
            line,
        };

        let expected = vec![
            // func declaration
            tok(Int, "int", 1),
            tok(IDent, "func", 1),
            tok(LParen, "(", 1),
            tok(Int, "int", 1),
            tok(IDent, "arg", 1),
            tok(RParen, ")", 1),
            tok(LBrace, "{", 1),
            tok(Int, "int", 2),
            tok(IDent, "l", 2),
            tok(Semicolon, ";", 2),
            tok(IDent, "l", 3),
            tok(Assign, "=", 3),
            tok(Minus, "-", 3),
            tok(Minus, "-", 3),
            tok(Minus, "-", 3),
            tok(IDent, "arg", 3),
            tok(Semicolon, ";", 3),
            tok(Return, "return", 4),
            tok(IDent, "l", 4),
            tok(Semicolon, ";", 4),
            tok(RBrace, "}", 5),
            // main declaration
            tok(Int, "int", 7),
            tok(IDent, "main", 7),
            tok(LParen, "(", 7),
            tok(RParen, ")", 7),
            tok(LBrace, "{", 7),
            tok(Int, "int", 8),
            tok(IDent, "x", 8),
            tok(Comma, ",", 8),
            tok(IDent, "y", 8),
            tok(Semicolon, ";", 8),
            tok(IDent, "x", 9),
            tok(Assign, "=", 9),
            tok(IntegerConst, "2", 9), // 02 (octal) -> 2
            tok(Semicolon, ";", 9),
            tok(IDent, "y", 10),
            tok(Assign, "=", 10),
            tok(IntegerConst, "1", 10), // 0x1 -> 1
            tok(Semicolon, ";", 10),
            tok(IDent, "x", 11),
            tok(Assign, "=", 11),
            tok(IDent, "x", 11),
            tok(Minus, "-", 11),
            tok(IntegerConst, "1", 11),
            tok(Plus, "+", 11),
            tok(IDent, "y", 11),
            tok(Semicolon, ";", 11),
            tok(If, "if", 12),
            tok(LParen, "(", 12),
            tok(Plus, "+", 12),
            tok(Minus, "-", 12),
            tok(Not, "!", 12),
            tok(Not, "!", 12),
            tok(Not, "!", 12),
            tok(IDent, "x", 12),
            tok(RParen, ")", 12),
            tok(LBrace, "{", 12),
            tok(IDent, "x", 13),
            tok(Assign, "=", 13),
            tok(Minus, "-", 13),
            tok(Minus, "-", 13),
            tok(Minus, "-", 13),
            tok(IntegerConst, "2", 13),
            tok(Semicolon, ";", 13),
            tok(RBrace, "}", 14),
            tok(Else, "else", 15),
            tok(LBrace, "{", 15),
            tok(IDent, "x", 16),
            tok(Assign, "=", 16),
            tok(IntegerConst, "1", 16),
            tok(Plus, "+", 16),
            tok(Plus, "+", 16),
            tok(IDent, "y", 16),
            tok(Semicolon, ";", 16),
            tok(RBrace, "}", 17),
            tok(IDent, "func", 18),
            tok(LParen, "(", 18),
            tok(IDent, "x", 18),
            tok(RParen, ")", 18),
            tok(Semicolon, ";", 18),
            tok(Return, "return", 19),
            tok(IntegerConst, "0", 19),
            tok(Semicolon, ";", 19),
            tok(RBrace, "}", 20),
        ];

        // Compare lengths first, then token-by-token, so a failure
        // pinpoints the first mismatching index instead of dumping
        // both full vectors.
        assert_eq!(
            tokens.len(),
            expected.len(),
            "Token count mismatch: expected {}, got {}",
            expected.len(),
            tokens.len()
        );

        for (idx, (actual, wanted)) in tokens.iter().zip(expected.iter()).enumerate() {
            if actual != wanted {
                panic!(
                    "Token mismatch at index {}:\nExpected: {:?}\nGot:      {:?}\nLine:     {}",
                    idx, wanted, actual, actual.line
                );
            }
        }
    }

    #[test]
    fn test_parse_example_04() {
        let input = r#"int array()
{
    int arr[10] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

    int a1 = 0, a2 = 3, a3 = 5, a4 = 7, a5 = 9, a6 = 1, a7 = 2, a8 = 4,
        a9 = 6;

    return arr[a1] + arr[a2] + arr[a3] + arr[a4] + arr[a7] + arr[a8];
}

int main()
{
    int q = 1, r = 2, s = 04, t = 0x7, u = 0xA, v = 0xb, w = 0xcD, x = 077;

    int sum1 = q + r + s + t + u + v + w + x;

    int sum2 = array();

    int sum3 = sum1 + sum2;

    return 0;
}"#;

        let tokens = parse_tokens(input).expect("Parsing failed");

        use TokenKind::*;

        let expected = vec![
            // array()
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 1,
            },
            Token {
                kind: IDent,
                lexeme: "array".to_string(),
                line: 1,
            },
            Token {
                kind: LParen,
                lexeme: "(".to_string(),
                line: 1,
            },
            Token {
                kind: RParen,
                lexeme: ")".to_string(),
                line: 1,
            },
            Token {
                kind: LBrace,
                lexeme: "{".to_string(),
                line: 2,
            },
            // int arr[10] = {0,1,...9};
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 3,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 3,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "10".to_string(),
                line: 3,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 3,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 3,
            },
            Token {
                kind: LBrace,
                lexeme: "{".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "0".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "1".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "2".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "3".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "4".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "5".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "6".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "7".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "8".to_string(),
                line: 3,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 3,
            },
            Token {
                kind: IntegerConst,
                lexeme: "9".to_string(),
                line: 3,
            },
            Token {
                kind: RBrace,
                lexeme: "}".to_string(),
                line: 3,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 3,
            },
            // int a1=0, a2=3, ..., a9=6;
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a1".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "0".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a2".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "3".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a3".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "5".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a4".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "7".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a5".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "9".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a6".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "1".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a7".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "2".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a8".to_string(),
                line: 5,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 5,
            },
            Token {
                kind: IntegerConst,
                lexeme: "4".to_string(),
                line: 5,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 5,
            },
            Token {
                kind: IDent,
                lexeme: "a9".to_string(),
                line: 6,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 6,
            },
            Token {
                kind: IntegerConst,
                lexeme: "6".to_string(),
                line: 6,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 6,
            },
            // return ...
            Token {
                kind: Return,
                lexeme: "return".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a1".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a2".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a3".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a4".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a7".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "arr".to_string(),
                line: 8,
            },
            Token {
                kind: LBrackt,
                lexeme: "[".to_string(),
                line: 8,
            },
            Token {
                kind: IDent,
                lexeme: "a8".to_string(),
                line: 8,
            },
            Token {
                kind: RBrackt,
                lexeme: "]".to_string(),
                line: 8,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 8,
            },
            Token {
                kind: RBrace,
                lexeme: "}".to_string(),
                line: 9,
            },
            // main()
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 11,
            },
            Token {
                kind: IDent,
                lexeme: "main".to_string(),
                line: 11,
            },
            Token {
                kind: LParen,
                lexeme: "(".to_string(),
                line: 11,
            },
            Token {
                kind: RParen,
                lexeme: ")".to_string(),
                line: 11,
            },
            Token {
                kind: LBrace,
                lexeme: "{".to_string(),
                line: 12,
            },
            // int q=1, r=2, ..., x=077;
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "q".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "1".to_string(),
                line: 13,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "r".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "2".to_string(),
                line: 13,
            },
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "s".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "4".to_string(),
                line: 13,
            }, // 04 → 4
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "t".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "7".to_string(),
                line: 13,
            }, // 0x7 → 7
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "u".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "10".to_string(),
                line: 13,
            }, // 0xA → 10
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "v".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "11".to_string(),
                line: 13,
            }, // 0xb → 11
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "w".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "205".to_string(),
                line: 13,
            }, // 0xcD → 205
            Token {
                kind: Comma,
                lexeme: ",".to_string(),
                line: 13,
            },
            Token {
                kind: IDent,
                lexeme: "x".to_string(),
                line: 13,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 13,
            },
            Token {
                kind: IntegerConst,
                lexeme: "63".to_string(),
                line: 13,
            }, // 077 → 63
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 13,
            },
            // int sum1 = ...
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "sum1".to_string(),
                line: 15,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "q".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "r".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "s".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "t".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "u".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "v".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "w".to_string(),
                line: 15,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 15,
            },
            Token {
                kind: IDent,
                lexeme: "x".to_string(),
                line: 15,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 15,
            },
            // int sum2 = array();
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 17,
            },
            Token {
                kind: IDent,
                lexeme: "sum2".to_string(),
                line: 17,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 17,
            },
            Token {
                kind: IDent,
                lexeme: "array".to_string(),
                line: 17,
            },
            Token {
                kind: LParen,
                lexeme: "(".to_string(),
                line: 17,
            },
            Token {
                kind: RParen,
                lexeme: ")".to_string(),
                line: 17,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 17,
            },
            // int sum3 = sum1 + sum2;
            Token {
                kind: Int,
                lexeme: "int".to_string(),
                line: 19,
            },
            Token {
                kind: IDent,
                lexeme: "sum3".to_string(),
                line: 19,
            },
            Token {
                kind: Assign,
                lexeme: "=".to_string(),
                line: 19,
            },
            Token {
                kind: IDent,
                lexeme: "sum1".to_string(),
                line: 19,
            },
            Token {
                kind: Plus,
                lexeme: "+".to_string(),
                line: 19,
            },
            Token {
                kind: IDent,
                lexeme: "sum2".to_string(),
                line: 19,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 19,
            },
            // return 0;
            Token {
                kind: Return,
                lexeme: "return".to_string(),
                line: 21,
            },
            Token {
                kind: IntegerConst,
                lexeme: "0".to_string(),
                line: 21,
            },
            Token {
                kind: Semicolon,
                lexeme: ";".to_string(),
                line: 21,
            },
            // }
            Token {
                kind: RBrace,
                lexeme: "}".to_string(),
                line: 22,
            },
        ];

        // ✅ Improved assertion
        assert_eq!(
            tokens.len(),
            expected.len(),
            "Token count mismatch: expected {}, got {}",
            expected.len(),
            tokens.len()
        );

        for (i, (got, expected_token)) in tokens.iter().zip(expected.iter()).enumerate() {
            if got != expected_token {
                panic!(
                    "Token mismatch at index {}:\n\
                 Expected: {:?}\n\
                 Got:      {:?}\n\
                 Line:     {}",
                    i, expected_token, got, got.line
                );
            }
        }
    }
}
