//! Token definitions for NRC language

use logos::{Lexer as LogosLexer, Logos};
use std::fmt;

// Custom callback to skip single-line comments (handles Unicode)
// Logos callback: skip a `//` comment up to (but not including) the
// terminating newline. The newline itself is left in the input so the
// whitespace rule consumes it. Searching by byte index keeps multi-byte
// UTF-8 comment text (e.g. CJK) intact.
fn skip_single_line_comment(lex: &mut LogosLexer<Token>) -> logos::Skip {
    let rest = lex.remainder();
    // No newline means the comment runs to end-of-file: consume everything.
    let len = rest.find('\n').unwrap_or(rest.len());
    lex.bump(len);
    logos::Skip
}

// Custom callback to skip multi-line comments (handles Unicode)
// Logos callback: skip a `/* ... */` comment, including the closing `*/`.
// Works on byte indices, so Unicode comment bodies are handled correctly.
fn skip_multi_line_comment(lex: &mut LogosLexer<Token>) -> logos::Skip {
    let rest = lex.remainder();
    match rest.find("*/") {
        // Consume through the terminator ("*/" is 2 bytes).
        Some(pos) => lex.bump(pos + 2),
        // Unterminated comment: swallow the rest of the input.
        None => lex.bump(rest.len()),
    }
    logos::Skip
}

/// All possible tokens in NRC language
#[derive(Logos, Debug, Clone, PartialEq)]
pub enum Token {
    // Literals
    /// Boolean literal true
    #[token("true")]
    True,
    /// Boolean literal false
    #[token("false")]
    False,
    /// Integer literal
    #[regex(r"[0-9]+", |lex| lex.slice().parse::<i64>().unwrap_or(0))]
    Integer(i64),
    /// Floating point literal
    #[regex(r"[0-9]+\.[0-9]+", |lex| lex.slice().parse::<f64>().unwrap_or(0.0))]
    Float(f64),
    /// String literal
    #[regex(r#""([^"\\]|\\.)*""#, |lex| lex.slice()[1..lex.slice().len()-1].to_string())]
    String(String),
    /// Character literal
    #[regex(r"'([^'\\]|\\.)'", |lex| lex.slice().chars().nth(1).unwrap())]
    Char(char),

    // Identifiers (must come before keywords to avoid conflicts)
    /// Identifier token
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string())]
    Identifier(String),

    // Keywords
    /// Function keyword
    #[token("func")]
    Fn,
    /// Let keyword
    #[token("let")]
    Let,
    /// Variable keyword
    #[token("var")]
    Var,
    /// Constant keyword
    #[token("const")]
    Const,
    /// Type keyword
    #[token("type")]
    Type,
    /// Struct keyword
    #[token("struct")]
    Struct,
    /// Enum keyword
    #[token("enum")]
    Enum,
    /// Trait keyword
    #[token("trait")]
    Trait,
    /// Import keyword
    #[token("import")]
    Import,
    /// Implementation keyword
    #[token("impl")]
    Impl,
    /// If keyword
    #[token("if")]
    If,
    /// Else keyword
    #[token("else")]
    Else,
    /// For keyword
    #[token("for")]
    For,
    /// While keyword
    #[token("while")]
    While,
    /// Where keyword
    #[token("where")]
    Where,
    /// Match keyword
    #[token("match")]
    Match,
    /// Go keyword
    #[token("go")]
    Go,
    /// Select keyword
    #[token("select")]
    Select,
    /// Try keyword
    #[token("try")]
    Try,
    /// Catch keyword
    #[token("catch")]
    Catch,
    /// Throw keyword
    #[token("throw")]
    Throw,
    /// Return keyword
    #[token("return")]
    Return,
    /// Break keyword
    #[token("break")]
    Break,
    /// Continue keyword
    #[token("continue")]
    Continue,
    /// Defer keyword
    #[token("defer")]
    Defer,
    /// Recover keyword
    #[token("recover")]
    Recover,
    /// Panic keyword
    #[token("panic")]
    Panic,
    /// New keyword for creating reference-counted objects
    #[token("new")]
    New,
    /// Constructor keyword
    #[token("init")]
    Init,
    /// Destructor keyword
    #[token("drop")]
    Drop,
    /// As keyword
    #[token("as")]
    As,
    /// Export keyword
    #[token("export")]
    Export,
    /// Is keyword
    #[token("is")]
    Is,
    /// Nil keyword
    #[token("nil")]
    Nil,
    /// Any keyword
    #[token("any")]
    Any,
    // Type keywords
    /// Integer type keyword (default int64)
    #[token("int")]
    Int,
    /// 8-bit signed integer type keyword
    #[token("int8")]
    Int8,
    /// 16-bit signed integer type keyword
    #[token("int16")]
    Int16,
    /// 32-bit signed integer type keyword
    #[token("int32")]
    Int32,
    /// 64-bit signed integer type keyword
    #[token("int64")]
    Int64,
    /// Unsigned integer type keyword (default uint64)
    #[token("uint")]
    Uint,
    /// 8-bit unsigned integer type keyword
    #[token("uint8")]
    Uint8,
    /// 16-bit unsigned integer type keyword
    #[token("uint16")]
    Uint16,
    /// 32-bit unsigned integer type keyword
    #[token("uint32")]
    Uint32,
    /// 64-bit unsigned integer type keyword
    #[token("uint64")]
    Uint64,
    /// 32-bit float type keyword
    #[token("float32")]
    Float32,
    /// 64-bit float type keyword
    #[token("float64")]
    Float64,
    /// Boolean type keyword
    #[token("bool")]
    Bool,
    /// Rune type keyword (Unicode code point)
    #[token("rune")]
    Rune,
    /// String type keyword
    #[token("String")]
    StringType,
    /// Vec type keyword
    #[token("vec")]
    Vec,
    /// Vec type keyword (full name)
    #[token("Vec")]
    VecFull,
    /// Map type keyword (full name)
    #[token("Map")]
    MapFull,
    /// Set type keyword
    #[token("set")]
    Set,
    /// Tuple type keyword
    #[token("tup")]
    Tup,
    /// Reference counted type keyword
    #[token("Rc")]
    Rc,
    /// Weak reference type keyword
    #[token("Weak")]
    Weak,

    // Operators
    /// Plus operator
    #[token("+")]
    Plus,
    /// Minus operator
    #[token("-")]
    Minus,
    /// Multiplication operator
    #[token("*")]
    Star,
    /// Asterisk (alias for Star)
    Asterisk,
    /// From keyword
    #[token("from")]
    From,
    /// Division operator
    #[token("/")]
    Slash,
    /// Modulo operator
    #[token("%")]
    Percent,
    /// Bitwise AND operator
    #[token("&")]
    Ampersand,
    /// Bitwise OR operator
    #[token("|")]
    Pipe,
    /// Bitwise XOR operator
    #[token("^")]
    Caret,
    /// Bitwise NOT operator
    #[token("~")]
    Tilde,
    /// Left shift operator
    #[token("<<")]
    LeftShift,
    /// Right shift operator
    #[token(">>")]
    RightShift,
    /// Logical AND operator
    #[token("&&")]
    LogicalAnd,
    /// Logical OR operator
    #[token("||")]
    LogicalOr,
    /// Logical NOT operator
    #[token("!")]
    Not,
    /// Equality operator
    #[token("==")]
    Equal,
    /// Inequality operator
    #[token("!=")]
    NotEqual,
    /// Less than operator
    #[token("<")]
    Less,
    /// Less than or equal operator
    #[token("<=")]
    LessEqual,
    /// Greater than operator
    #[token(">")]
    Greater,
    /// Greater than or equal operator
    #[token(">=")]
    GreaterEqual,

    // Assignment operators
    /// Assignment operator
    #[token("=")]
    Assign,
    /// Add and assign operator
    #[token("+=")]
    PlusAssign,
    /// Subtract and assign operator
    #[token("-=")]
    MinusAssign,
    /// Multiply and assign operator
    #[token("*=")]
    StarAssign,
    /// Divide and assign operator
    #[token("/=")]
    SlashAssign,
    /// Modulo and assign operator
    #[token("%=")]
    PercentAssign,
    /// Bitwise AND and assign operator
    #[token("&=")]
    AmpersandAssign,
    /// Bitwise OR and assign operator
    #[token("|=")]
    PipeAssign,
    /// Bitwise XOR and assign operator
    #[token("^=")]
    CaretAssign,
    /// Left shift and assign operator
    #[token("<<=")]
    LeftShiftAssign,
    /// Right shift and assign operator
    #[token(">>=")]
    RightShiftAssign,

    // Delimiters
    /// Left parenthesis
    #[token("(")]
    LeftParen,
    /// Right parenthesis
    #[token(")")]
    RightParen,
    /// Left bracket
    #[token("[")]
    LeftBracket,
    /// Right bracket
    #[token("]")]
    RightBracket,
    /// Left brace
    #[token("{")]
    LeftBrace,
    /// Right brace
    #[token("}")]
    RightBrace,
    /// Comma
    #[token(",")]
    Comma,
    /// Semicolon
    #[token(";")]
    Semicolon,
    /// Colon
    #[token(":")]
    Colon,
    /// Double colon (namespace separator)
    #[token("::")]
    ColonColon,
    /// Dot
    #[token(".")]
    Dot,
    /// Arrow
    #[token("->")]
    Arrow,
    /// Question mark
    #[token("?")]
    Question,
    /// Backtick (for Go-style field tags)
    #[token("`")]
    Backtick,

    // Special tokens
    /// Underscore
    #[token("_")]
    Underscore,

    // Additional tokens
    /// Default keyword
    #[token("default")]
    Default,
    /// In keyword
    #[token("in")]
    In,
    /// At symbol
    #[token("@")]
    At,
    /// Channel keyword
    #[token("chan")]
    Chan,
    /// Mutable keyword
    #[token("mut")]
    Mut,

    // Comments and whitespace (ignored)
    // Single-line comment: uses custom callback to handle Unicode characters
    #[regex(r"//", skip_single_line_comment)]
    // Multi-line comment: uses custom callback to handle Unicode characters
    #[regex(r"/\*", skip_multi_line_comment)]
    // Whitespace: spaces, tabs, newlines, carriage returns
    #[regex(r"[ \t\n\r]+", logos::skip)]
    /// Error token for invalid input
    Error,
}

/// Token with location information
/// A [`Token`] paired with its position in the source text, for use in
/// diagnostics and error reporting.
#[derive(Debug, Clone, PartialEq)]
pub struct TokenWithLocation {
    /// The token itself
    pub token: Token,
    /// Line number (1-based)
    pub line: usize,
    /// Column number (1-based)
    pub column: usize,
    /// Character offset from the start of the file
    pub offset: usize,
}

impl TokenWithLocation {
    /// Create a new token with location
    pub fn new(token: Token, line: usize, column: usize, offset: usize) -> Self {
        Self {
            token,
            line,
            column,
            offset,
        }
    }
}

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::True => write!(f, "true"),
            Token::False => write!(f, "false"),
            Token::Integer(n) => write!(f, "{}", n),
            Token::Float(n) => write!(f, "{}", n),
            Token::String(s) => write!(f, "\"{}\"", s),
            Token::Char(c) => write!(f, "'{}'", c),
            Token::Identifier(s) => write!(f, "{}", s),
            Token::Plus => write!(f, "+"),
            Token::Minus => write!(f, "-"),
            Token::Star => write!(f, "*"),
            Token::Slash => write!(f, "/"),
            Token::Percent => write!(f, "%"),
            Token::Ampersand => write!(f, "&"),
            Token::Pipe => write!(f, "|"),
            Token::Caret => write!(f, "^"),
            Token::Tilde => write!(f, "~"),
            Token::LeftShift => write!(f, "<<"),
            Token::RightShift => write!(f, ">>"),
            Token::LogicalAnd => write!(f, "&&"),
            Token::LogicalOr => write!(f, "||"),
            Token::Not => write!(f, "!"),
            Token::Equal => write!(f, "=="),
            Token::NotEqual => write!(f, "!="),
            Token::Less => write!(f, "<"),
            Token::LessEqual => write!(f, "<="),
            Token::Greater => write!(f, ">"),
            Token::GreaterEqual => write!(f, ">="),
            Token::Assign => write!(f, "="),
            Token::LeftParen => write!(f, "("),
            Token::RightParen => write!(f, ")"),
            Token::LeftBracket => write!(f, "["),
            Token::RightBracket => write!(f, "]"),
            Token::LeftBrace => write!(f, "{{"),
            Token::RightBrace => write!(f, "}}"),
            Token::Comma => write!(f, ","),
            Token::Semicolon => write!(f, ";"),
            Token::Colon => write!(f, ":"),
            Token::ColonColon => write!(f, "::"),
            Token::Dot => write!(f, "."),
            Token::Arrow => write!(f, "->"),
            Token::Question => write!(f, "?"),
            Token::Underscore => write!(f, "_"),
            Token::Error => write!(f, "<error>"),
            _ => write!(f, "{:?}", self),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Lexing a small function definition yields the expected token stream.
    #[test]
    fn test_token_parsing() {
        let tokens: Vec<_> = Token::lexer("func main() { return 42; }").collect();
        assert_eq!(
            tokens,
            vec![
                Ok(Token::Fn),
                Ok(Token::Identifier("main".to_string())),
                Ok(Token::LeftParen),
                Ok(Token::RightParen),
                Ok(Token::LeftBrace),
                Ok(Token::Return),
                Ok(Token::Integer(42)),
                Ok(Token::Semicolon),
                Ok(Token::RightBrace),
            ]
        );
    }

    /// A quoted string lexes to a `String` token without the quotes.
    #[test]
    fn test_string_literal() {
        let mut lexer = Token::lexer(r#""hello world""#);
        let expected = Token::String("hello world".to_string());
        assert_eq!(lexer.next(), Some(Ok(expected)));
    }

    /// A decimal literal lexes to a `Float` token.
    #[test]
    fn test_float_literal() {
        let mut lexer = Token::lexer("3.14159");
        assert_eq!(lexer.next(), Some(Ok(Token::Float(3.14159))));
    }

    /// A `//` comment containing non-ASCII text is skipped entirely.
    #[test]
    fn test_chinese_comment() {
        let tokens: Vec<_> = Token::lexer("func // 中文注释\nmain").collect();
        println!("Tokens: {:?}", tokens);

        assert_eq!(
            tokens,
            vec![Ok(Token::Fn), Ok(Token::Identifier("main".to_string()))]
        );
    }

    /// A leading non-ASCII comment does not disturb the tokens that follow.
    #[test]
    fn test_chinese_comment_multiline() {
        let source = "// 中文注释\nfunc main() int32 {\n    return 0\n}";
        let tokens: Vec<_> = Token::lexer(source).collect();
        println!("Source: {}", source);
        println!("Tokens: {:?}", tokens);

        assert_eq!(
            tokens,
            vec![
                Ok(Token::Fn),
                Ok(Token::Identifier("main".to_string())),
                Ok(Token::LeftParen),
                Ok(Token::RightParen),
                Ok(Token::Int32),
                Ok(Token::LeftBrace),
                Ok(Token::Return),
                Ok(Token::Integer(0)),
                Ok(Token::RightBrace),
            ]
        );
    }
}
