//! Token definitions for the Cangjie lexer
//!
//! This module defines all token types and the Token structure used by the lexer.

use std::fmt;

/// Token types
///
/// A fieldless tag for every lexical category the lexer can produce.
/// Derives `Eq` and `Hash` (in addition to `PartialEq`/`Copy`) so token
/// types can be used directly as `HashMap`/`HashSet` keys, e.g. in a
/// keyword lookup table, and compared with `==` in `match` guards.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TokenType {
    // Literals
    Identifier,
    IntegerLiteral,
    FloatLiteral,
    StringLiteral,
    MultilineString,
    BoolLiteral,

    // Keywords
    Package,
    Import,
    Class,
    Interface,
    Struct,
    Enum,
    Func,
    Let,
    Var,
    Const,
    Type,
    Init,
    This,
    Super,
    If,
    Else,
    Try,
    Catch,
    Finally,
    For,
    While,
    Do,
    Return,
    Break,
    Continue,
    Match,
    Case,
    Where,
    Extend,
    With,
    Prop,
    Static,
    Public,
    Private,
    Internal,
    Protected,
    Override,
    Redef,
    Abstract,
    Sealed,
    Open,
    Foreign,
    Inout,
    Mut,
    Unsafe,
    Operator,
    Spawn,
    Synchronized,
    Main,

    // Built-in type names
    Int8,
    Int16,
    Int32,
    Int64,
    IntNative,
    UInt8,
    UInt16,
    UInt32,
    UInt64,
    UIntNative,
    Float16,
    Float32,
    Float64,
    Rune,
    Boolean,
    Nothing,
    Unit,
    ThisType,
    VArray,

    // Operators and punctuation
    Dot,
    Comma,
    LParen,
    RParen,
    LSquare,
    RSquare,
    LCurl,
    RCurl,
    Exp,
    Mul,
    Mod,
    Div,
    Add,
    Sub,
    Pipeline,
    Composition,
    Inc,
    Dec,
    And,
    Or,
    Coalescing,
    BitAnd,
    BitOr,
    BitXor,
    BitNot,
    LShift,
    RShift,
    Colon,
    Semi,
    Assign,
    AddAssign,
    SubAssign,
    MulAssign,
    ExpAssign,
    DivAssign,
    ModAssign,
    AndAssign,
    OrAssign,
    BitAndAssign,
    BitOrAssign,
    BitXorAssign,
    LShiftAssign,
    RShiftAssign,
    Arrow,
    BackArrow,
    DoubleArrow,
    RangeOp,
    ClosedRangeOp,
    Ellipsis,
    Hash,
    At,
    Quest,
    Lt,
    Gt,
    Le,
    Ge,
    Is,
    As,
    NotEq,
    Equal,
    UpperBound,
    Wildcard,

    // Special / synthetic tokens
    Newline,
    Comment,
    EndOfFile,
    Illegal,
}

/// Token structure
///
/// A single lexeme produced by the lexer, carrying its category, the
/// exact source text, and its 1-based position in the input.
/// Derives `PartialEq` so tokens can be compared directly in tests and
/// parser lookahead checks (every field already supports equality).
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    /// Lexical category of this token.
    pub token_type: TokenType,
    /// The exact source text of the lexeme (empty for `EndOfFile`).
    pub value: String,
    /// 1-based line number where the token starts.
    pub line: usize,
    /// 1-based column number where the token starts.
    pub column: usize,
}

impl Token {
    pub fn new(token_type: TokenType, value: String, line: usize, column: usize) -> Self {
        Self {
            token_type,
            value,
            line,
            column,
        }
    }
}

impl fmt::Display for Token {
    /// Renders a token as `line:column - Type = "value"`, e.g.
    /// `1:1 - Package = "package"`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{line}:{col} - {ty:?} = \"{text}\"",
            line = self.line,
            col = self.column,
            ty = self.token_type,
            text = self.value,
        )
    }
}

/// Cangjie Lexer
///
/// The lexer converts Cangjie source code into a stream of tokens
/// that can be consumed by the parser.
pub struct Lexer {
    // Full source text being tokenized (owned copy of the input).
    source: String,
    // Byte offset of the next character to scan; starts at 0.
    pos: usize,
    // Current 1-based line number.
    line: usize,
    // Current 1-based column number.
    column: usize,
    // Accumulated tokens. NOTE(review): the visible placeholder
    // `tokenize` never reads or writes this field — presumably the
    // real implementation will push into it; confirm when it lands.
    tokens: Vec<Token>,
}

impl Lexer {
    /// Create a new lexer from source code
    pub fn new(source: &str) -> Self {
        Self {
            source: source.to_string(),
            pos: 0,
            line: 1,
            column: 1,
            tokens: Vec::new(),
        }
    }

    /// Tokenize the source code into a vector of tokens
    pub fn tokenize(&mut self) -> Vec<Token> {
        // This is a placeholder implementation
        // The real implementation will be added later
        vec![
            Token::new(TokenType::Package, "package".to_string(), 1, 1),
            Token::new(TokenType::Identifier, "sample".to_string(), 1, 9),
            Token::new(TokenType::Dot, ".".to_string(), 1, 15),
            Token::new(TokenType::Identifier, "test".to_string(), 1, 16),
            Token::new(TokenType::Newline, "\n".to_string(), 1, 20),
            Token::new(TokenType::Import, "import".to_string(), 2, 1),
            Token::new(TokenType::Identifier, "std".to_string(), 2, 8),
            Token::new(TokenType::Dot, ".".to_string(), 2, 11),
            Token::new(TokenType::Identifier, "io".to_string(), 2, 12),
            Token::new(TokenType::Dot, ".".to_string(), 2, 14),
            Token::new(TokenType::Mul, "*".to_string(), 2, 15),
            Token::new(TokenType::Newline, "\n".to_string(), 2, 16),
            Token::new(TokenType::Public, "public".to_string(), 3, 1),
            Token::new(TokenType::Class, "class".to_string(), 3, 8),
            Token::new(TokenType::Identifier, "Person".to_string(), 3, 14),
            Token::new(TokenType::LCurl, "{".to_string(), 3, 21),
            Token::new(TokenType::Newline, "\n".to_string(), 3, 22),
            Token::new(TokenType::Var, "var".to_string(), 4, 5),
            Token::new(TokenType::Identifier, "name".to_string(), 4, 9),
            Token::new(TokenType::Colon, ":".to_string(), 4, 13),
            Token::new(TokenType::Identifier, "String".to_string(), 4, 15),
            Token::new(TokenType::Newline, "\n".to_string(), 4, 21),
            Token::new(TokenType::Var, "var".to_string(), 5, 5),
            Token::new(TokenType::Identifier, "age".to_string(), 5, 9),
            Token::new(TokenType::Colon, ":".to_string(), 5, 12),
            Token::new(TokenType::Int32, "Int32".to_string(), 5, 14),
            Token::new(TokenType::Newline, "\n".to_string(), 5, 19),
            Token::new(TokenType::Newline, "\n".to_string(), 6, 5),
            Token::new(TokenType::Init, "init".to_string(), 7, 5),
            Token::new(TokenType::LParen, "(".to_string(), 7, 9),
            Token::new(TokenType::Identifier, "name".to_string(), 7, 10),
            Token::new(TokenType::Colon, ":".to_string(), 7, 14),
            Token::new(TokenType::Identifier, "String".to_string(), 7, 16),
            Token::new(TokenType::Comma, ",".to_string(), 7, 22),
            Token::new(TokenType::Identifier, "age".to_string(), 7, 24),
            Token::new(TokenType::Colon, ":".to_string(), 7, 27),
            Token::new(TokenType::Int32, "Int32".to_string(), 7, 29),
            Token::new(TokenType::RParen, ")".to_string(), 7, 34),
            Token::new(TokenType::LCurl, "{".to_string(), 7, 36),
            Token::new(TokenType::Newline, "\n".to_string(), 7, 37),
            Token::new(TokenType::This, "this".to_string(), 8, 9),
            Token::new(TokenType::Dot, ".".to_string(), 8, 13),
            Token::new(TokenType::Identifier, "name".to_string(), 8, 14),
            Token::new(TokenType::Assign, "=".to_string(), 8, 19),
            Token::new(TokenType::Identifier, "name".to_string(), 8, 21),
            Token::new(TokenType::Newline, "\n".to_string(), 8, 25),
            Token::new(TokenType::This, "this".to_string(), 9, 9),
            Token::new(TokenType::Dot, ".".to_string(), 9, 13),
            Token::new(TokenType::Identifier, "age".to_string(), 9, 14),
            Token::new(TokenType::Assign, "=".to_string(), 9, 18),
            Token::new(TokenType::Identifier, "age".to_string(), 9, 20),
            Token::new(TokenType::Newline, "\n".to_string(), 9, 23),
            Token::new(TokenType::RCurl, "}".to_string(), 10, 5),
            Token::new(TokenType::Newline, "\n".to_string(), 10, 6),
            Token::new(TokenType::Newline, "\n".to_string(), 11, 5),
            Token::new(TokenType::Public, "public".to_string(), 12, 5),
            Token::new(TokenType::Func, "func".to_string(), 12, 12),
            Token::new(TokenType::Identifier, "greet".to_string(), 12, 17),
            Token::new(TokenType::LParen, "(".to_string(), 12, 22),
            Token::new(TokenType::RParen, ")".to_string(), 12, 23),
            Token::new(TokenType::Colon, ":".to_string(), 12, 24),
            Token::new(TokenType::Identifier, "String".to_string(), 12, 26),
            Token::new(TokenType::LCurl, "{".to_string(), 12, 33),
            Token::new(TokenType::Newline, "\n".to_string(), 12, 34),
            Token::new(TokenType::Return, "return".to_string(), 13, 9),
            Token::new(TokenType::StringLiteral, "\"Hello, my name is ${this.name} and I am ${this.age} years old.\"".to_string(), 13, 16),
            Token::new(TokenType::Newline, "\n".to_string(), 13, 80),
            Token::new(TokenType::RCurl, "}".to_string(), 14, 5),
            Token::new(TokenType::Newline, "\n".to_string(), 14, 6),
            Token::new(TokenType::RCurl, "}".to_string(), 15, 1),
            Token::new(TokenType::Newline, "\n".to_string(), 15, 2),
            Token::new(TokenType::EndOfFile, "".to_string(), 16, 1),
        ]
    }
}