#!/usr/bin/env python3
# Generate clean Rust files for Cangjie parser

import os

def write_clean_file(filepath, content):
    """Write `content` to `filepath` as UTF-8 with Unix (LF) line endings.

    Creates any missing parent directories first, so the script works from
    a fresh checkout where `src/` may not exist yet (the original version
    crashed with FileNotFoundError in that case).

    Args:
        filepath: Destination path, e.g. 'src/token.rs'.
        content:  Full file text; written verbatim.
    """
    # Ensure the target directory exists; guard against a bare filename
    # whose dirname is '' (os.makedirs('') would raise).
    parent = os.path.dirname(filepath)
    if parent:
        os.makedirs(parent, exist_ok=True)
    # newline='\n' stops Python translating '\n' to '\r\n' on Windows,
    # keeping the generated Rust files byte-identical on every platform.
    with open(filepath, 'w', encoding='utf-8', newline='\n') as f:
        f.write(content)

# Create token.rs
#
# The entire generated file lives in one triple-quoted literal so the
# output is exactly the text below (modulo UTF-8/LF handling in
# write_clean_file). NOTE: this is Rust source embedded in a Python
# string — keep the 4-space indentation and escapes intact when editing.
token_content = '''use std::fmt;

#[derive(Debug, Clone, PartialEq)]
pub enum TokenType {
    // Literals
    Identifier,
    IntegerLiteral,
    FloatLiteral,
    StringLiteral,
    BoolLiteral,
    
    // Keywords
    Package,
    Import,
    Class,
    Interface,
    Struct,
    Enum,
    Func,
    Let,
    Var,
    Const,
    Type,
    Init,
    This,
    Super,
    If,
    Else,
    Try,
    Catch,
    Finally,
    For,
    While,
    Do,
    Return,
    Break,
    Continue,
    Match,
    Case,
    Where,
    Extend,
    With,
    Prop,
    Static,
    Public,
    Private,
    Internal,
    Protected,
    Override,
    Redef,
    Abstract,
    Sealed,
    Open,
    Foreign,
    Inout,
    Mut,
    Unsafe,
    Operator,
    Spawn,
    Synchronized,
    Main,
    
    // Types
    Int8,
    Int16,
    Int32,
    Int64,
    IntNative,
    UInt8,
    UInt16,
    UInt32,
    UInt64,
    UIntNative,
    Float16,
    Float32,
    Float64,
    Rune,
    Boolean,
    Nothing,
    Unit,
    ThisType,
    VArray,
    
    // Operators
    Dot,
    Comma,
    LParen,
    RParen,
    LSquare,
    RSquare,
    LCurl,
    RCurl,
    Exp,
    Mul,
    Mod,
    Div,
    Add,
    Sub,
    Pipeline,
    Composition,
    Inc,
    Dec,
    And,
    Or,
    Not,
    BitAnd,
    BitOr,
    BitXor,
    BitNot,
    LShift,
    RShift,
    Colon,
    Semi,
    Assign,
    AddAssign,
    SubAssign,
    MulAssign,
    ExpAssign,
    DivAssign,
    ModAssign,
    AndAssign,
    OrAssign,
    BitAndAssign,
    BitOrAssign,
    BitXorAssign,
    LShiftAssign,
    RShiftAssign,
    Arrow,
    BackArrow,
    DoubleArrow,
    RangeOp,
    ClosedRangeOp,
    Ellipsis,
    Hash,
    At,
    Quest,
    Lt,
    Gt,
    Le,
    Ge,
    Is,
    As,
    NotEq,
    Equal,
    UpperBound,
    Wildcard,
    
    // Special
    Newline,
    Comment,
    EndOfFile,
    Illegal,
}

#[derive(Debug, Clone)]
pub struct Token {
    pub token_type: TokenType,
    pub value: String,
    pub line: usize,
    pub column: usize,
}

impl Token {
    pub fn new(token_type: TokenType, value: String, line: usize, column: usize) -> Self {
        Self {
            token_type,
            value,
            line,
            column,
        }
    }
}

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}:{} - {:?} = \"{}\"", self.line, self.column, self.token_type, self.value)
    }
}
'''

# Emit the token/TokenType definitions for the Cangjie lexer.
write_clean_file('src/token.rs', token_content)
print("Created src/token.rs")

# Create lib.rs
#
# Minimal crate root: re-exports the token module and carries a trivial
# smoke test so `cargo test` has something to run immediately.
lib_content = '''pub mod token;

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
'''

write_clean_file('src/lib.rs', lib_content)
print("Created src/lib.rs")

# Final status line; leading \n separates it from the per-file messages.
print("\nSuccessfully created clean Rust files!")