//! # Lexical Analyzer
//!
//! A lexical analyzer (tokenizer) built with the Pest parser.
//!
//! ## Example Usage
//!
//! To use this crate, you can write a program like this:
//!
//! ```rust
//! use compiler::lexer::tokenize;
//! use compiler::token::Token;
//! use compiler::util::init_logger;
//! use log::debug;
//!
//! fn main() {
//!     // Initialize the log system
//!     init_logger();
//!     
//!     let input = "123 abc"; // Example input
//!     debug!("Processing input: {}", input);
//!     
//!     let (tokens, errors) = tokenize(input);
//!     debug!("Tokenization complete. Found {} tokens and {} errors", tokens.len(), errors.len());
//!     for (token, line, text) in tokens {
//!         println!("{} {} at Line {}.", token, text, line);
//!     }
//!     debug!("Compiler execution completed");
//! }
//! ```
//!
//! The `tokenize` function processes the input and generates tokens.
//! 
/// Lexical analysis: exposes `tokenize`, which scans input text into a
/// list of tokens plus any lexical errors (see the crate-level example).
pub mod lexer;
/// Token definitions, including the `Token` type produced by the lexer.
pub mod token;
/// Shared utilities, including `init_logger` for initializing the `log`
/// backend used throughout the crate.
pub mod util;