use super::reader::{Reader, Span};

/// Streaming tokenizer over a borrowed source string.
///
/// Wraps a [`Reader`] and produces [`TokenItem`]s on demand, with
/// single-token lookahead (`peek`) and save/restore of the full lexer
/// state (`push`/`pop`) for speculative parsing.
pub struct Lexer<'src> {
	// Cursor over the remaining source text.
	reader: Reader<'src>,
	// One-token lookahead cache filled by `peek`, drained by `next`.
	peek: Option<TokenItem<'src>>,
	// Saved (reader, lookahead) states for backtracking via `push`/`pop`.
	stack: Vec<(Reader<'src>, Option<TokenItem<'src>>)>,
}
impl<'src> Lexer<'src> {
	pub fn new(source: &'src str) -> Self {
		Self {
			reader: Reader::new(source),
			peek: None,
			stack: Vec::new(),
		}
	}
	pub fn span(&self) -> Span {
		self.reader.span()
	}
	pub fn push(&mut self) {
		self.stack.push((self.reader.clone(), self.peek.clone()));
	}
	pub fn pop(&mut self, back: bool) {
		let (reader, peek) = self.stack.pop().unwrap();
		if back {
			self.reader = reader;
			self.peek = peek;
		}
	}
	pub fn rest(&mut self) -> Option<&'src str> {
		self.reader.rest()
	}
	pub fn peek(&mut self) -> LexerResult<'src> {
		match self.peek {
			Some(item) => Ok(item),
			None => {
				let next = self.next()?;
				self.peek = Some(next);
				Ok(next)
			}
		}
	}
	pub fn next(&mut self) -> LexerResult<'src> {
		if let Some(item) = self.peek.take() {
			return Ok(item);
		}
		loop {
			let token = self.next_()?;
			if let Token::Comment(..) = token.token {
				continue;
			}
			// println!("{:?}", token);
			return Ok(token);
		}
	}
	fn next_(&mut self) -> LexerResult<'src> {
		fn ident_begin(c: char) -> bool {
			c.is_alphabetic() || c == '_' || c == '$'
		}
		self.reader.eat_while(char::is_whitespace).ok();
		let r = &mut self.reader;
		match r.peek().ok_or(Error::End)? {
			'.' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('.') => {
						r.eat().map_err(Error::UnexpectChar)?;
						match r.peek() {
							Some('.') => {
								r.eat().map_err(Error::UnexpectChar)?;
								Ok(Token::DotDotDot.with(self.span()))
							}
							_ => Err(Error::UnexpectChar(self.span())),
						}
					}
					_ => Ok(Token::Dot.with(self.span())),
				}
			}
			'{' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Brace(true).with(self.span()))
			}
			'}' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Brace(false).with(self.span()))
			}
			'(' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Paren(true).with(self.span()))
			}
			')' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Paren(false).with(self.span()))
			}
			'[' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Bracket(true).with(self.span()))
			}
			']' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Bracket(false).with(self.span()))
			}
			';' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Semi.with(self.span()))
			}
			',' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Comma.with(self.span()))
			}
			'-' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some(c) if c.is_numeric() => self.number(),
					_ => Ok(Token::Minus.with(self.span())),
				}
			}
			'+' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Plus.with(self.span()))
			}
			'*' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Star.with(self.span()))
			}
			'%' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Percent.with(self.span()))
			}
			'!' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('=') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::BangEq.with(self.span()))
					}
					_ => Ok(Token::Bang.with(self.span())),
				}
			}
			'~' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Tilde.with(self.span()))
			}
			'^' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Caret.with(self.span()))
			}
			'/' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('/') => {
						r.eat().map_err(Error::UnexpectChar)?;
						self.comment_line()
					}
					Some('*') => {
						r.eat().map_err(Error::UnexpectChar)?;
						self.comment_block()
					}
					Some(c) if c.is_alphabetic() => {
						self.reader.eat_until_char('/').map_err(Error::UnexpectChar)?;
						self.reader.next();
						Err(Error::UnsupportChar(c, self.span()))
					}
					_ => Ok(Token::Slash.with(self.span())),
				}
			}
			'=' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('=') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::EqEq.with(self.span()))
					}
					Some('>') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::FatArrow.with(self.span()))
					}
					_ => Ok(Token::Assign.with(self.span())),
				}
			}
			'<' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('=') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::LessEq.with(self.span()))
					}
					// Some('<') => {
					// 	r.eat().map_err(Error::UnexpectChar)?;
					// 	Ok(Token::Shl.with(self.span()))
					// }
					_ => Ok(Token::Less.with(self.span())),
				}
			}
			'>' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('=') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::GreaterEq.with(self.span()))
					}
					// Some('>') => {
					// 	r.eat().map_err(Error::UnexpectChar)?;
					// 	Ok(Token::Shr.with(self.span()))
					// }
					_ => Ok(Token::Greater.with(self.span())),
				}
			}
			'&' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('&') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::AndAnd.with(self.span()))
					}
					_ => Ok(Token::And.with(self.span())),
				}
			}
			'|' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('|') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::OrOr.with(self.span()))
					}
					_ => Ok(Token::Or.with(self.span())),
				}
			}
			'?' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some('?') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::QuestionQuestion.with(self.span()))
					}
					Some('.') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::QuestionDot.with(self.span()))
					}
					_ => Ok(Token::Question.with(self.span())),
				}
			}
			':' => {
				r.eat().map_err(Error::UnexpectChar)?;
				match r.peek() {
					Some(':') => {
						r.eat().map_err(Error::UnexpectChar)?;
						Ok(Token::ColonColon.with(self.span()))
					}
					_ => Ok(Token::Colon.with(self.span())),
				}
			}
			'#' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Hash.with(self.span()))
			}
			'@' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::At.with(self.span()))
			}
			'\\' => {
				r.eat().map_err(Error::UnexpectChar)?;
				Ok(Token::Backslash.with(self.span()))
			}
			c if c == '`' => self.template(),
			c if c == '"' || c == '\'' => self.string(),
			c if c.is_digit(10) => self.number(),
			c if ident_begin(c) => self.ident(),
			c => Err(Error::UnsupportChar(c, self.span())),
		}
	}
	fn ident(&mut self) -> LexerResult<'src> {
		fn eat(r: &mut Reader<'_>) -> Result<(), Span> {
			fn ident_continue(c: char) -> bool {
				c.is_alphanumeric() || c == '_' || c == '$'
			}
			r.eat_while(ident_continue)
		}
		Ok(TokenItem {
			span: self.span(),
			token: Token::Ident(self.reader.cap(eat).map_err(Error::UnexpectChar)?),
		})
	}
	fn number(&mut self) -> LexerResult<'src> {
		fn eat(r: &mut Reader<'_>) -> Result<(), Span> {
			r.eat_while(|c| c.is_ascii_digit() || c.is_ascii_hexdigit() || c == 'o' || c == '.')
		}
		Ok(TokenItem {
			span: self.span(),
			token: Token::Number(self.reader.cap(eat).map_err(Error::UnexpectChar)?),
		})
	}
	fn string(&mut self) -> LexerResult<'src> {
		let style = self.reader.eat().map_err(Error::UnexpectChar)?;
		let eat = |r: &mut Reader<'_>| {
			loop {
				r.eat_until_any("\"\'\\")?;
				let c = r.peek().ok_or(r.span())?;
				match c {
					'\\' => {
						r.eat()?;
						match r.peek() {
							Some(v) if v == style => {
								r.eat()?;
							}
							_ => {}
						}
					}
					v if v == style => break,
					_ => {
						r.eat()?;
					}
				}
			}
			Ok(())
		};
		let ret = self.reader.cap(eat).map_err(Error::UnexpectChar)?;
		self.reader.eat().map_err(Error::UnexpectChar)?;
		Ok(TokenItem {
			span: self.span(),
			token: Token::String(ret),
		})
	}
	fn template(&mut self) -> LexerResult<'src> {
		let _style = self.reader.eat().map_err(Error::UnexpectChar)?;
		let eat = |r: &mut Reader<'_>| {
			loop {
				r.eat_until_any("`\\${")?;
				let c = r.peek().ok_or(r.span())?;
				match c {
					'\\' => {
						// 处理转义字符
						r.eat()?;
						match r.peek() {
							Some('`') | Some('\\') | Some('$') | Some('{') => {
								// 转义这些特殊字符
								r.eat()?;
							}
							Some(_) => {
								// 其他转义序列，如 \n, \t 等
								r.eat()?;
							}
							None => return Err(r.span()),
						}
					}
					'$' => {
						// 可能遇到插值表达式 ${...}
						r.eat()?;
						if let Some('{') = r.peek() {
							// 进入插值表达式处理
							r.eat()?; // 吃掉 '{'

							// 递归处理插值表达式中的内容
							let mut brace_count = 1;
							while brace_count > 0 {
								r.eat_until_any("{}`\\")?;
								let c = r.peek().ok_or(r.span())?;
								match c {
									'{' => {
										r.eat()?;
										brace_count += 1;
									}
									'}' => {
										r.eat()?;
										brace_count -= 1;
									}
									'\\' => {
										// 在插值表达式中也要处理转义
										r.eat()?;
										if r.peek().is_some() {
											r.eat()?;
										}
									}
									'`' => {
										// 模板字符串未正确闭合
										return Err(r.span());
									}
									_ => unreachable!(),
								}
							}
						}
						// 如果不是 ${，就继续作为普通字符处理
					}
					'`' => break, // 模板字符串结束
					_ => {
						r.eat()?;
					}
				}
			}
			Ok(())
		};

		let ret = self.reader.cap(eat).map_err(Error::UnexpectChar)?;
		self.reader.eat().map_err(Error::UnexpectChar)?; // 吃掉结束的反引号

		Ok(TokenItem {
			span: self.span(),
			token: Token::Template(ret),
		})
	}
	fn comment_line(&mut self) -> LexerResult<'src> {
		fn eat(r: &mut Reader<'_>) -> Result<(), Span> {
			r.eat_until_char('\n')?;
			r.eat_char('\n')
		}
		Ok(TokenItem {
			span: self.span(),
			token: Token::Comment(self.reader.cap(eat).map_err(Error::UnexpectChar)?),
		})
	}
	fn comment_block(&mut self) -> LexerResult<'src> {
		fn eat(r: &mut Reader<'_>) -> Result<(), Span> {
			loop {
				r.eat_until_char('*')?;
				r.eat_char('*')?;
				if r.peek() == Some('/') {
					r.eat_char('/')?;
					return Ok(());
				}
			}
		}
		Ok(TokenItem {
			span: self.span(),
			token: Token::Comment(self.reader.cap(eat).map_err(Error::UnexpectChar)?),
		})
	}
}
impl<'src> Lexer<'src> {
	pub fn expect(&mut self, token: Token<'static>) -> Result<(), Error<'src>> {
		match self.next()? {
			TokenItem { token: v, .. } if v == token => Ok(()),
			v => {
				Err(Error::Unexpect(v.span, v.token, token)) //
			}
		}
	}
	pub fn expect_ident_eq(&mut self, name: &'src str) -> Result<&'src str, Error<'src>> {
		match self.next()? {
			TokenItem {
				token: Token::Ident(v), ..
			} if v == name => Ok(v),
			v => Err(Error::Unexpect(v.span, v.token, Token::Ident(name))),
		}
	}
	pub fn expect_ident(&mut self) -> Result<&'src str, Error<'src>> {
		match self.next()? {
			TokenItem {
				token: Token::Ident(v), ..
			} => Ok(v),
			v => Err(Error::Unexpect(v.span, v.token, Token::Ident(""))),
		}
	}
	pub fn expect_string(&mut self) -> Result<&'src str, Error<'src>> {
		match self.next()? {
			TokenItem {
				token: Token::String(v), ..
			} => Ok(v),
			v => Err(Error::Unexpect(v.span, v.token, Token::String(""))),
		}
	}
	#[allow(unused)]
	pub fn expect_number(&mut self) -> Result<&'src str, Error<'src>> {
		match self.next()? {
			TokenItem {
				token: Token::Number(v), ..
			} => Ok(v),
			v => Err(Error::Unexpect(v.span, v.token, Token::Number(""))),
		}
	}
}
impl<'src> Lexer<'src> {
	/// Peek the next token; return its text if it is an identifier.
	pub fn peek_ident(&mut self) -> Option<&'src str> {
		match self.peek().ok()?.token {
			Token::Ident(v) => Some(v),
			_ => None,
		}
	}
	/// Peek the next token; return its text if it is a string literal.
	pub fn peek_string(&mut self) -> Option<&'src str> {
		match self.peek().ok()?.token {
			Token::String(v) => Some(v),
			_ => None,
		}
	}
	/// Peek the next token; return its text if it is a number literal.
	pub fn peek_number(&mut self) -> Option<&'src str> {
		match self.peek().ok()?.token {
			Token::Number(v) => Some(v),
			_ => None,
		}
	}
	/// Peek the next token; return its text if it is a string or number literal.
	#[allow(unused)]
	pub fn peek_value(&mut self) -> Option<&'src str> {
		match self.peek().ok()?.token {
			Token::String(v) | Token::Number(v) => Some(v),
			_ => None,
		}
	}
}
impl<'src> Lexer<'src> {
	/// Whether the next token equals `token` (does not consume it).
	pub fn is(&mut self, token: Token<'static>) -> bool {
		self.peek().map_or(false, |v| v.token == token)
	}
	/// Consume the next token if it equals `token`; report whether it did.
	pub fn if_is_next(&mut self, token: Token<'static>) -> bool {
		let hit = self.is(token);
		if hit {
			self.next().ok();
		}
		hit
	}
	/// Consume the next token if it is the identifier `keyword`; report whether it did.
	pub fn if_is_ident_next(&mut self, keyword: &'src str) -> bool {
		let hit = self.peek_ident() == Some(keyword);
		if hit {
			self.next().ok();
		}
		hit
	}
	/// Advance until the next token is `token`, treating balanced
	/// `{}`/`[]`/`()`/`<>` groups as opaque units and skipping over them.
	#[allow(unused)]
	pub fn next_until(&mut self, token: Token<'static>) -> Result<(), Error<'src>> {
		loop {
			if self.is(token) {
				return Ok(());
			}
			// Map an opening delimiter to the closer we must recurse to.
			let close = match self.next()?.token {
				Token::Brace(true) => Some(Token::Brace(false)),
				Token::Bracket(true) => Some(Token::Bracket(false)),
				Token::Paren(true) => Some(Token::Paren(false)),
				Token::Less => Some(Token::Greater),
				_ => None,
			};
			if let Some(close) = close {
				self.next_until(close)?;
				self.next()?;
			}
		}
	}
}

/// A [`Token`] paired with the source [`Span`] it was lexed from.
#[derive(Debug, Clone, Copy)]
pub struct TokenItem<'src> {
	/// The token kind (carrying borrowed text for textual tokens).
	pub token: Token<'src>,
	/// Source location reported by the reader when the token was produced.
	pub span: Span,
}

/// All token kinds produced by [`Lexer`]. Textual variants borrow their
/// text directly from the source string.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Token<'src> {
	// Basic data-carrying tokens
	/// `//` or `/* */` comment body (skipped by `Lexer::next`).
	Comment(&'src str),
	/// Identifier (alphanumerics plus `_` and `$`).
	Ident(&'src str),
	/// String literal body, delimiters excluded, escapes kept verbatim.
	String(&'src str),
	/// Number literal, raw unparsed text.
	Number(&'src str),
	/// Backtick template body, delimiters excluded, `${...}` kept verbatim.
	Template(&'src str),

	// Brackets — `true` = opening, `false` = closing
	/// {}
	Brace(bool),
	/// []
	Bracket(bool),
	/// ()
	Paren(bool),

	// Arrows and separators
	/// =>
	FatArrow,
	/// ;
	Semi,
	/// ,
	Comma,
	/// .
	Dot,

	// Assignment and declaration
	/// =
	Assign,
	/// :
	Colon,
	/// ::
	ColonColon,

	// Comparison operators
	/// <
	Less,
	/// >
	Greater,
	/// <=
	LessEq,
	/// >=
	GreaterEq,

	// Equality operators
	/// ==
	EqEq,
	/// !=
	BangEq,

	// Arithmetic operators
	/// -
	Minus,
	/// +
	Plus,
	/// *
	Star,
	/// /
	Slash,
	/// %
	Percent,

	// Bitwise operators
	/// &
	And,
	/// |
	Or,
	/// ^
	Caret,
	/// ~
	Tilde,

	// Logical operators
	/// &&
	AndAnd,
	/// ||
	OrOr,

	// Optional-chaining and null-coalescing operators
	/// ?
	Question,
	/// ??
	QuestionQuestion,
	/// ?.
	QuestionDot,

	// Other symbols
	/// ...
	DotDotDot,
	/// !
	Bang,
	/// $
	///
	/// NOTE(review): never produced by the lexer — `'$'` matches
	/// `ident_begin`, so a lone `$` lexes as `Ident("$")`.
	Dollar,
	/// #
	Hash,
	/// @
	At,
	/// \
	Backslash,
}
/// Shorthand for [`Token`] variants keyed by the operator's source text:
/// `T!("=>")` expands to `$crate::lexer::Token::FatArrow`, and so on.
#[macro_export]
macro_rules! T {
	// Brackets
	("{") => {
		$crate::lexer::Token::Brace(true)
	};
	("}") => {
		$crate::lexer::Token::Brace(false)
	};
	("[") => {
		$crate::lexer::Token::Bracket(true)
	};
	("]") => {
		$crate::lexer::Token::Bracket(false)
	};
	("(") => {
		$crate::lexer::Token::Paren(true)
	};
	(")") => {
		$crate::lexer::Token::Paren(false)
	};

	// Arrows and separators
	("=>") => {
		$crate::lexer::Token::FatArrow
	};
	(";") => {
		$crate::lexer::Token::Semi
	};
	(",") => {
		$crate::lexer::Token::Comma
	};
	(".") => {
		$crate::lexer::Token::Dot
	};

	// Assignment and declaration
	("=") => {
		$crate::lexer::Token::Assign
	};
	(":") => {
		$crate::lexer::Token::Colon
	};
	("::") => {
		$crate::lexer::Token::ColonColon
	};

	// Comparison operators
	("<") => {
		$crate::lexer::Token::Less
	};
	(">") => {
		$crate::lexer::Token::Greater
	};
	("<=") => {
		$crate::lexer::Token::LessEq
	};
	(">=") => {
		$crate::lexer::Token::GreaterEq
	};

	// Equality operators
	("==") => {
		$crate::lexer::Token::EqEq
	};
	("!=") => {
		$crate::lexer::Token::BangEq
	};

	// Arithmetic operators
	("-") => {
		$crate::lexer::Token::Minus
	};
	("+") => {
		$crate::lexer::Token::Plus
	};
	("*") => {
		$crate::lexer::Token::Star
	};
	("/") => {
		$crate::lexer::Token::Slash
	};
	("%") => {
		$crate::lexer::Token::Percent
	};

	// Bitwise operators
	("&") => {
		$crate::lexer::Token::And
	};
	("|") => {
		$crate::lexer::Token::Or
	};
	("^") => {
		$crate::lexer::Token::Caret
	};
	("~") => {
		$crate::lexer::Token::Tilde
	};
	// `Token::Shl`/`Token::Shr` do not exist (the `<<`/`>>` lexer rules are
	// commented out), so these arms would fail to compile at any use site.
	// Kept disabled to stay in sync with the lexer.
	// ("<<") => {
	// 	$crate::lexer::Token::Shl
	// };
	// (">>") => {
	// 	$crate::lexer::Token::Shr
	// };

	// Logical operators
	("&&") => {
		$crate::lexer::Token::AndAnd
	};
	("||") => {
		$crate::lexer::Token::OrOr
	};

	// Optional-chaining and null-coalescing operators
	("?") => {
		$crate::lexer::Token::Question
	};
	("??") => {
		$crate::lexer::Token::QuestionQuestion
	};
	("?.") => {
		$crate::lexer::Token::QuestionDot
	};

	// Other symbols
	("...") => {
		$crate::lexer::Token::DotDotDot
	};
	("!") => {
		$crate::lexer::Token::Bang
	};
	("$") => {
		$crate::lexer::Token::Dollar
	};
	("#") => {
		$crate::lexer::Token::Hash
	};
	("@") => {
		$crate::lexer::Token::At
	};
	("\\") => {
		$crate::lexer::Token::Backslash
	};
}
impl<'src> Token<'src> {
	/// Attach a source span to this token, producing a [`TokenItem`].
	pub fn with(self, span: Span) -> TokenItem<'src> {
		TokenItem { span, token: self }
	}
}

/// Errors produced while lexing, and by the `expect_*` helpers.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error<'src> {
	/// End of input reached.
	End,
	/// An unexpected character at the given location.
	UnexpectChar(Span),
	/// A character the lexer has no rule for, with its location.
	UnsupportChar(char, Span),
	/// Wrong token: (span, found, expected).
	Unexpect(Span, Token<'src>, Token<'src>),
	/// Token unsupported in the current context: (span, found, description).
	Unsupport(Span, Token<'src>, &'static str),
}
/// Outcome of lexing a single token.
pub type LexerResult<'src> = Result<TokenItem<'src>, Error<'src>>;

#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn test_basic_tokens() {
		let src = "{}()[];=:<>?,.|-+*&$%!~^/";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::Brace(true));
		assert_eq!(lexer.next().unwrap().token, Token::Brace(false));
		assert_eq!(lexer.next().unwrap().token, Token::Paren(true));
		assert_eq!(lexer.next().unwrap().token, Token::Paren(false));
		assert_eq!(lexer.next().unwrap().token, Token::Bracket(true));
		assert_eq!(lexer.next().unwrap().token, Token::Bracket(false));
		assert_eq!(lexer.next().unwrap().token, Token::Semi);
		assert_eq!(lexer.next().unwrap().token, Token::Assign);
		assert_eq!(lexer.next().unwrap().token, Token::Colon);
		assert_eq!(lexer.next().unwrap().token, Token::Less);
		assert_eq!(lexer.next().unwrap().token, Token::Greater);
		assert_eq!(lexer.next().unwrap().token, Token::Question);
		assert_eq!(lexer.next().unwrap().token, Token::Comma);
		assert_eq!(lexer.next().unwrap().token, Token::Dot);
		assert_eq!(lexer.next().unwrap().token, Token::Or);
		assert_eq!(lexer.next().unwrap().token, Token::Minus);
		assert_eq!(lexer.next().unwrap().token, Token::Plus);
		assert_eq!(lexer.next().unwrap().token, Token::Star);
		assert_eq!(lexer.next().unwrap().token, Token::And);
		// '$' begins an identifier (see `ident_begin` in `next_`), so a lone
		// '$' lexes as Ident("$"), not Token::Dollar — the previous
		// expectation of Token::Dollar could never pass.
		assert_eq!(lexer.next().unwrap().token, Token::Ident("$"));
		assert_eq!(lexer.next().unwrap().token, Token::Percent);
		assert_eq!(lexer.next().unwrap().token, Token::Bang);
		assert_eq!(lexer.next().unwrap().token, Token::Tilde);
		assert_eq!(lexer.next().unwrap().token, Token::Caret);
		assert_eq!(lexer.next().unwrap().token, Token::Slash);

		assert!(lexer.next().is_err()); // End of input
	}

	#[test]
	fn test_composite_tokens() {
		let src = "=>...!=<=>=&&||???.::#@\\";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::FatArrow);
		assert_eq!(lexer.next().unwrap().token, Token::DotDotDot);
		assert_eq!(lexer.next().unwrap().token, Token::BangEq);
		assert_eq!(lexer.next().unwrap().token, Token::LessEq);
		assert_eq!(lexer.next().unwrap().token, Token::GreaterEq);
		assert_eq!(lexer.next().unwrap().token, Token::AndAnd);
		assert_eq!(lexer.next().unwrap().token, Token::OrOr);
		assert_eq!(lexer.next().unwrap().token, Token::QuestionQuestion);
		assert_eq!(lexer.next().unwrap().token, Token::QuestionDot);
		assert_eq!(lexer.next().unwrap().token, Token::ColonColon);
		assert_eq!(lexer.next().unwrap().token, Token::Hash);
		assert_eq!(lexer.next().unwrap().token, Token::At);
		assert_eq!(lexer.next().unwrap().token, Token::Backslash);

		assert!(lexer.next().is_err()); // End of input
	}

	#[test]
	fn test_identifiers() {
		let src = "hello _world test123 _123 ABC";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::Ident("hello"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("_world"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("test123"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("_123"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("ABC"));
	}

	#[test]
	fn test_numbers() {
		let src = "123 456 7890";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::Number("123"));
		assert_eq!(lexer.next().unwrap().token, Token::Number("456"));
		assert_eq!(lexer.next().unwrap().token, Token::Number("7890"));
	}

	#[test]
	fn test_strings() {
		let src = r#""hello" 'world' "escaped\"quote" 'escaped\'quote'"#;
		let mut lexer = Lexer::new(src);

		// Escape sequences are kept verbatim in the token text.
		assert_eq!(lexer.next().unwrap().token, Token::String("hello"));
		assert_eq!(lexer.next().unwrap().token, Token::String("world"));
		assert_eq!(lexer.next().unwrap().token, Token::String(r#"escaped\"quote"#));
		assert_eq!(lexer.next().unwrap().token, Token::String(r#"escaped\'quote"#));
	}

	#[test]
	fn test_comment_line() {
		let src = "// comment\nident";
		let mut lexer = Lexer::new(src);

		// Comments are skipped by next()
		assert_eq!(lexer.next().unwrap().token, Token::Ident("ident"));
	}

	#[test]
	fn test_comment_block() {
		let src = "/* comment */ident";
		assert_eq!(Lexer::new(src).next().unwrap().token, Token::Ident("ident"));
		let src = "/** co*mme*nt **/ident";
		assert_eq!(Lexer::new(src).next().unwrap().token, Token::Ident("ident"));
	}

	#[test]
	fn test_whitespace_handling() {
		let src = "  hello  \t\n  world  ";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::Ident("hello"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("world"));
	}

	#[test]
	fn test_peek_functionality() {
		let src = "hello world";
		let mut lexer = Lexer::new(src);

		// First peek should return first token
		assert_eq!(lexer.peek().unwrap().token, Token::Ident("hello"));
		// Second peek should return same token without consuming
		assert_eq!(lexer.peek().unwrap().token, Token::Ident("hello"));
		// Next should return the same token
		assert_eq!(lexer.next().unwrap().token, Token::Ident("hello"));
		// Now peek should return next token
		assert_eq!(lexer.peek().unwrap().token, Token::Ident("world"));
	}

	#[test]
	fn test_expect_methods() {
		let src = "hello \"world\" 123";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.expect_ident().unwrap(), "hello");
		assert_eq!(lexer.expect_string().unwrap(), "world");
		assert_eq!(lexer.expect_number().unwrap(), "123");
	}

	#[test]
	fn test_expect_methods_error() {
		let src = "hello";
		let mut lexer = Lexer::new(src);

		// This should work
		assert_eq!(lexer.expect_ident().unwrap(), "hello");

		// Past the end, expect_* surfaces the lexer's End error
		let result = lexer.expect_ident();
		assert!(matches!(result, Err(Error::End)));
	}

	#[test]
	fn test_peek_helper_methods() {
		let src = "hello \"world\" 123";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.peek_ident().unwrap(), "hello");
		assert_eq!(lexer.peek_string(), None); // Next token is not string
		assert_eq!(lexer.peek_number(), None); // Next token is not number

		// Consume the ident
		lexer.next().unwrap();

		assert_eq!(lexer.peek_string().unwrap(), "world");
		assert_eq!(lexer.peek_ident(), None);
		assert_eq!(lexer.peek_number(), None);

		// Consume the string
		lexer.next().unwrap();

		assert_eq!(lexer.peek_number().unwrap(), "123");
		assert_eq!(lexer.peek_value().unwrap(), "123");
	}

	#[test]
	fn test_is_and_if_is_next() {
		let src = "hello world";
		let mut lexer = Lexer::new(src);

		assert!(lexer.is(Token::Ident("hello")));
		assert!(lexer.if_is_next(Token::Ident("hello")));
		assert_eq!(lexer.peek().unwrap().token, Token::Ident("world"));

		// Test false case
		assert!(!lexer.is(Token::String("")));
		assert!(!lexer.if_is_next(Token::String("")));
		assert_eq!(lexer.peek().unwrap().token, Token::Ident("world"));
	}

	#[test]
	fn test_next_until() {
		let src = "hello { nested { more } } world";
		let mut lexer = Lexer::new(src);

		// Skip until world, treating the braces as opaque balanced groups
		lexer.next_until(Token::Ident("world")).unwrap();
		assert_eq!(lexer.next().unwrap().token, Token::Ident("world"));
	}

	#[test]
	fn test_error_cases() {
		// Unterminated block comment
		let src = "/* unclosed comment";
		let mut lexer = Lexer::new(src);
		assert!(matches!(lexer.next(), Err(Error::UnexpectChar(_))));

		// Unterminated string
		let src = "\"unclosed string";
		let mut lexer = Lexer::new(src);
		assert!(matches!(lexer.next(), Err(Error::UnexpectChar(_))));
	}

	#[test]
	fn test_complex_expression() {
		let src = "function(a, b) { return a + b; }";
		let mut lexer = Lexer::new(src);

		assert_eq!(lexer.next().unwrap().token, Token::Ident("function"));
		assert_eq!(lexer.next().unwrap().token, Token::Paren(true));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("a"));
		assert_eq!(lexer.next().unwrap().token, Token::Comma);
		assert_eq!(lexer.next().unwrap().token, Token::Ident("b"));
		assert_eq!(lexer.next().unwrap().token, Token::Paren(false));
		assert_eq!(lexer.next().unwrap().token, Token::Brace(true));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("return"));
		assert_eq!(lexer.next().unwrap().token, Token::Ident("a"));
		assert_eq!(lexer.next().unwrap().token, Token::Plus);
		assert_eq!(lexer.next().unwrap().token, Token::Ident("b"));
		assert_eq!(lexer.next().unwrap().token, Token::Semi);
		assert_eq!(lexer.next().unwrap().token, Token::Brace(false));
	}

	#[test]
	fn test_span_information() {
		let src = "hello world";
		let mut lexer = Lexer::new(src);

		let token1 = lexer.next().unwrap();
		assert_eq!(token1.token, Token::Ident("hello"));
		// Span should cover the entire "hello" token
		assert_eq!(token1.span.0, 1);
		assert_eq!(token1.span.1, 1);

		let token2 = lexer.next().unwrap();
		assert_eq!(token2.token, Token::Ident("world"));
		// Span should cover the entire "world" token (starting after space)
		assert_eq!(token2.span.0, 1);
		assert_eq!(token2.span.1, 7);
	}

	#[test]
	fn test_macro_tokens() {
		// Test that the T! macro works correctly
		assert_eq!(T!("..."), Token::DotDotDot);
		assert_eq!(T!("=>"), Token::FatArrow);
		assert_eq!(T!(";"), Token::Semi);
		assert_eq!(T!("="), Token::Assign);
		assert_eq!(T!(":"), Token::Colon);
		assert_eq!(T!("<"), Token::Less);
		assert_eq!(T!(">"), Token::Greater);
		assert_eq!(T!(","), Token::Comma);
		assert_eq!(T!("?"), Token::Question);
		assert_eq!(T!("."), Token::Dot);
		assert_eq!(T!("|"), Token::Or);
		assert_eq!(T!("-"), Token::Minus);
		assert_eq!(T!("+"), Token::Plus);
		assert_eq!(T!("*"), Token::Star);
		assert_eq!(T!("&"), Token::And);
		assert_eq!(T!("$"), Token::Dollar);
		assert_eq!(T!("%"), Token::Percent);
		assert_eq!(T!("{"), Token::Brace(true));
		assert_eq!(T!("}"), Token::Brace(false));
		assert_eq!(T!("("), Token::Paren(true));
		assert_eq!(T!(")"), Token::Paren(false));
		assert_eq!(T!("["), Token::Bracket(true));
		assert_eq!(T!("]"), Token::Bracket(false));
	}
}
