use std::{iter::Peekable, str::Chars};

use crate::calc::token::Token;

/// Streaming lexer that turns an arithmetic-expression string into `Token`s.
///
/// Implements `Iterator`; the final item yielded is `Token::EOF`, after which
/// iteration returns `None`. If an unrecognized character is encountered,
/// iteration stops early and the character is recorded in `unexpected_char`.
#[derive(Debug)]
pub struct Tokenizer<'a> {
    // Character stream over the input; `Peekable` gives one-char lookahead.
    expression: Peekable<Chars<'a>>,
    // Set once `Token::EOF` has been emitted, so the iterator terminates.
    reach_end: bool,
    // First character that could not be tokenized, if any.
    unexpected_char: Option<char>,
}

impl<'a> Tokenizer<'a> {
    pub fn new(expression: &'a str) -> Self {
        Tokenizer {
            expression: expression.chars().peekable(),
            reach_end: false,
            unexpected_char: None,
        }
    }

    pub fn get_unexpected_char(&self) -> Option<char> {
        self.unexpected_char
    }
}

impl<'a> Iterator for Tokenizer<'a> {
    type Item = Token;

    /// Produces the next token from the input.
    ///
    /// Whitespace is skipped. Numbers are runs of ASCII digits containing at
    /// most one decimal point. The last token produced is `Token::EOF`; after
    /// that, `None` is returned. On an unrecognized character, the character
    /// is stored (see `get_unexpected_char`) and `None` is returned.
    fn next(&mut self) -> Option<Self::Item> {
        if self.reach_end {
            return None;
        }

        // Skip any whitespace that precedes the next token.
        while self.expression.next_if(|c| c.is_whitespace()).is_some() {}

        match self.expression.next() {
            // Restrict to ASCII digits: `is_numeric` also matches Unicode
            // digits (e.g. '٣') that `parse` would reject, causing a panic.
            Some(chr) if chr.is_ascii_digit() => {
                let mut number_str = String::from(chr);
                // Accept at most one '.', so input like "1.2.3" yields
                // Number(1.2) and then stops on the second '.' as an
                // unexpected character instead of panicking in `parse`.
                let mut seen_dot = false;
                while let Some(c) = self
                    .expression
                    .next_if(|c| c.is_ascii_digit() || (*c == '.' && !seen_dot))
                {
                    seen_dot |= c == '.';
                    number_str.push(c);
                }

                // number_str is ASCII digits with at most one dot; a parse
                // failure here would indicate a bug in the scan above.
                Some(Token::Number(
                    number_str.parse().expect("valid decimal literal"),
                ))
            }
            Some('+') => Some(Token::Add),
            Some('-') => Some(Token::Subtract),
            Some('*') => Some(Token::Multiply),
            Some('/') => Some(Token::Divide),
            Some('^') => Some(Token::Caret),
            Some('(') => Some(Token::LeftParen),
            Some(')') => Some(Token::RightParen),
            None => {
                // Emit EOF exactly once, then terminate iteration.
                self.reach_end = true;
                Some(Token::EOF)
            }
            Some(chr) => {
                // Record the offending character and stop tokenizing.
                self.unexpected_char = Some(chr);
                None
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use rust_decimal::dec;

    use super::*;

    #[test]
    fn test_tokenizer() {
        // A full expression should lex into the exact token sequence,
        // terminated by EOF.
        let tokens: Vec<Token> = Tokenizer::new("3 + 5 * (2 - 8) ^ 2").collect();

        assert_eq!(
            tokens,
            vec![
                Token::Number(dec!(3)),
                Token::Add,
                Token::Number(dec!(5)),
                Token::Multiply,
                Token::LeftParen,
                Token::Number(dec!(2)),
                Token::Subtract,
                Token::Number(dec!(8)),
                Token::RightParen,
                Token::Caret,
                Token::Number(dec!(2)),
                Token::EOF,
            ]
        );
    }

    #[test]
    fn test_unexpected_char() {
        // Draining the iterator stops at the invalid character, which is
        // then retrievable via the getter.
        let mut tokenizer = Tokenizer::new("3 + 5 * (2 - 8) ^ 2 😃");
        tokenizer.by_ref().for_each(drop);
        assert_eq!(tokenizer.get_unexpected_char(), Some('😃'));
    }

    #[test]
    fn test_unexpected_char_2() {
        // Input following the invalid character must not be tokenized.
        let mut tokenizer = Tokenizer::new("3 + 5 * (2 - 8) ^ 2 😢 3");
        tokenizer.by_ref().for_each(drop);
        assert_eq!(tokenizer.get_unexpected_char(), Some('😢'));
    }
}
