use crate::token::{Token, Span, TokenKind};
use std::iter::Peekable;
use std::str::Chars;

/// A streaming lexer that turns a source string into `Token`s.
///
/// Implements `Iterator<Item = Result<Token, String>>`; tokens can also be
/// pulled one at a time with `next_token`.
pub struct Lexer<'a> {
    // Full source text being lexed; `pos` indexes into it.
    code: &'a str,
    // Current offset into `code`, used for `Span { start, end }` values and
    // the end-of-input check against `code.len()`.
    pos: usize,
    // Char-level cursor over the same text, kept in step with `pos` by
    // `advance`.
    chars: Peekable<Chars<'a>>,
}

impl<'a> Iterator for Lexer<'a> {
    type Item = Result<Token, String>;

    fn next(&mut self) -> Option<Self::Item> {
        Some(self.next_token())
    }
}


impl<'a> Lexer<'a> {
    pub fn new(code: &'a str) -> Self {
        Lexer {
            code,
            pos: 0,
            chars: code.chars().peekable(),
        }
    }

    fn advance(&mut self) {
        self.pos += 1;
        self.chars.next();
    }

    fn lex_number(&mut self) -> Result<Token, String> {
        let mut num = String::new();
        let mut is_float = false;
        let start = self.pos;
        while let Some(c) = self.chars.peek() {
            match c {
                '0'..='9' => num.push(* c),
                '.' => {
                    is_float = true;
                    num.push(* c);
                },
                _ => {
                    break;
                }
            }
            self.advance();
        }
        if is_float {
            return Ok(Token::float(num, Span{start, end: self.pos}));
        } else {
            return Ok(Token::int(num, Span{start, end: self.pos}));
        }
    }

    fn lex_string(&mut self) -> Result<Token, String> {
        let mut str = String::new();
        let start = self.pos;
        self.advance(); // skip the first "
        while let Some(&c) = self.chars.peek() {
            if c == '"' {
                self.advance(); // skip the last "
                return Ok(Token::str(str, Span{start, end: self.pos}));
            }
            str.push(c);
            self.advance();
        }
        return Err("Unterminated string".to_string());
    }

    fn lex_bool(&mut self) -> Result<Token, String> {
        let mut bool = String::new();
        let start = self.pos;
        while let Some(c) = self.chars.peek() {
            if c.is_alphabetic() {
                bool.push(*c);
                self.advance();
            } else {
                break;
            }
        }
        if bool == "true" {
            return Ok(Token::bool(bool, Span{start, end: self.pos}));
        } else if bool == "false" {
            return Ok(Token::bool(bool, Span{start, end: self.pos}));
        } else {
            return Err("Invalid bool".to_string());
        }
    }

    fn lex_single_char(&mut self, kind: TokenKind) -> Result<Token, String> {
        let start = self.pos;
        self.advance();
        match kind {
            TokenKind::Add => Ok(Token::add(start, self.pos)),
            TokenKind::Sub => Ok(Token::sub(start, self.pos)),
            TokenKind::Mul => Ok(Token::mul(start, self.pos)),
            TokenKind::Div => Ok(Token::div(start, self.pos)),
            _ => Err("Invalid character".to_string()),
        }
    }


    pub fn next_token(&mut self) -> Result<Token, String> {
        if self.pos >= self.code.len() {
            return Ok(Token::eof(self.pos));
        }

        if let Some(c) = self.chars.peek() {
            return match c {
                '+' => self.lex_single_char(TokenKind::Add),
                '-' => self.lex_single_char(TokenKind::Sub),
                '*' => self.lex_single_char(TokenKind::Mul),
                '/' => self.lex_single_char(TokenKind::Div),
                '"' => self.lex_string(),
                '0'..='9' => self.lex_number(),
                't' | 'f' => self.lex_bool(),
                '\r' => {
                    self.advance();
                    if self.chars.peek() == Some(&'\n') {
                        self.advance();
                    }
                    Ok(Token::lf(self.pos))
                },
                '\n' => {
                    self.advance();
                    Ok(Token::lf(self.pos))
                },
                '\0' => {
                    Ok(Token::eof(self.pos))
                },
                _ => {
                    println!("Invalid character: {}", c);
                    self.chars.next();
                    Err("Invalid character".to_string())
                }
            }
        }
        return Ok(Token::eof(self.pos));
    }
}


// Unit tests for the lexer.
#[cfg(test)]
mod tests {
    use super::*;

    /// Lexes `code` and asserts the first token's kind matches `expected`.
    ///
    /// Only `kind` is compared: the expected lexemes/spans below are not all
    /// what the lexer produces (e.g. `lex_string` strips the surrounding
    /// quotes from the lexeme).
    fn case(code: &str, expected: Token) {
        let mut lexer = Lexer::new(code);
        match lexer.next_token() {
            Ok(token) => assert_eq!(token.kind, expected.kind),
            // Fix: was `assert!(false, "{}", e)` — the clippy
            // `assertions_on_constants` anti-pattern; `panic!` states the
            // intent (unconditional test failure) directly.
            Err(e) => panic!("{}", e),
        }
    }

    #[test]
    fn test_next_token() {
        case("123", Token::int("123".to_string(), Span { start: 0, end: 3 }));
        case("12.3", Token::float("12.3".to_string(), Span { start: 0, end: 4 }));
        case("\"hello\"", Token::str("\"hello\"".to_string(), Span { start: 0, end: 7 }));
        case("\"你好\"", Token::str("\"你好\"".to_string(), Span { start: 0, end: 7 }));
        case("true", Token::bool("true".to_string(), Span { start: 0, end: 4 }));
        case("false", Token::bool("false".to_string(), Span { start: 0, end: 5 }));
    }
}
