use crate::token::{lookup_identifier, Token, TokenKind};

/// Streaming lexer over a source string.
///
/// The input is held as a `Vec<char>` so every character (including
/// multi-byte ones) can be indexed uniformly by position.
#[derive(Debug)]
pub struct Lexer {
  input: Vec<char>,      // source text, one entry per `char`
  position: usize,       // index of the char currently held in `ch`
  read_position: usize,  // index of the next char to read (position + 1)
  ch: char,              // char under examination; '\0' signals end of input
}

impl Lexer {
  /// Builds a lexer over `input` and primes it so the first call to
  /// [`next_token`](Self::next_token) sees the first character.
  pub fn new(input: &str) -> Self {
    let mut lexer = Self {
      input: input.chars().collect(),
      position: 0,
      read_position: 0,
      // '\0' is the EOF sentinel used throughout the lexer; spell it out
      // rather than relying on `char`'s `Default` happening to be '\0'.
      ch: '\0',
    };

    // Load input[0] into `ch` (or '\0' for empty input).
    lexer.read_char();

    lexer
  }

  /// Scans and returns the next token, consuming it from the input.
  ///
  /// End of input is reported as a `TokenKind::Eof` token whose literal is
  /// a single space; calling this again after EOF keeps returning EOF.
  pub fn next_token(&mut self) -> Token {
    self.skip_whitespace();

    let token = match self.ch {
      '=' => {
        // One-char lookahead distinguishes "==" from "=".
        if self.peek_char() == '=' {
          self.read_char();
          Self::make_token_string(TokenKind::Equal, "==".to_string())
        } else {
          Self::make_token(TokenKind::Assign, self.ch)
        }
      }
      ':' => Self::make_token(TokenKind::Colon, self.ch),
      ';' => Self::make_token(TokenKind::Semicolon, self.ch),
      ',' => Self::make_token(TokenKind::Comma, self.ch),
      '+' => Self::make_token(TokenKind::Plus, self.ch),
      '-' => Self::make_token(TokenKind::Minus, self.ch),
      '/' => Self::make_token(TokenKind::Slash, self.ch),
      '*' => Self::make_token(TokenKind::Asterisk, self.ch),
      '(' => Self::make_token(TokenKind::LeftParen, self.ch),
      ')' => Self::make_token(TokenKind::RightParen, self.ch),
      '{' => Self::make_token(TokenKind::LeftBrace, self.ch),
      '}' => Self::make_token(TokenKind::RightBrace, self.ch),
      '[' => Self::make_token(TokenKind::LeftBracket, self.ch),
      ']' => Self::make_token(TokenKind::RightBracket, self.ch),
      '!' => {
        // One-char lookahead distinguishes "!=" from "!".
        if self.peek_char() == '=' {
          self.read_char();
          Self::make_token_string(TokenKind::BangEqual, "!=".to_string())
        } else {
          Self::make_token(TokenKind::Bang, self.ch)
        }
      }
      '>' => Self::make_token(TokenKind::Greater, self.ch),
      '<' => Self::make_token(TokenKind::Less, self.ch),
      '\0' => Self::make_token(TokenKind::Eof, ' '),
      '"' => Self::make_token_string(TokenKind::String, self.read_string()),
      ch => {
        // Multi-char lexemes: `read_identifier`/`read_number` already
        // advance one char PAST the lexeme, so we must `return` here and
        // skip the trailing `read_char` below.
        return if Self::is_letter(ch) {
          let literal = self.read_identifier();
          // `lookup_identifier` takes ownership, so clone the literal for
          // the token itself.
          let kind = lookup_identifier(literal.clone());
          Self::make_token_string(kind, literal)
        } else if Self::is_digit(ch) {
          let kind = TokenKind::Integer;
          let literal = self.read_number();
          Self::make_token_string(kind, literal)
        } else {
          Self::make_token(TokenKind::Illegal, ch)
        }
      }
    };

    // Step past the (last) char consumed by the single/double-char arms.
    self.read_char();
    token
  }

  /// Advances one character: `ch` becomes the char at `read_position`
  /// (or '\0' when past the end), and the cursors move forward.
  fn read_char(&mut self) {
    if self.read_position >= self.input.len() {
      self.ch = '\0';
    } else {
      self.ch = self.input[self.read_position];
    }

    self.position = self.read_position;
    self.read_position += 1;
  }

  /// Consumes a run of identifier characters (letters or '_') and returns
  /// it. Leaves `ch` on the first non-identifier character.
  fn read_identifier(&mut self) -> String {
    let mut identifier = String::new();

    while Self::is_letter(self.ch) {
      identifier.push(self.ch);
      self.read_char();
    }

    identifier
  }

  /// Consumes a run of digit characters and returns it as a string.
  /// Leaves `ch` on the first non-digit character.
  fn read_number(&mut self) -> String {
    let mut num = String::new();

    while Self::is_digit(self.ch) {
      num.push(self.ch);
      self.read_char();
    }

    num
  }

  /// Reads a double-quoted string literal, returning its contents without
  /// the surrounding quotes. On return `ch` sits on the closing '"'
  /// (consumed by the caller's trailing `read_char`).
  ///
  /// NOTE(review): an unterminated string (EOF before the closing quote)
  /// is silently accepted and yields everything up to EOF — no error token
  /// is produced. Confirm this is the intended behavior.
  fn read_string(&mut self) -> String {
    let position = self.position + 1; // first char after the opening quote
    self.read_char();

    while self.ch != '"' && self.ch != '\0' {
      self.read_char();
    }

    self.input[position..self.position].iter().collect()
  }

  /// Returns the next character without advancing, or '\0' at end of input.
  fn peek_char(&self) -> char {
    if self.read_position >= self.input.len() {
      '\0'
    } else {
      self.input[self.read_position]
    }
  }

  /// Builds a token whose literal is a single character.
  fn make_token(kind: TokenKind, ch: char) -> Token {
    let literal = ch.to_string();
    Self::make_token_string(kind, literal)
  }

  /// Builds a token from an owned literal string.
  fn make_token_string(kind: TokenKind, literal: String) -> Token {
    Token::new(kind, literal)
  }

  /// Skips over ASCII whitespace (space, tab, newline, carriage return).
  fn skip_whitespace(&mut self) {
    while self.ch.is_ascii_whitespace() {
      self.read_char();
    }
  }

  /// True for characters that may appear in identifiers: any alphabetic
  /// char (Unicode) or '_'.
  fn is_letter(ch: char) -> bool {
    ch.is_alphabetic() || ch == '_'
  }

  /// True for numeric characters (Unicode `is_numeric`).
  fn is_digit(ch: char) -> bool {
    ch.is_numeric()
  }
}

#[cfg(test)]
mod test {
  use super::Lexer;
  use crate::token::{Token, TokenKind};

  // Asserts kind and literal separately so a failure pinpoints which half
  // of the token was wrong, and at which index in the expectation list.
  fn test_assert(idx: usize, exp_token: Token, recv_token: Token) {
    assert_eq!(
      exp_token.kind, recv_token.kind,
      "expected_tokens[{idx}] - token type wrong. expected={}, got={}",
      exp_token.kind, recv_token.kind
    );

    assert_eq!(
      exp_token.literal, recv_token.literal,
      "expected_tokens[{idx}] - literal wrong. expected={}, got={}",
      exp_token.literal, recv_token.literal
    );
  }

  // Drives the lexer over `input`, comparing each produced token against
  // the expectation list in order.
  fn test_lexer(input: &str, expected_tokens: Vec<Token>) {
    let mut lexer = Lexer::new(input);

    for (idx, exp_token) in expected_tokens.into_iter().enumerate() {
      let recv_token = lexer.next_token();
      test_assert(idx, exp_token, recv_token);
    }
  }

  #[test]
  fn test_simple_next_token() {
    let input = "   =+(){},  ;  ";

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::Assign, "=".to_string()),
      Token::new(TokenKind::Plus, "+".to_string()),
      Token::new(TokenKind::LeftParen, "(".to_string()),
      Token::new(TokenKind::RightParen, ")".to_string()),
      Token::new(TokenKind::LeftBrace, "{".to_string()),
      Token::new(TokenKind::RightBrace, "}".to_string()),
      Token::new(TokenKind::Comma, ",".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_identifier_next_token() {
    let input = "ten;";

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::Identifier, "ten".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_number_next_token() {
    let input = "10;";

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_string_next_token() {
    let input = r#"  
        "a simple string!";
        "foobar";
    "#;

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::String, "a simple string!".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::String, "foobar".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_array_next_token() {
    let input = "[1, 2];";

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::LeftBracket, "[".to_string()),
      Token::new(TokenKind::Integer, "1".to_string()),
      Token::new(TokenKind::Comma, ",".to_string()),
      Token::new(TokenKind::Integer, "2".to_string()),
      Token::new(TokenKind::RightBracket, "]".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_hashmap_next_token() {
    let input = r#"
        { "foo": "bar" }
    "#;

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::LeftBrace, "{".to_string()),
      Token::new(TokenKind::String, "foo".to_string()),
      Token::new(TokenKind::Colon, ":".to_string()),
      Token::new(TokenKind::String, "bar".to_string()),
      Token::new(TokenKind::RightBrace, "}".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_function_next_token() {
    let input = r#"
    let five = 5;
    let ten = 10;

    let sum = fn(x, y) {
      x + y;
    };

    let result = sum(five, ten);
    
    !-/*5;
    5 < 10 > 5;
    "#;

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::Let, "let".to_string()),
      Token::new(TokenKind::Identifier, "five".to_string()),
      Token::new(TokenKind::Assign, "=".to_string()),
      Token::new(TokenKind::Integer, "5".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Let, "let".to_string()),
      Token::new(TokenKind::Identifier, "ten".to_string()),
      Token::new(TokenKind::Assign, "=".to_string()),
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Let, "let".to_string()),
      Token::new(TokenKind::Identifier, "sum".to_string()),
      Token::new(TokenKind::Assign, "=".to_string()),
      Token::new(TokenKind::Function, "fn".to_string()),
      Token::new(TokenKind::LeftParen, "(".to_string()),
      Token::new(TokenKind::Identifier, "x".to_string()),
      Token::new(TokenKind::Comma, ",".to_string()),
      Token::new(TokenKind::Identifier, "y".to_string()),
      Token::new(TokenKind::RightParen, ")".to_string()),
      Token::new(TokenKind::LeftBrace, "{".to_string()),
      Token::new(TokenKind::Identifier, "x".to_string()),
      Token::new(TokenKind::Plus, "+".to_string()),
      Token::new(TokenKind::Identifier, "y".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::RightBrace, "}".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Let, "let".to_string()),
      Token::new(TokenKind::Identifier, "result".to_string()),
      Token::new(TokenKind::Assign, "=".to_string()),
      Token::new(TokenKind::Identifier, "sum".to_string()),
      Token::new(TokenKind::LeftParen, "(".to_string()),
      Token::new(TokenKind::Identifier, "five".to_string()),
      Token::new(TokenKind::Comma, ",".to_string()),
      Token::new(TokenKind::Identifier, "ten".to_string()),
      Token::new(TokenKind::RightParen, ")".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Bang, "!".to_string()),
      Token::new(TokenKind::Minus, "-".to_string()),
      Token::new(TokenKind::Slash, "/".to_string()),
      Token::new(TokenKind::Asterisk, "*".to_string()),
      Token::new(TokenKind::Integer, "5".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Integer, "5".to_string()),
      Token::new(TokenKind::Less, "<".to_string()),
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::Greater, ">".to_string()),
      Token::new(TokenKind::Integer, "5".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_if_else_next_token() {
    let input = r#"
    if (5 < 10) {
      return true;
    } else {
      return false;
    }
    "#;

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::If, "if".to_string()),
      Token::new(TokenKind::LeftParen, "(".to_string()),
      Token::new(TokenKind::Integer, "5".to_string()),
      Token::new(TokenKind::Less, "<".to_string()),
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::RightParen, ")".to_string()),
      Token::new(TokenKind::LeftBrace, "{".to_string()),
      Token::new(TokenKind::Return, "return".to_string()),
      Token::new(TokenKind::True, "true".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::RightBrace, "}".to_string()),
      Token::new(TokenKind::Else, "else".to_string()),
      Token::new(TokenKind::LeftBrace, "{".to_string()),
      Token::new(TokenKind::Return, "return".to_string()),
      Token::new(TokenKind::False, "false".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::RightBrace, "}".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }

  #[test]
  fn test_double_char_next_token() {
    let input = r#"
    10 == 10;
    10 != 9;
    "#;

    let expected_tokens: Vec<Token> = vec![
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::Equal, "==".to_string()),
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Integer, "10".to_string()),
      Token::new(TokenKind::BangEqual, "!=".to_string()),
      Token::new(TokenKind::Integer, "9".to_string()),
      Token::new(TokenKind::Semicolon, ";".to_string()),
      Token::new(TokenKind::Eof, " ".to_string()),
    ];

    test_lexer(input, expected_tokens);
  }
}
