use std::error::Error;
use std::fmt::{self, Debug};
use std::sync::OnceLock;

use regex::Regex;

use crate::consts::*;

/// A single lexical token produced by [`tokenize`].
///
/// The `String` and `Symbol` variants borrow slices of the original input
/// text, so a `Token` may not outlive the source it was parsed from.
#[derive(Debug, Clone, PartialEq)]
pub enum Token<'a> {
    Integer(i64),
    Float(f64),
    /// Contents of a string literal, without the surrounding quotes.
    String(&'a str),
    /// One of the operators listed in `OPERATOR_SET` (see `crate::consts`).
    Operator(&'static str),
    /// One of the keywords listed in `KEYWORD_SET` (see `crate::consts`).
    Keyword(&'static str),
    Symbol(&'a str),
    LParen,
    RParen,
    /// End-of-input sentinel returned by `parse_token`; never pushed into
    /// the token list returned by `tokenize`.
    EOF,
}

impl<'a> fmt::Display for Token<'a> {
    /// Renders the token as it would appear in source text; `EOF` renders
    /// as the empty string.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use Token::*;
        // Write straight into the formatter instead of building an
        // intermediate String with `format!` (avoids an allocation per call).
        match self {
            Integer(n) => write!(f, "{}", n),
            Float(x) => write!(f, "{}", x),
            String(s) => f.write_str(s),
            Operator(s) => f.write_str(s),
            Keyword(s) => f.write_str(s),
            Symbol(s) => f.write_str(s),
            LParen => f.write_str("("),
            RParen => f.write_str(")"),
            EOF => Ok(()),
        }
    }
}

/// Error returned when the tokenizer encounters input it cannot lex
/// (an unknown token or an unterminated string literal).
#[derive(Debug)]
pub struct TokenError {
    // Human-readable description, typically including the offending input.
    err: String,
}

impl Error for TokenError {}

impl fmt::Display for TokenError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Tokenize error: {}", self.err)
    }
}

/// Lexes `input` into a flat list of tokens.
///
/// Returns an error if any portion of the input cannot be tokenized.
/// The returned tokens borrow from `input`.
pub fn tokenize(mut input: &str) -> Result<Vec<Token>, Box<dyn Error>> {
    let mut tokens = Vec::new();

    loop {
        match parse_token(input)? {
            // EOF marks exhaustion of the input; it is not recorded.
            (Token::EOF, _) => break Ok(tokens),
            (token, rest) => {
                tokens.push(token);
                input = rest;
            }
        }
    }
}

/// Parses the next token from `input`, returning it together with the
/// remaining unconsumed input. Leading whitespace is skipped; empty input
/// yields `Token::EOF`.
///
/// Parsers are tried in a fixed priority order (parens, keywords, numbers,
/// operators, strings, symbols); the first that recognizes the input wins.
fn parse_token(mut input: &str) -> Result<(Token, &str), Box<dyn Error>> {
    input = input.trim_start();

    if input.is_empty() {
        return Ok((Token::EOF, ""));
    }

    // Flattened from the original nested or_else pyramid, which also called
    // parse_string_token twice in a row (harmless but redundant).
    let parsed = parse_lparen_token(input)
        .or_else(|| parse_rparen_token(input))
        .or_else(|| parse_keyword_token(input))
        .or_else(|| parse_number_token(input))
        .or_else(|| parse_operator_token(input))
        .or_else(|| parse_string_token(input))
        .or_else(|| parse_symbol_token(input));

    match parsed {
        Some(res) => res,
        None => Err(Box::new(TokenError {
            // Fixed typo: "Unkown" -> "Unknown".
            err: format!("Unknown token {}", input),
        })),
    }
}

fn parse_number_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    let regex = Regex::new("[+-]?[0-9]+(\\.[0-9]*)?([eE][+-]?[0-9]+)?").unwrap();
    match regex.find(input) {
        Some(result) => {
            let range = result.range();
            if range.start != 0 {
                None
            } else {
                Some(Ok(match input[..range.end].parse::<i64>() {
                    Ok(n) => (
                        Token::Integer(n),
                        &input[range.end..],
                    ),
                    Err(_) => (
                        Token::Float(input[..range.end].parse::<f64>().unwrap()),
                        &input[range.end..],
                    ),
                }))
            }
        }
        None => None,
    }
}

/// Tries to parse a double-quoted string literal at the start of `input`.
///
/// Returns `None` if `input` does not start with `"`, and an error if the
/// literal is never closed. Escape sequences are not supported.
fn parse_string_token<'a>(input: &'a str) -> Option<Result<(Token<'a>, &'a str), Box<dyn Error>>> {
    // Content begins after the opening quote.
    let body = input.strip_prefix('"')?;
    // `find` yields a byte offset (always a char boundary), so slicing is
    // safe for multi-byte UTF-8 content. The original indexed with a char
    // *count*, which could slice mid-character and panic on non-ASCII input.
    match body.find('"') {
        Some(end) => Some(Ok((Token::String(&body[..end]), &body[end + 1..]))),
        None => Some(Err(Box::new(TokenError {
            err: format!("Unterminated string: {}", input),
        }))),
    }
}

/// Parses a symbol: everything up to the next whitespace or parenthesis.
///
/// Always succeeds (the caller only invokes it on non-empty input); acts
/// as the fallback when no other parser matches.
fn parse_symbol_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    // `find` returns the *byte* offset of the first delimiter, so the slice
    // below is UTF-8 safe. The original counted chars and used that count as
    // a byte index, which panics on multi-byte symbols.
    let end = input
        .find(|c: char| c.is_whitespace() || c == '(' || c == ')')
        .unwrap_or(input.len());
    Some(Ok((Token::Symbol(&input[..end]), &input[end..])))
}

/// Tries to match one of the operators from `OPERATOR_SET` at the start of
/// `input`. Returns `None` if no operator is a prefix of the input.
fn parse_operator_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    OPERATOR_SET
        .iter()
        .find(|&&op| input.starts_with(op))
        .map(|&op| Ok((Token::Operator(op), &input[op.len()..])))
}

/// Tries to match one of the keywords from `KEYWORD_SET` at the start of
/// `input`.
///
/// A match only counts when the keyword is followed by a delimiter
/// (whitespace, a parenthesis, or end of input) — the original matched
/// bare prefixes, so e.g. "definex" lexed as Keyword("define") + "x"
/// instead of a single symbol.
fn parse_keyword_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    for &kw in KEYWORD_SET.iter() {
        if let Some(rest) = input.strip_prefix(kw) {
            let at_boundary = rest
                .chars()
                .next()
                .map_or(true, |c| c.is_whitespace() || c == '(' || c == ')');
            if at_boundary {
                return Some(Ok((Token::Keyword(kw), rest)));
            }
        }
    }
    None
}

/// Matches an opening parenthesis at the start of `input`.
fn parse_lparen_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    input
        .strip_prefix('(')
        .map(|rest| Ok((Token::LParen, rest)))
}

/// Matches a closing parenthesis at the start of `input`.
fn parse_rparen_token(input: &str) -> Option<Result<(Token, &str), Box<dyn Error>>> {
    input
        .strip_prefix(')')
        .map(|rest| Ok((Token::RParen, rest)))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add() {
        // `expect` instead of `unwrap_or(vec![])`: the original silently
        // turned a tokenize error into an empty vec, producing a confusing
        // assertion diff instead of surfacing the actual error.
        let tokens = tokenize("(+ 1 2)").expect("tokenize should succeed");
        assert_eq!(
            tokens,
            vec![
                Token::LParen,
                Token::Operator("+"),
                Token::Integer(1),
                Token::Integer(2),
                Token::RParen,
            ]
        );
    }

    #[test]
    fn test_area_of_a_circle() {
        let program = "
            (
                (define r 10)
                (define pi 314)
                (* pi (* r r))
            )
        ";
        let tokens = tokenize(program).expect("tokenize should succeed");
        assert_eq!(
            tokens,
            vec![
                Token::LParen,
                Token::LParen,
                Token::Keyword("define"),
                Token::Symbol("r"),
                Token::Integer(10),
                Token::RParen,
                Token::LParen,
                Token::Keyword("define"),
                Token::Symbol("pi"),
                Token::Integer(314),
                Token::RParen,
                Token::LParen,
                Token::Operator("*"),
                Token::Symbol("pi"),
                Token::LParen,
                Token::Operator("*"),
                Token::Symbol("r"),
                Token::Symbol("r"),
                Token::RParen,
                Token::RParen,
                Token::RParen
            ]
        );
    }
}
