use std::{
    fmt::{self, Debug, Display, Formatter},
    iter,
    num::NonZeroUsize,
};

use logos::{Logos, Source, SpannedIter};

use crate::error::ErrorLocation;

/// Errors produced while tokenizing the input.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LexingError<Loc> {
    /// The lexer encountered a character that does not start any known token.
    /// Carries the location (generic so the lexer can attach any span type).
    InvalidCharacter(Loc),
}

impl<Loc> Display for LexingError<Loc> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Resolve the human-readable message first, then emit it once.
        let message = match self {
            Self::InvalidCharacter(_) => "Invalid character",
        };
        f.write_str(message)
    }
}

impl LexingError<LineLocation> {
    /// Returns the source position recorded for this error, wrapped as a
    /// point-style [`ErrorLocation`], or `None` if no location was captured.
    pub fn location(&self) -> Option<ErrorLocation> {
        match self {
            // `LineColumnLocation` is `Copy`, so `copied` suffices here.
            Self::InvalidCharacter(location) => {
                location.as_ref().copied().map(ErrorLocation::Point)
            }
        }
    }
}

/// All lexical tokens of the language, derived via [`logos`].
///
/// Whitespace between tokens is skipped. The skip class includes `\r` so that
/// input with CRLF line endings lexes cleanly instead of producing spurious
/// invalid-character errors on every carriage return.
#[derive(Logos, Debug, PartialEq, Eq, Clone)]
#[logos(error = ())]
#[logos(skip r"[ \r\n\t\f]+")]
pub enum Token<'input> {
    // --- Keywords ---
    #[token("def")]
    Def,
    #[token("match")]
    Match,
    #[token("case")]
    Case,
    #[token("chain")]
    Chain,
    #[token("if")]
    If,
    #[token("else")]
    Else,
    #[token("jump")]
    Jump,
    #[token("ext")]
    Ext,
    #[token("let")]
    Let,
    #[token("stop")]
    Stop,
    #[token("int")]
    Int,
    #[token("bool")]
    Bool,
    #[token("true")]
    True,
    #[token("false")]
    False,
    #[token("record")]
    Record,
    #[token("ip4")]
    Ip4,
    #[token("port")]
    Port,
    #[token("number")]
    Number,
    // --- Delimiters ---
    #[token("(")]
    OpenParentheses,
    #[token(")")]
    CloseParentheses,
    #[token("[")]
    OpenBracket,
    #[token("]")]
    CloseBracket,
    #[token("{")]
    OpenBrace,
    #[token("}")]
    CloseBrace,
    // --- Operators (logos picks the longest match, so e.g. "==" wins over "=") ---
    #[token("+")]
    Add,
    #[token("-")]
    Minus,
    #[token("==")]
    Equals,
    #[token("!=")]
    NotEquals,
    #[token("=")]
    Assign,
    #[token(">")]
    Greater,
    #[token("<")]
    Lesser,
    #[token(">=")]
    GreaterEquals,
    #[token("<=")]
    LesserEquals,
    #[token("&")]
    And,
    #[token("|")]
    Or,
    #[token("~")]
    Not,
    #[token("&&")]
    LogicAnd,
    #[token("||")]
    LogicOr,
    #[token("!")]
    LogicNot,
    #[token("^")]
    Xor,
    // --- Punctuation ---
    #[token(",")]
    Comma,
    #[token(":")]
    Colon,
    #[token(";")]
    Semicolon,
    #[token(".")]
    Dot,
    #[token("<<")]
    LeftShift,
    #[token(">>")]
    RightShift,
    #[token("*")]
    Multiply,
    #[token("/")]
    Divide,
    #[token("%")]
    Mod,
    #[token("=>")]
    FatArrow,
    // --- Literals and identifiers ---
    /// Identifier: a letter or underscore followed by letters, digits, `-`, `_`.
    #[regex("[a-zA-Z_][a-zA-Z0-9\\-_]*")]
    Symbol(&'input str),
    /// Decimal integer with no leading zeros (a lone `0` is allowed).
    #[regex("(0|[123456789]\\d*)")]
    DecNumber(&'input str),
    /// Hexadecimal integer with a mandatory `0x` prefix.
    #[regex("0x[0-9a-fA-F]+")]
    HexNumber(&'input str),
    // NOTE(review): the pattern requires at least one character between the
    // quotes, so the empty string literal `""` does not lex, and escape
    // sequences are not supported — confirm whether both are intended.
    #[regex(r#""([^"]+)""#, |lex| {
        let slice = lex.slice();
        // Strip the surrounding quotes; the regex guarantees both are present.
        &slice[1..slice.len() - 1]
    })]
    String(&'input str),
}

/// Result of lexing a single token: `(start, token, end)` on success, or a
/// lexing error (the triple shape consumed by LALRPOP-style parsers —
/// presumably; confirm against the parser integration).
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;

/// Streaming lexer that wraps the logos token stream and annotates each
/// token's byte span with line/column locations.
pub struct TokenLexer<'input> {
    // Byte offset at which each line starts; index 0 always holds 0.
    lines_loc: Vec<usize>,
    // Underlying logos lexer yielding `(token, byte_span)` pairs.
    token_stream: SpannedIter<'input, Token<'input>>,
}

/// A 1-based line/column position within the lexed input.
///
/// `NonZeroUsize` encodes the 1-based invariant in the type, and makes
/// `Option<LineColumnLocation>` (see [`LineLocation`]) niche-optimized.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct LineColumnLocation {
    // 1-based line number.
    line: NonZeroUsize,
    // 1-based column number.
    column: NonZeroUsize,
}

impl LineColumnLocation {
    /// Creates a location from 1-based `line` and `column` numbers.
    ///
    /// # Panics
    /// Panics if `line` or `column` is zero — positions are 1-based.
    pub fn new(line: usize, column: usize) -> Self {
        Self {
            line: NonZeroUsize::new(line).expect("line numbers are 1-based; got 0"),
            column: NonZeroUsize::new(column).expect("column numbers are 1-based; got 0"),
        }
    }

    /// The 1-based line number.
    pub fn line(&self) -> NonZeroUsize {
        self.line
    }

    /// The 1-based column number.
    pub fn column(&self) -> NonZeroUsize {
        self.column
    }
}

impl Display for LineColumnLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "Line: {}, Column: {}", self.line, self.column)
    }
}

impl Debug for LineColumnLocation {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Debug mirrors Display; pass `self` directly (the original passed
        // `&self`, i.e. `&&Self`, through the blanket `impl Display for &T`).
        Display::fmt(self, f)
    }
}

/// Optional source location: `None` when a position cannot be attributed.
pub type LineLocation = Option<LineColumnLocation>;

impl<'input> TokenLexer<'input> {
    /// Builds a lexer over `input`, precomputing the byte offset at which
    /// every line starts so token spans can be mapped to line/column pairs.
    pub fn new(input: &'input str) -> Self {
        // Line 1 starts at offset 0; every subsequent line starts one byte
        // past a `'\n'`.
        let mut lines_loc = vec![0];
        lines_loc.extend(input.match_indices('\n').map(|(pos, _)| pos + 1));

        Self {
            lines_loc,
            token_stream: Token::lexer(input).spanned(),
        }
    }

    /// Converts a byte offset into a 1-based line/column location.
    ///
    /// NOTE: the column is `byte offset within the line + 1`; multi-byte
    /// UTF-8 characters therefore advance it by more than one.
    fn transform_location(&self, loc: usize) -> LineColumnLocation {
        // An exact hit means `loc` is the first byte of that line; otherwise
        // the insertion point is one past the line containing `loc`.
        let line = match self.lines_loc.binary_search(&loc) {
            Ok(line) => line,
            Err(insertion) => insertion - 1,
        };

        LineColumnLocation::new(line + 1, loc - self.lines_loc[line] + 1)
    }
}

impl<'input> Iterator for TokenLexer<'input> {
    type Item = Spanned<Token<'input>, LineLocation, LexingError<LineLocation>>;

    /// Yields the next token (or lexing error), tagging it with line/column
    /// locations derived from its byte span.
    fn next(&mut self) -> Option<Self::Item> {
        let (token, span) = self.token_stream.next()?;
        let start = Some(self.transform_location(span.start));
        let end = Some(self.transform_location(span.end));

        Some(match token {
            Ok(token) => Ok((start, token, end)),
            // Unrecognized input: report the location where it begins.
            Err(_) => Err(LexingError::InvalidCharacter(start)),
        })
    }
}

#[cfg(test)]
mod test {
    use crate::lexer::{LineColumnLocation, Token};

    use super::TokenLexer;

    // Decimal literals on two lines: checks both token values and that the
    // line/column mapping advances across the `\n`.
    #[test]
    fn number_parse_test() {
        let mut lexer = TokenLexer::new("123456789\n987654321");

        let (start, token, end) = lexer.next().unwrap().unwrap();
        assert_eq!(start.unwrap(), LineColumnLocation::new(1, 1));
        // End column is exclusive: one past the last character.
        assert_eq!(end.unwrap(), LineColumnLocation::new(1, 10));
        assert_eq!(token, Token::DecNumber("123456789"));

        let (start, token, end) = lexer.next().unwrap().unwrap();
        assert_eq!(start.unwrap(), LineColumnLocation::new(2, 1));
        assert_eq!(end.unwrap(), LineColumnLocation::new(2, 10));
        assert_eq!(token, Token::DecNumber("987654321"));
    }

    // Identifiers: lowercase, uppercase with underscore, and leading underscore.
    #[test]
    fn symbol_parse_test() {
        let mut lexer = TokenLexer::new("symbol TEST_SYMBOL\n_PRIVATE");
        let (start, token, end) = lexer.next().unwrap().unwrap();
        assert_eq!(start.unwrap(), LineColumnLocation::new(1, 1));
        assert_eq!(end.unwrap(), LineColumnLocation::new(1, 7));
        assert_eq!(token, Token::Symbol("symbol"));

        let (start, token, end) = lexer.next().unwrap().unwrap();
        assert_eq!(start.unwrap(), LineColumnLocation::new(1, 8));
        assert_eq!(end.unwrap(), LineColumnLocation::new(1, 19));
        assert_eq!(token, Token::Symbol("TEST_SYMBOL"));

        let (start, token, end) = lexer.next().unwrap().unwrap();
        assert_eq!(start.unwrap(), LineColumnLocation::new(2, 1));
        assert_eq!(end.unwrap(), LineColumnLocation::new(2, 9));
        assert_eq!(token, Token::Symbol("_PRIVATE"));
    }

    // Smoke test over a realistic program: every token must lex without
    // error (the `unwrap` fails otherwise); output is printed for inspection.
    #[test]
    fn fulltext_parse_test() {
        let text = r"def match ether_payload(int[2]) {
    0x0800 => ipv4,
    0x0806 => arp,
    0x86DD => ipv6,
}

def chain main {
    def ext dst = [0:6];
    def ext src = [6:6];
    def ethertype = [12:2];
    
    let ext is_ether_v2 = ethertype >= 0x0800;
    if is_ether_v2 {
        let ext type = ethertype;
        jump ext [14:-0] match ether_payload(ethertype);
    } else {
        stop;
    }
}";

        for (index, token) in TokenLexer::new(text).enumerate() {
            let (start, token, end) = token.unwrap();
            println!("{}: {:?} ({:?} .. {:?})", index, token, start, end);
        }
    }
}
