use std::{
    error::Error,
    fmt::{self, Debug, Display, Formatter},
    num::ParseIntError,
};

use lalrpop_util::{lalrpop_mod, ParseError};
use num_bigint::ParseBigIntError;

use crate::{
    ast::ProtoFile,
    error::ErrorLocation,
    lexer::{LexingError, LineLocation, Token, TokenLexer},
};

// Pull in the lalrpop-generated parser module (generated from the
// corresponding `.lalrpop` grammar file at build time).
lalrpop_mod!(grammar);

/// Spanned token triple in the shape lalrpop expects: (start, token, end).
type GrammarToken<'input> = (LineLocation, Token<'input>, LineLocation);
/// A source span expressed as (start, end) positions.
type PositionRange = (LineLocation, LineLocation);

/// Unified error type for parsing a proto file: wraps lexer failures,
/// literal-conversion failures from grammar actions, and the structural
/// parse failures reported by lalrpop.
///
/// `Debug` is implemented manually (delegating to `Display`) rather than
/// derived, so it is deliberately absent from this derive list.
#[derive(Clone, PartialEq, Eq)]
pub enum GrammarError<'input> {
    /// An error forwarded unchanged from the token lexer.
    Lexing(LexingError<LineLocation>),
    /// An integer literal used as a size failed to parse
    /// (presumably the `N` in `int[N]` — confirm against the grammar).
    IntSizeInvalid {
        range: PositionRange,
        error: ParseIntError,
    },
    /// An integer literal inside a range expression failed to parse.
    RangeInvalid {
        range: PositionRange,
        error: ParseIntError,
    },
    /// A numeric literal failed to parse as a big integer.
    NumberInvalid {
        range: PositionRange,
        error: ParseBigIntError,
    },
    /// lalrpop `ParseError::InvalidToken` equivalent.
    InvalidToken {
        location: LineLocation,
    },
    /// Input ended where one of `expected` token kinds was required.
    UnrecognizedEof {
        location: LineLocation,
        expected: Vec<String>,
    },
    /// An unexpected token appeared where one of `expected` was required.
    UnrecognizedToken {
        token: GrammarToken<'input>,
        expected: Vec<String>,
    },
    /// A trailing token appeared after a complete parse.
    ExtraToken {
        token: GrammarToken<'input>,
    },
}

impl<'input> From<LexingError<LineLocation>> for GrammarError<'input> {
    fn from(value: LexingError<LineLocation>) -> Self {
        GrammarError::Lexing(value)
    }
}

impl<'input> From<ParseError<LineLocation, Token<'input>, GrammarError<'input>>>
    for GrammarError<'input>
{
    fn from(value: ParseError<LineLocation, Token<'input>, GrammarError<'input>>) -> Self {
        match value {
            ParseError::InvalidToken { location } => GrammarError::InvalidToken { location },
            ParseError::UnrecognizedEof { location, expected } => {
                GrammarError::UnrecognizedEof { location, expected }
            }
            ParseError::UnrecognizedToken { token, expected } => {
                GrammarError::UnrecognizedToken { token, expected }
            }
            ParseError::ExtraToken { token } => GrammarError::ExtraToken { token },
            ParseError::User { error } => error,
        }
    }
}

impl<'input> Display for GrammarError<'input> {
    /// Human-readable, single-line message for each error variant.
    ///
    /// Position information is intentionally omitted here; callers obtain it
    /// separately via `GrammarError::location`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Since `self` is already a reference, match ergonomics bind fields
        // by reference automatically — the explicit `ref` keywords the
        // original patterns carried were redundant and have been dropped.
        match self {
            GrammarError::InvalidToken { .. } => {
                write!(f, "Invalid token")
            }
            GrammarError::UnrecognizedEof { expected, .. } => write!(
                f,
                "Unrecognized EOF found. Expected: {}",
                expected.join(", ")
            ),
            GrammarError::UnrecognizedToken { expected, .. } => {
                write!(
                    f,
                    "Unrecognized token found. Expected: {}",
                    expected.join(", ")
                )
            }
            GrammarError::ExtraToken {
                token: (_, token, _),
            } => write!(f, "Extra token {:?} found", token),
            // Lexer errors carry their own message; delegate directly.
            GrammarError::Lexing(lexing) => Display::fmt(lexing, f),
            GrammarError::IntSizeInvalid { error, .. } => write!(f, "Bad integer size: {}", error),
            GrammarError::RangeInvalid { error, .. } => write!(f, "Bad range: {}", error),
            GrammarError::NumberInvalid { error, .. } => write!(f, "Bad number: {}", error),
        }
    }
}

impl<'input> Debug for GrammarError<'input> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

// `GrammarError` implements `Debug` + `Display` above, so the default
// `Error` trait methods (e.g. `source()` returning `None`) suffice.
impl<'input> Error for GrammarError<'input> {}

impl<'input> GrammarError<'input> {
    /// Best-effort source location for this error.
    ///
    /// Returns `None` when any required position component is absent
    /// (each endpoint is itself optional, hence the `?` propagation).
    pub fn location(&self) -> Option<ErrorLocation> {
        // A range needs both of its endpoints to be known.
        fn span_of_range((start, end): &PositionRange) -> Option<ErrorLocation> {
            Some(ErrorLocation::Range((*start)?, (*end)?))
        }
        // A token's span is its surrounding (start, end) pair.
        fn span_of_token((start, _, end): &GrammarToken) -> Option<ErrorLocation> {
            Some(ErrorLocation::Range((*start)?, (*end)?))
        }
        match self {
            Self::Lexing(error) => error.location(),
            Self::IntSizeInvalid { range, .. }
            | Self::RangeInvalid { range, .. }
            | Self::NumberInvalid { range, .. } => span_of_range(range),
            Self::InvalidToken { location } | Self::UnrecognizedEof { location, .. } => {
                location.map(ErrorLocation::Point)
            }
            Self::UnrecognizedToken { token, .. } | Self::ExtraToken { token } => {
                span_of_token(token)
            }
        }
    }
}

impl<'input> ProtoFile<'input> {
    pub fn parse(input: TokenLexer<'_>) -> Result<ProtoFile<'_>, GrammarError<'_>> {
        grammar::ProtoParser::new()
            .parse(input.into_iter().map(|item| item.map_err(|err| err.into())))
            .map_err(|err| err.into())
    }
}

#[cfg(test)]
mod test {
    use crate::{ast::ProtoFile, lexer::TokenLexer};

    /// End-to-end smoke test: lex and parse a representative protocol
    /// description exercising match definitions, chain definitions,
    /// extractions, conditionals, and jumps.
    #[test]
    fn fulltext_parse_test() {
        let source = r"def match ether_payload(int[2]) {
    0x0800 => ipv4,
    0x0806 => arp,
    0x86DD => ipv6,
}

def chain main {
    def ext dst = [0:6];
    def ext src = [6:6];
    def ethertype = [12:2];
    
    let ext is_ether_v2 = ethertype >= 0x0800;
    if is_ether_v2 {
        let ext type = ethertype;
        jump ext [14:-0] match ether_payload(ethertype);
    } else {
        stop;
    }
}";

        let tokens = TokenLexer::new(source);
        let parsed = ProtoFile::parse(tokens).unwrap();
        println!("{:#2?}", parsed);
    }
}
