use pest::iterators::Pair;
use std::io::{self, Write};

use pest::Parser;
use pest_derive::Parser;

/// Lexer generated by pest from the `gm.pest` grammar; the derive also
/// produces the `Rule` enum used throughout this module.
#[derive(Parser)]
#[grammar = "gm.pest"]
pub struct Lexer;

/// Lexes `source`, printing the token listing to stdout and any lexical
/// errors to stderr.
pub fn run_lexer(source: &str) {
    // Fix: tokens belong on stdout. Passing stderr for both writers (as
    // before) defeated the out/err split that run_lexer_to implements and
    // that the golden-file tests check separately.
    run_lexer_to(source, &mut io::stdout(), &mut io::stderr());
}

/// Lexes `source` and writes the token listing to `out` on success, or the
/// collected "Error type A" diagnostics to `err` when any lexical error was
/// found. A grammar-level parse failure is reported on `err` as
/// "Error type B".
fn run_lexer_to<W: Write, E: Write>(source: &str, out: &mut W, err: &mut E) {
    // Bail out early on a parse failure of the whole input.
    let pairs = match Lexer::parse(Rule::tokens, source) {
        Ok(pairs) => pairs,
        Err(e) => {
            writeln!(err, "Error type B: {}", e).ok();
            return;
        }
    };

    let mut tokens: Vec<u8> = Vec::new();
    let mut errors: Vec<u8> = Vec::new();
    let mut failed = false;
    for pair in pairs {
        collect_tokens(pair, &mut tokens, &mut errors, &mut failed);
    }

    // All-or-nothing output: a single lexical error suppresses the token
    // listing and emits only the error report.
    let _ = if failed {
        err.write_all(&errors)
    } else {
        out.write_all(&tokens)
    };
}

/// Maps a reserved-word lexeme to its token name, which is simply the
/// keyword upper-cased (e.g. `"while"` -> `"WHILE"`). Returns `None` when
/// `s` is not one of the language's keywords.
fn keyword_mapping(s: &str) -> Option<String> {
    const KEYWORDS: [&str; 9] = [
        "if", "else", "while", "return", "int", "void", "const", "break", "continue",
    ];
    KEYWORDS
        .iter()
        .find(|&&kw| kw == s)
        .map(|kw| kw.to_ascii_uppercase())
}

/// Recursively walks one parse pair and its children, appending formatted
/// token lines to `token_out` and "Error type A" diagnostics to `error_out`.
///
/// `has_error` is sticky: once any error is seen, later token lines are no
/// longer written (errors still are), supporting the caller's all-or-nothing
/// choice between the token listing and the error listing.
fn collect_tokens<TW: Write, EW: Write>(
    pair: Pair<Rule>,
    token_out: &mut TW,
    error_out: &mut EW,
    has_error: &mut bool,
) {
    match pair.as_rule() {
        // The root rule itself produces no token; its children do.
        Rule::tokens => {}
        // Trivia: never emitted.
        Rule::WHITESPACE | Rule::LINE_COMMENT | Rule::MULTILINE_COMMENT | Rule::EOI => {}
        Rule::ERROR => {
            *has_error = true;
            let _ = writeln!(
                error_out,
                "Error type A at Line {}: unexpected token '{}'",
                pair.as_span().start_pos().line_col().0,
                pair.as_str()
            );
        }
        Rule::IDENT => {
            let value = pair.as_str();
            // Keywords lex as IDENT and are re-classified here; they print
            // their upper-cased name instead of the rule name.
            if let Some(kw) = keyword_mapping(value) {
                if !*has_error {
                    let _ = writeln!(
                        token_out,
                        "{} {} at Line {}.",
                        kw,
                        value,
                        pair.as_span().start_pos().line_col().0
                    );
                }
            } else {
                if !*has_error {
                    let _ = writeln!(
                        token_out,
                        "{:?} {} at Line {}.",
                        pair.as_rule(),
                        value,
                        pair.as_span().start_pos().line_col().0
                    );
                }
            }
        }
        Rule::INTEGER_CONST => {
            let text = pair.as_str();
            // Base inferred from the prefix: 0x/0X -> hex, a leading 0 with
            // more digits -> octal, anything else -> decimal. A lone "0"
            // falls through to the decimal branch.
            let value = if text.starts_with("0x") || text.starts_with("0X") {
                i64::from_str_radix(&text[2..], 16)
            } else if text.starts_with('0') && text.len() > 1 {
                i64::from_str_radix(&text[1..], 8)
            } else {
                text.parse::<i64>()
            };
            match value {
                Ok(v) => {
                    if !*has_error {
                        // The token line carries the decoded value, not the
                        // original lexeme.
                        let _ = writeln!(
                            token_out,
                            "{:?} {} at Line {}.",
                            pair.as_rule(),
                            v,
                            pair.as_span().start_pos().line_col().0
                        );
                    }
                }
                Err(_) => {
                    // e.g. "09" (invalid octal digit) or an overflowing
                    // literal.
                    *has_error = true;
                    let _ = writeln!(
                        error_out,
                        "Error type A at Line {}: invalid integer '{}'",
                        pair.as_span().start_pos().line_col().0,
                        text
                    );
                }
            }
        }
        // Every other rule (operators, punctuation, ...) prints its rule
        // name and lexeme verbatim.
        _ => {
            if !*has_error {
                let _ = writeln!(
                    token_out,
                    "{:?} {} at Line {}.",
                    pair.as_rule(),
                    pair.as_str(),
                    pair.as_span().start_pos().line_col().0
                );
            }
        }
    }

    // Depth-first descent so nested pairs are reported in source order.
    for inner in pair.into_inner() {
        collect_tokens(inner, token_out, error_out, has_error);
    }
}

// Golden-file tests: each source file in `examples/` that has a sibling
// `<name>.expected` file is lexed and the produced output is compared
// verbatim against the expected file.
#[cfg(test)]
mod tests {
    use diffy::PatchFormatter;

    use super::*;
    use std::fs;
    use std::path::Path;

    #[test]
    fn examples_expect() {
        let dir = Path::new("examples");
        if !dir.exists() {
            panic!("examples directory missing");
        }

        for entry in fs::read_dir(dir).expect("read_dir failed") {
            let path = entry.unwrap().path();
            if !path.is_file() {
                continue;
            }
            if path.extension().and_then(|s| s.to_str()) == Some("expected") {
                continue; // skip expected files themselves
            }
            // The expected output lives next to the source, differing only
            // in extension (e.g. `foo.gm` -> `foo.expected`).
            let mut expected_path = path.clone();
            expected_path.set_extension("expected");
            if !expected_path.exists() {
                continue; // silently skip if no expected counterpart
            }

            let source = fs::read_to_string(&path).expect("cannot read source");
            let mut out_buf: Vec<u8> = Vec::new();
            let mut err_buf: Vec<u8> = Vec::new();
            run_lexer_to(&source, &mut out_buf, &mut err_buf);
            // run_lexer_to writes to exactly one of the two buffers, so an
            // expected file describes either the token stream or the error
            // listing; prefer the error buffer when it is non-empty.
            let got = if err_buf.is_empty() {
                String::from_utf8(out_buf).expect("non-utf8 output")
            } else {
                String::from_utf8(err_buf).expect("non-utf8 output")
            };
            let expected = fs::read_to_string(&expected_path).expect("cannot read expected");

            if got != expected {
                use diffy::create_patch;
                let patch = create_patch(&expected, &got);
                // A colored unified diff makes the mismatch readable in the
                // test failure message.
                let f = PatchFormatter::new().with_color();
                panic!("{}", f.fmt_patch(&patch));
            }
        }
    }
}
