use pest::Parser;
use pest_derive::Parser;
use std::fmt;

/// Pest-generated lexer entry point.
///
/// The token grammar lives in `lex.pest`; `pest_derive` generates the
/// `Rule` enum (`Rule::program`, `Rule::valid_token`, `Rule::IDENT`, ...)
/// referenced throughout this module from that file.
#[derive(Parser)]
#[grammar = "lex.pest"]
pub struct SysyParser;

// Lexical error categories.
#[derive(Debug, PartialEq, Clone)]
pub enum ErrorType {
    MysteriousCharacter,
}

impl fmt::Display for ErrorType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map each category to its fixed display text, then write it once.
        let text = match self {
            ErrorType::MysteriousCharacter => "Mysterious character error",
        };
        f.write_str(text)
    }
}

/// A single lexical error, tied to the source line it occurred on.
#[derive(Debug)]
#[allow(dead_code)]
pub struct LexError {
    error_type: ErrorType, // error category (currently only MysteriousCharacter)
    line: usize,           // 1-based line number in the source text
    message: String,       // human-readable detail, e.g. the offending character
}

impl fmt::Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): "Error type A" appears to be a fixed, externally
        // expected output format (it ignores `error_type`) — confirm with the
        // consumer before changing this string.
        write!(f, "Error type A at Line {}: {}", self.line, self.message)
    }
}

/// Every token category the lexer can emit.
///
/// Variant names are SCREAMING_SNAKE_CASE (hence the lint allow) to match the
/// grammar rule names in `lex.pest` and the printed token-type strings.
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Clone)]
pub enum TokenType {
    // Keywords
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,
    // Two-character operators
    EQ,
    NEQ,
    LE,
    GE,
    AND,
    OR,
    // Single-character operators
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    ASSIGN,
    LT,
    GT,
    NOT,
    // Separators / brackets
    L_PAREN,
    R_PAREN,
    L_BRACE,
    R_BRACE,
    // NOTE(review): "BRACKT" spelling is kept as-is; it is part of the public
    // name and the printed output.
    L_BRACKT,
    R_BRACKT,
    COMMA,
    SEMICOLON,
    // Identifiers and literals
    IDENT,
    INTEGER_CONST,
}

/// One lexed token: its category, its text, and where it was found.
#[derive(Debug)]
pub struct Token {
    pub token_type: TokenType,
    pub value: String, // matched text; integer literals are stored in decimal form
    pub line: usize,   // 1-based source line of the token's first character
}

/// Returns the 1-based line number of the byte at `offset` in `input`.
///
/// Counts `'\n'` bytes strictly before `offset`. Offsets past the end of the
/// input are clamped to the end. The count is done on the raw bytes rather
/// than by slicing the `&str`: `&input[..offset]` panics when `offset` is not
/// a UTF-8 char boundary, whereas a byte scan never can, and `'\n'` (0x0A)
/// never occurs inside a multi-byte UTF-8 sequence, so the byte-level count
/// equals the character-level count.
fn get_line_number(input: &str, offset: usize) -> usize {
    let end = offset.min(input.len());
    input.as_bytes()[..end].iter().filter(|&&b| b == b'\n').count() + 1
}

/// Normalizes an integer literal to its decimal spelling.
///
/// Handles `0x`/`0X` hexadecimal and leading-zero octal literals; anything
/// else (including literals that fail to parse, e.g. out-of-range or "018")
/// is returned unchanged. A bare "0x"/"0X" normalizes to "0".
fn convert_to_decimal(value: &str) -> String {
    if let Some(hex) = value.strip_prefix("0x").or_else(|| value.strip_prefix("0X")) {
        if hex.is_empty() {
            return "0".to_string();
        }
        return i64::from_str_radix(hex, 16)
            .map(|n| n.to_string())
            .unwrap_or_else(|_| value.to_string());
    }

    let looks_octal =
        value.len() > 1 && value.starts_with('0') && value.bytes().all(|b| b.is_ascii_digit());
    if looks_octal {
        // Parsing with the leading '0' included is harmless in radix 8.
        i64::from_str_radix(value, 8)
            .map(|n| n.to_string())
            .unwrap_or_else(|_| value.to_string())
    } else {
        value.to_string()
    }
}

/// Converts one pest pair (and, recursively, its children) into tokens,
/// appending any lexical errors to `errors`. `input` is the full source text
/// used only to compute line numbers from span offsets.
fn process_pair(
    pair: pest::iterators::Pair<Rule>,
    input: &str,
    tokens: &mut Vec<Token>,
    errors: &mut Vec<LexError>,
) {
    let rule = pair.as_rule();
    let span = pair.as_span();
    let value = pair.as_str().to_string();
    let line = get_line_number(input, span.start());

    // Rules whose token type is fixed regardless of the matched text.
    let fixed_type = match rule {
        Rule::CONST => Some(TokenType::CONST),
        Rule::INT => Some(TokenType::INT),
        Rule::VOID => Some(TokenType::VOID),
        Rule::IF => Some(TokenType::IF),
        Rule::ELSE => Some(TokenType::ELSE),
        Rule::WHILE => Some(TokenType::WHILE),
        Rule::BREAK => Some(TokenType::BREAK),
        Rule::CONTINUE => Some(TokenType::CONTINUE),
        Rule::RETURN => Some(TokenType::RETURN),
        Rule::IDENT => Some(TokenType::IDENT),
        _ => None,
    };

    if let Some(token_type) = fixed_type {
        tokens.push(Token {
            token_type,
            value,
            line,
        });
    } else {
        match rule {
            Rule::INTEGER_CONST => {
                // Integer literals are normalized to decimal before storing.
                tokens.push(Token {
                    token_type: TokenType::INTEGER_CONST,
                    value: convert_to_decimal(&value),
                    line,
                });
            }
            Rule::OPERATOR => {
                let token_type = match value.as_str() {
                    "==" => TokenType::EQ,
                    "!=" => TokenType::NEQ,
                    "<=" => TokenType::LE,
                    ">=" => TokenType::GE,
                    "&&" => TokenType::AND,
                    "||" => TokenType::OR,
                    "+" => TokenType::PLUS,
                    "-" => TokenType::MINUS,
                    "*" => TokenType::MUL,
                    "/" => TokenType::DIV,
                    "%" => TokenType::MOD,
                    "=" => TokenType::ASSIGN,
                    "<" => TokenType::LT,
                    ">" => TokenType::GT,
                    "!" => TokenType::NOT,
                    // Unknown operator text: bail out, skipping this pair's
                    // children too (matches the original early return).
                    _ => return,
                };
                tokens.push(Token {
                    token_type,
                    value,
                    line,
                });
            }
            Rule::SEPARATOR => {
                let token_type = match value.as_str() {
                    "(" => TokenType::L_PAREN,
                    ")" => TokenType::R_PAREN,
                    "{" => TokenType::L_BRACE,
                    "}" => TokenType::R_BRACE,
                    "[" => TokenType::L_BRACKT,
                    "]" => TokenType::R_BRACKT,
                    "," => TokenType::COMMA,
                    ";" => TokenType::SEMICOLON,
                    // Unknown separator text: same early-out as operators.
                    _ => return,
                };
                tokens.push(Token {
                    token_type,
                    value,
                    line,
                });
            }
            Rule::ERROR_CHAR => {
                // Unrecognized character: record a lexical error.
                errors.push(LexError {
                    error_type: ErrorType::MysteriousCharacter,
                    line,
                    message: format!("Mysterious character \"{}\"", value),
                });
            }
            _ => {}
        }
    }

    // Recurse so composite rules still surface the tokens of their children.
    for inner in pair.into_inner() {
        process_pair(inner, input, tokens, errors);
    }
}

/// Lexes `input`, returning all recognized tokens and all lexical errors.
///
/// Primary path: parse the whole input with the pest grammar. If that parse
/// fails outright, fall back to a manual character-by-character scan so we
/// can still recover tokens and report the offending characters.
pub fn tokenize(input: &str) -> (Vec<Token>, Vec<LexError>) {
    let mut tokens = Vec::new();
    let mut errors = Vec::new();

    if let Ok(pairs) = SysyParser::parse(Rule::program, input) {
        for pair in pairs {
            process_pair(pair, input, &mut tokens, &mut errors);
        }
    } else {
        manual_tokenize(input, &mut tokens, &mut errors);
    }

    (tokens, errors)
}

/// Fallback scanner used when the whole-program pest parse fails: walks the
/// input character by character, skipping whitespace and comments, greedily
/// matching the longest valid token at each position, and recording a
/// `MysteriousCharacter` error for anything unmatchable.
fn manual_tokenize(input: &str, tokens: &mut Vec<Token>, errors: &mut Vec<LexError>) {
    let chars: Vec<char> = input.chars().collect();
    // Byte offset of each char. `pos` below is a *char* index, but
    // `get_line_number` expects a *byte* offset into `input`; using the char
    // index directly mis-counts lines on non-ASCII input (the original code's
    // bug), so we translate through this table.
    let byte_offsets: Vec<usize> = input.char_indices().map(|(i, _)| i).collect();
    let mut pos = 0;

    while pos < chars.len() {
        let line = get_line_number(input, byte_offsets[pos]);

        // Skip whitespace.
        if chars[pos].is_whitespace() {
            pos += 1;
            continue;
        }

        // Skip line comments.
        if pos + 1 < chars.len() && chars[pos] == '/' && chars[pos + 1] == '/' {
            while pos < chars.len() && chars[pos] != '\n' {
                pos += 1;
            }
            continue;
        }

        // Skip block comments.
        if pos + 1 < chars.len() && chars[pos] == '/' && chars[pos + 1] == '*' {
            pos += 2;
            loop {
                if pos + 1 >= chars.len() {
                    // Unterminated block comment: swallow the rest of the
                    // input instead of re-tokenizing its trailing character.
                    pos = chars.len();
                    break;
                }
                if chars[pos] == '*' && chars[pos + 1] == '/' {
                    pos += 2;
                    break;
                }
                pos += 1;
            }
            continue;
        }

        // Greedy match: probe substrings from longest (20 chars) to shortest.
        let mut best: Option<(Token, usize)> = None;
        for len in (1..=20).rev() {
            if pos + len > chars.len() {
                continue;
            }

            let substr: String = chars[pos..pos + len].iter().collect();

            if let Ok(mut pairs) = SysyParser::parse(Rule::valid_token, &substr) {
                if let Some(pair) = pairs.next() {
                    let mut probe_tokens = Vec::new();
                    let mut probe_errors = Vec::new();
                    // The pair's span offsets are relative to `substr`, so
                    // pass `substr` (not `input`); the probe's line number is
                    // meaningless either way and is overwritten below.
                    process_pair(pair, &substr, &mut probe_tokens, &mut probe_errors);

                    if !probe_tokens.is_empty() && probe_errors.is_empty() {
                        best = probe_tokens.into_iter().next().map(|t| (t, len));
                        break; // longest match wins
                    }
                }
            }
        }

        if let Some((mut token, len)) = best {
            // Replace the probe-relative line with the real one.
            token.line = line;
            tokens.push(token);
            pos += len;
        } else {
            // No valid token starts here: report the character and move on.
            errors.push(LexError {
                error_type: ErrorType::MysteriousCharacter,
                line,
                message: format!("Mysterious character \"{}\"", chars[pos]),
            });
            pos += 1;
        }
    }
}

pub fn print_tokens(tokens: &[Token]) {
    for token in tokens {
        let type_str = match token.token_type {
            TokenType::CONST => "CONST",
            TokenType::INT => "INT",
            TokenType::VOID => "VOID",
            TokenType::IF => "IF",
            TokenType::ELSE => "ELSE",
            TokenType::WHILE => "WHILE",
            TokenType::BREAK => "BREAK",
            TokenType::CONTINUE => "CONTINUE",
            TokenType::RETURN => "RETURN",
            TokenType::EQ => "EQ",
            TokenType::NEQ => "NEQ",
            TokenType::LE => "LE",
            TokenType::GE => "GE",
            TokenType::AND => "AND",
            TokenType::OR => "OR",
            TokenType::PLUS => "PLUS",
            TokenType::MINUS => "MINUS",
            TokenType::MUL => "MUL",
            TokenType::DIV => "DIV",
            TokenType::MOD => "MOD",
            TokenType::ASSIGN => "ASSIGN",
            TokenType::LT => "LT",
            TokenType::GT => "GT",
            TokenType::NOT => "NOT",
            TokenType::L_PAREN => "L_PAREN",
            TokenType::R_PAREN => "R_PAREN",
            TokenType::L_BRACE => "L_BRACE",
            TokenType::R_BRACE => "R_BRACE",
            TokenType::L_BRACKT => "L_BRACKT",
            TokenType::R_BRACKT => "R_BRACKT",
            TokenType::COMMA => "COMMA",
            TokenType::SEMICOLON => "SEMICOLON",
            TokenType::IDENT => "IDENT",
            TokenType::INTEGER_CONST => "INTEGER_CONST",
        };
        eprintln!("{} {} at Line {}.", type_str, token.value, token.line);
    }
}
