//! Lexer pass: reads the configured source file and turns it into a stream of tokens.

use crate::prelude::*;
use std::io::Write;

pub mod token;
pub mod token_fmt;
pub mod token_impl;

pub use token::{Token, TokenPos, TokenType};
pub use token_fmt::{collect_lexer_errors, format_token, write_formatted_tokens};
pub use token_impl::Lexer;

/// Compiler pass that tokenizes the source file named in the compiler
/// settings and stores the resulting tokens on the [`CompilerContext`].
/// Stateless; `derive_new` generates a `LexerPass::new()` constructor.
#[derive(derive_new::new, Debug)]
pub struct LexerPass;

impl CompilerPass for LexerPass {
    /// Identifier of this pass, used for diagnostics and pass selection.
    fn name(&self) -> &'static str {
        "lexer"
    }

    /// Reads the source file from `ctx.settings.sources`, tokenizes it, and
    /// publishes the token stream on the context.
    ///
    /// # Errors
    /// Returns a lexer error (reported at position 0:0) when the input file
    /// is missing or unreadable, and propagates any error produced by
    /// `Lexer::tokenize`.
    fn run(&mut self, ctx: &mut CompilerContext) -> CEResult<()> {
        let input_file = &ctx.settings.sources;
        if !input_file.exists() {
            return Err(CErr::lexer_err(0, 0, "Input file does not exist"));
        }
        let source_code = std::fs::read_to_string(input_file)
            .map_err(|e| CErr::lexer_err(0, 0, format!("Failed to read file: {}", e)))?;

        let mut lexer = Lexer::new(source_code);
        let tokens = lexer.tokenize()?;
        // Mark the pipeline stage before publishing the tokens so downstream
        // passes observe a consistent context.
        ctx.cur_stage = CompilerStage::Lexer;
        ctx.tokens = Some(tokens);

        Ok(())
    }

    fn stage(&self) -> CompilerStage {
        CompilerStage::Lexer
    }

    /// Writes this pass's output: if the token stream contains lexer errors,
    /// only the error diagnostics are written; otherwise the formatted token
    /// listing is written.
    ///
    /// # Errors
    /// Fails when `run` has not populated `ctx.tokens` yet, or when writing
    /// to `writer` fails.
    fn write_output(&self, ctx: &CompilerContext, writer: &mut dyn Write) -> CEResult<()> {
        let tokens = ctx
            .tokens
            .as_ref()
            .ok_or_else(|| CErr::lexer_err(0, 0, "No tokens available"))?;

        // Check whether lexing produced any error tokens.
        let errors = collect_lexer_errors(tokens);
        if !errors.is_empty() {
            // On error, emit only the diagnostics.
            for error in errors {
                writeln!(writer, "{}", error)
                    .map_err(|e| CErr::lexer_err(0, 0, format!("Failed to write output: {}", e)))?;
            }
        } else {
            // Otherwise emit the formatted token list.
            write_formatted_tokens(tokens, writer, false)
                .map_err(|e| CErr::lexer_err(0, 0, format!("Failed to write output: {}", e)))?;
        }

        Ok(())
    }
}
