use std::{
    fs,
    path::{Path, PathBuf},
};

use crate::{
    data::{Identifier, IntegerConstant, KeywordType, SymbolType},
    error::CompilerError,
    util,
};

use anyhow::{Result, anyhow};
/// Token represents the type of a token in the Jack language.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Token {
    /// A Jack keyword (see `KeywordType`).
    Keyword(KeywordType),
    /// A single-character Jack symbol (see `SymbolType`).
    Symbol(SymbolType),
    /// A user-defined name (class, subroutine or variable).
    Identifier(Identifier),
    /// An integer constant literal.
    IntConst(IntegerConstant),
    /// A string constant; holds the text without the surrounding quotes
    /// (the tokenizer never pushes the `"` characters into the buffer).
    // NOTE(review): almost certainly a typo for `StringConst`; renaming the
    // variant would touch every usage across the crate, so it is left as-is.
    StringCost(String),
}
impl Token {
    /// Converts the token to its XML representation.
    pub(crate) fn to_xml(&self) -> String {
        // Map each variant to its XML tag name and text content first,
        // then render both through the shared helper in one place.
        let (tag, text) = match self {
            Token::Keyword(keyword_type) => ("keyword", keyword_type.to_str().to_string()),
            Token::Symbol(symbol_type) => ("symbol", symbol_type.char().to_string()),
            Token::Identifier(value) => ("identifier", value.as_str().to_string()),
            Token::IntConst(value) => ("integerConstant", value.to_string()),
            Token::StringCost(value) => ("stringConstant", value.clone()),
        };
        util::xml_item(tag, &text)
    }
}

/// Drives the translation of a single `.jack` file: owns the input path
/// and every output path derived from it.
pub(crate) struct Tokenizer {
    /// The name of the input file (the file stem, without extension).
    _jack_file_name: String,
    /// The path to the input file.
    pub(crate) input_path: PathBuf,
    /// The path to the token output file (`<name>T.xml`).
    pub(crate) output_path_token: PathBuf,
    /// The path to the grammar output file (`<name>.xml`).
    pub(crate) output_path_grammar: PathBuf,
    /// The path to the VM output file (`<name>.vm`).
    pub(crate) output_path_vm: PathBuf,
}

impl Tokenizer {
    /// Translate a jack file — or every `.jack` file in a directory — to
    /// token xml files, returning the paths of all files written.
    pub(crate) fn translate<P: AsRef<Path>>(jack_file: P) -> Result<Vec<PathBuf>> {
        let jack_file = jack_file.as_ref();
        // Single file: translate it directly.
        if !jack_file.is_dir() {
            return Ok(vec![Self::translate_file(jack_file)?]);
        }
        // Directory: translate each entry whose extension is `jack`.
        let mut output_paths = Vec::new();
        for entry in jack_file.read_dir()?.flatten() {
            let path = entry.path();
            let is_jack = path.extension().unwrap_or_default().to_string_lossy() == "jack";
            if is_jack {
                output_paths.push(Self::translate_file(&path)?);
            }
        }
        Ok(output_paths)
    }
    /// Translate one jack file to its token xml file and return the path
    /// of the xml file that was written.
    pub(crate) fn translate_file(jack_file: &Path) -> Result<PathBuf> {
        let translator = Self::new(jack_file)?;
        // Render every token as xml inside a top-level <tokens> element.
        // (Renamed from `vm_code`: this buffer holds token XML, not VM code.)
        let mut xml_lines = vec!["<tokens>".to_string()];
        for token in translator.tokens()? {
            xml_lines.push(token.to_xml());
        }
        xml_lines.push("</tokens>".into());
        let xml = xml_lines.join("\n");
        fs::write(&translator.output_path_token, util::format_xml(&xml)?)?;
        Ok(translator.output_path_token)
    }
    /// Build a `Tokenizer` for `jack_file`, deriving every output path
    /// from the input path.
    ///
    /// For an input `Foo.jack` this produces `FooT.xml` (tokens),
    /// `Foo.xml` (grammar) and `Foo.vm` (vm code) next to the input.
    ///
    /// # Errors
    ///
    /// Fails when the file stem cannot be read, or when the input's
    /// extension is not `jack`.
    pub(crate) fn new<P: AsRef<Path>>(jack_file: P) -> Result<Self> {
        let jack_file = jack_file.as_ref();
        let input_path = jack_file.to_path_buf();
        let file_name = input_path
            .file_stem()
            // `ok_or_else` avoids building the error value on the success path.
            .ok_or_else(|| anyhow!("read input file's name fail"))?
            .to_string_lossy()
            .into_owned();
        if input_path.extension().unwrap_or_default().to_string_lossy() != "jack" {
            // `Path::display()` renders the same text as going through
            // `as_os_str()` and works on older toolchains.
            Err(CompilerError::FileExt(input_path.display().to_string()))?;
        }
        // `Foo.jack` -> `Foo.xml`
        let output_path_grammar = jack_file.with_extension("xml");

        // `Foo.jack` -> `FooT.xml`
        let mut output_path_token = output_path_grammar.clone();
        output_path_token.set_file_name(format!("{file_name}T"));
        output_path_token.set_extension("xml");

        // `Foo.jack` -> `Foo.vm`
        let output_path_vm = output_path_grammar.with_extension("vm");

        Ok(Self {
            _jack_file_name: file_name,
            input_path,
            output_path_token,
            output_path_grammar,
            output_path_vm,
        })
    }

    /// Parse the input file and return a vector of tokens.
    ///
    /// Comment handling: whole-line `//` comments, trailing `//` comments
    /// (only outside string constants), single-line `/* ... */` lines and
    /// multi-line `/* ... */` / `/** ... */` blocks are all skipped.
    ///
    /// # Errors
    ///
    /// Fails on an unterminated string constant or on a word that matches
    /// no Jack lexical category; errors report 1-based line numbers.
    pub(super) fn tokens(&self) -> Result<Vec<Token>> {
        let mut tokens = Vec::new();
        // True while we are inside a multi-line block comment.
        let mut is_continue_comments = false;
        for (line_no, line) in fs::read_to_string(&self.input_path)?.lines().enumerate() {
            // 1-based line number for human-readable error messages
            // (`enumerate` is 0-based, which previously made errors off by one).
            let human_line = line_no + 1;
            let line = line.trim();

            // A block-comment opener without its closer on the same line
            // starts a multi-line comment. Checking `/*` (not just `/**`)
            // also covers plain multi-line comments.
            if line.starts_with("/*") && !line.ends_with("*/") {
                is_continue_comments = true;
                continue;
            } else if is_continue_comments && line.ends_with("*/") {
                is_continue_comments = false;
                continue;
            }

            if is_continue_comments
                || line.is_empty()
                || line.starts_with("//")
                || (line.starts_with("/*") && line.ends_with("*/"))
            {
                continue;
            }

            let mut word = String::new();
            // True while scanning the inside of a string constant.
            let mut is_string_flag = false;
            // Set when the accumulated `word` is complete and must be parsed.
            let mut do_parse_word_flag = false;
            let mut chars = line.chars().peekable();
            while let Some(char) = chars.next() {
                match char {
                    // `//` outside a string constant starts a trailing
                    // comment: ignore the rest of the line. Detecting it
                    // here (instead of pre-stripping the line) keeps `//`
                    // inside string constants such as "http://x" intact.
                    '/' if !is_string_flag && chars.peek() == Some(&'/') => break,
                    // Whitespace (spaces and tabs) terminates the current word.
                    c if c.is_whitespace() && !is_string_flag => {
                        if !word.is_empty() {
                            do_parse_word_flag = true;
                        }
                    }
                    '"' => {
                        if is_string_flag {
                            // Closing quote: the buffered text is the constant.
                            tokens.push(Token::StringCost(word.clone()));
                            do_parse_word_flag = false;
                            word.clear();
                        } else if !word.is_empty() {
                            // An opening quote also terminates a pending word.
                            do_parse_word_flag = true;
                        }
                        is_string_flag = !is_string_flag
                    }
                    _ if is_string_flag => {
                        word.push(char);
                    }
                    _ => {
                        // A symbol terminates the pending word; both become tokens.
                        if let Some(symbol_type) = SymbolType::parse(&char) {
                            if !word.is_empty() {
                                tokens.push(self.parse_word(&word, human_line)?);
                                word.clear();
                            }
                            tokens.push(Token::Symbol(symbol_type));
                            do_parse_word_flag = false;
                        } else {
                            word.push(char);
                        }
                    }
                }
                if do_parse_word_flag && !word.is_empty() {
                    tokens.push(self.parse_word(&word, human_line)?);
                    word.clear();
                    do_parse_word_flag = false;
                }
            }
            if is_string_flag {
                return Err(anyhow!("Line {human_line} : Miss the end \" | [{word}] ."));
            }
            // A word still pending at end of line is the last token on it.
            if !word.is_empty() {
                tokens.push(self.parse_word(&word, human_line)?);
                word.clear();
            }
        }

        Ok(tokens)
    }
    /// Parse a word into a keyword, integer constant or identifier token.
    ///
    /// String constants and symbols are already handled by the caller
    /// ([`Self::tokens`]); `line_no` is used only to label the error.
    ///
    /// # Errors
    ///
    /// Fails when `word` matches no Jack lexical category.
    fn parse_word(&self, word: &str, line_no: usize) -> Result<Token> {
        if let Some(keyword_type) = KeywordType::parse(word) {
            return Ok(Token::Keyword(keyword_type));
        }
        if let Some(integer) = IntegerConstant::parse(word) {
            return Ok(Token::IntConst(integer));
        }
        if let Some(identifier) = Identifier::parse(word) {
            Ok(Token::Identifier(identifier))
        } else {
            // Fixed the double negative ("isn't not") in the error message.
            Err(anyhow!(
                "Line {line_no} : It isn't a jack lexicon | {word} ."
            ))
        }
    }
}

#[cfg(test)]
mod test;
