// NOTE(review): the crate-wide `allow(unused)` suppresses all unused-item
// warnings (several imports below appear unused); consider narrowing or
// removing it once the imports are pruned.
#![allow(unused)]
/// Lexical analyzer.
pub mod lexer;
/// Syntax analyzer (parser).
pub mod parser;
/// Token definitions and related types.
pub mod token;
/// Abstract syntax tree.
pub mod ast;
/// Semantic analyzer.
pub mod semantic_analyzer;
/// Instruction set.
pub mod instruction;
/// Virtual machine.
pub mod vm;
/// I/O.
pub mod io;
// Instruction-file parser (module currently disabled).
// NOTE(review): this was a `///` doc comment; as a doc comment it would have
// attached to the next live item (`code_generator`) in rustdoc.
// pub mod executer;
/// Code generator.
pub mod code_generator;
// Decoder (module currently disabled).
// NOTE(review): this was a `///` doc comment; as a doc comment it would have
// attached to the next live item (`except`) in rustdoc.
// pub mod decoder;
/// Errors / exceptions.
pub mod except;
pub mod core;

use std::{collections::HashMap, mem::size_of, sync::LazyLock};

use ast::AST;
use code_generator::CodeGenerator;
use except::{invalid_char, invalid_token, INDEX_OUT_OF_BOUNDS};
use instruction::Instruction;
use lexer::Lexer;
use parser::Parser;
use semantic_analyzer::SemanticAnalyzer;
use token::{LexerStage, Token, TokenType};
use vm::VM;

use crate::common::string::concat;
use crate::hashmap;

lazy_static!{
    #[derive(Debug)]
    static ref KEYWORD_MAP : HashMap<&'static str, TokenType> = {
        hashmap!("void" => TokenType::Void, "int" => TokenType::Int, "if" => TokenType::If,
        "else" => TokenType::Else, "while" => TokenType::While, "return" => TokenType::Return)
    };
}

/// Name of the synthetic global scope (presumably used as the enclosing
/// "function" name for top-level declarations — confirm against the
/// semantic analyzer / code generator).
const GLOBAL: &str = "__GLOBAL__";
/// Name of the program entry-point function.
const MAIN: &str = "main";

/// Smoke test: checks that the compile-time crate root path is available and
/// prints it (visible via `cargo test -- --nocapture`).
///
/// The original version only printed the path and asserted nothing, so it
/// could never fail; it also carried commented-out scratch code (Lexer/Parser
/// driver), which has been removed — recover it from version control if needed.
#[test]
fn test(){
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    assert!(!manifest_dir.is_empty(), "CARGO_MANIFEST_DIR must not be empty");
    println!("cur_dir:{:#?}", manifest_dir);
}
