#include "Util.h"
#include "Symbol.h"
#include "FSM.h"

#include <iostream>
#include <string>
#include <fstream>
#include <set>
#include <map>

using namespace std;

// Character classes driving the scanner:
//  - kw:                  reserved words, matched case-insensitively (see analyseToken).
//  - boundary:            single-char separators that always end the current token.
//  - operators:           single-char operators.
//  - operationBeginChar:  chars that may begin a two-char operator (==, <=, ...).
//  - operationOf2Char:    the complete set of two-char operators.
set<string> kw = {"int", "void", "return", "const", "main", "float", "if", "else"};
set<char> boundary = {'(', ')', '{', '}', ';', ','};
set<char> operators = {'+', '-', '*', '/', '%', '=', '>', '<'};
set<char> operationBeginChar = {'=', '<', '>', '!', '&', '|'};
set<string> operationOf2Char = {"==", "<=", ">=", "!=", "&&", "||"};

// Lexeme -> Token-code table for every keyword, operator, and separator.
// Anything not in this table is classified by the minimized DFA instead.
map<string, Token> tokenMap = {
        {"int",    Token::KW_INT},
        {"void",   Token::KW_VOID},
        {"return", Token::KW_RETURN},
        {"const",  Token::KW_CONST},
        {"main",   Token::KW_MAIN},
        {"float",  Token::KW_FLOAT},
        {"if",     Token::KW_IF},
        {"else",   Token::KW_ELSE},
        {"+",      Token::OP_PLUS},
        {"-",      Token::OP_MINUS},
        {"*",      Token::OP_MULTIPLY},
        {"/",      Token::OP_DIVIDE},
        {"%",      Token::OP_PERCENT},
        {"=",      Token::OP_ASSIGN},
        {">",      Token::OP_GT},
        {"<",      Token::OP_LT},
        {"==",     Token::OP_EQUAL},
        {"<=",     Token::OP_LEQ},
        {">=",     Token::OP_GEQ},
        {"!=",     Token::OP_NEQ},
        {"&&",     Token::OP_AND},
        {"||",     Token::OP_OR},
        {"(",      Token::SE_LPARENT},
        {")",      Token::SE_RPARENT},
        {"{",      Token::SE_LBRACES},
        // NOTE(review): "RBARCES" is spelled this way in the Token enum
        // (project header) — keep it until the enum itself is renamed.
        {"}",      Token::SE_RBARCES},
        {";",      Token::SE_SEMICOLON},
        {",",      Token::SE_COMMA}
};

// Shared analyzer state: the symbol table, the three automata stages
// (NFA -> DFA -> minimized DFA), and the current 0-based source line.
Symbol sTable;
FSM NFA;
FSM DFA;
FSM miniDFA;
int lineNum = 0;

// Dump all three automata (NFA, DFA, minimized DFA), separated by blank lines.
void outputFSM() {
    const auto gap = [] { cout << endl << endl << endl; };
    NFA.printFSM();
    gap();
    DFA.printFSM();
    gap();
    miniDFA.printFSM();
}

// Build the full automata pipeline into the globals:
// reset the symbol table, construct the NFA from the token regexes,
// determinize it, minimize the result, then print all three stages.
void getFSM() {
    sTable = Symbol();
    NFA = createNFA();
    DFA = NFAtoDFA(NFA);
    miniDFA = minimizeDFA(DFA);
    outputFSM();
}

// 使用FSM分析token
void analyseToken(const string& token) {
    if (token.empty()) {
        return;
    }

    //if is KEYWORD, then printToken
    if (isAllLetter(token) && kw.count(toLower(token))) {
        printToken(token, tokenMap[token], lineNum);
        return;
    }
    /**
     * 1. 用正则表达式构造NFA
     * 2. NFA-->DFA
     * 3. DFA minimize
     * 4. 把token扔进最小化的DFA看看返回什么状态---> 标识符或变量，以及错误状态，只有这三种可能
     * 注意：
     * 1. 这里只需要返回Token即可
     */

    int tokenType = identify(miniDFA, token);
    printToken(token, tokenType, lineNum);
    if (tokenType == Token::IDN) {
        sTable.addSymbol(token);
    }
}

void doLexicalAnalysis() {
    string fileName = data_path;
    FILE *fp;
    fp = fopen(lexResultTxt, "w");
    fwrite("", 0, 1, fp);
    ifstream file;
    file.open(fileName, std::ios::in);
    if (!file.is_open()) {
        cout << "Error: " << fileName << " could not open!" << endl;
    }

    char c;
    string token;
    /**
     * 算法概括：
     * 除了分割符和已经定义好的界符和运算符之外，其余所有!所有！token序列都应交给自动机判断
     * 所以这里就是要利用分隔符、界符和运算符将程序分开，因为带有运算符的字符串不应该进入自动机
     */
    while ((c = (char) file.get()) != EOF) {
        if (c == ' ' || c == '\t' || c == '\n') {
            analyseToken(token);
            token = "";
            if (c == '\n') {
                lineNum++;
            }
            continue;
        } else if (boundary.count(c)) { // 界符
            analyseToken(token);
            token = string(1, c);
            printToken(token, tokenMap[token], lineNum);
            token = "";
            continue;
        } else if (operators.count(c) || operationBeginChar.count(c)) { // 运算符
            analyseToken(token);
            if (operationBeginChar.count(c)) { // 有可能是两个字符组成的运算符
                char nextChar = (char) file.get();
                string tryOp;
                tryOp += c;
                tryOp += nextChar;
                if (operationOf2Char.count(tryOp)) { // 是两个字符组成的运算符
                    token = tryOp;
                    printToken(token, tokenMap[token], lineNum);
                } else if (operators.count(c)) { // 普通的由一个字符组成的运算符
                    token = string(1, c);
                    printToken(token, tokenMap[token], lineNum);
                    file.putback(nextChar);
                } else {
                    // 出错了，错误token
                    token = string(1, c);
                    printToken(token, Token::UNDEFINED, lineNum);
                    file.putback(nextChar);
                }
            } else { // 这就是一个字符组成的运算符
                token = string(1, c);
                printToken(token, tokenMap[token], lineNum);
            }
            token = "";
        } else {
            token += c;
        }
    }
    sTable.printTable();
}

//int main() {
//    getFSM();
//    doLexicalAnalysis();
//    return 0;
//}
