#include <iostream>
#include <fstream>
#include <memory>
#include <string>
#include <vector>

#include <assert.h>

#include "../include/Grammar.h"
#include "../include/NFA.h"
#include "../include/DFA.h"
#include "../include/Token.h"

int main() {
    std::string grammar_path = "../../demo/regular_grammer.txt";
    std::ifstream grammar_file(grammar_path);
    if (!grammar_file.is_open()) {
        std::cerr << "[*] failed to open grammar file\n";
        return 0;
    }
    auto read_grammar = [&](std::string grammer_type) {
        std::string line;
        std::vector<std::string> ret;
        int flag = 0;   //标记是否开始读入
        while (std::getline(grammar_file, line)) {
            if (line == "//") {
                break;
            }
            if (flag == 0 && line == grammer_type) {
                flag = 1;
                continue;
            }
            if (flag == 1) {
                ret.push_back(line);
            }
        }
        return ret;
    };
    std::vector<Grammar *> grammars({
                                            new Grammar(), new Grammar(), new Grammar,
                                            new Grammar(), new Grammar(), new Grammar()
    });
    assert(0 == grammars[0]->Scan_Regular_Grammar(read_grammar("SEGMENT")));
    assert(0 == grammars[1]->Scan_Regular_Grammar(read_grammar("OPERATOR")));
    assert(0 == grammars[2]->Scan_Regular_Grammar(read_grammar("KEYWORD")));
    assert(0 == grammars[3]->Scan_Regular_Grammar(read_grammar("IDENTIFIER")));
    assert(0 == grammars[4]->Scan_Regular_Grammar(read_grammar("LITERALS")));
    assert(0 == grammars[5]->Scan_Regular_Grammar(read_grammar("OTHERS")));

//    for(int i = 0; i < 6; i++) {
//        grammars[4]->Print_struct();
//        std::cout<<std::endl;
//    }

    std::vector<NFA *> nfas({
        new NFA(), new NFA(), new NFA(),
        new NFA(), new NFA(), new NFA()
    });
    for(int i = 0; i < nfas.size(); ++i) {
        assert(0 == nfas[i]->createNFA(*grammars[i]));
    }

    std::vector<DFA *> dfas({
        new DFA(), new DFA(), new DFA(),
        new DFA(), new DFA(), new DFA()
    });

    for(int i = 0; i < dfas.size(); i++ ) {
        assert(0 == dfas[i]->createDFA(*nfas[i]));
    }
//    dfas[4]->PrintDFA();
//   nfas[4]->PrintNFA();

    std::string source_path = "../../demo/demo.txt";
    std::ifstream source_file(source_path);
    if(!source_file.is_open()) {
        std::cerr << "[*] failed to open source file\n";
        return 0;
    }
    std::vector<std::string> src_lines;
    std::string line;
    while(getline(source_file, line)) {
        src_lines.push_back(line);
    }

    std::string code = "";
    for(auto& str: src_lines) {
        code += str;
        code += "\n";
    }

    std::vector<Token *> tokens;
    int line_num = 1;
    std::string temp = "";
    auto check = [&](std::string tmp) {
        bool acc = false;
        for(int i = 2; i < (int)dfas.size(); ++i) {
            if(dfas[i]->accept(tmp)) {
                TokenType type;
                switch(i) {
                    case 2:
                        type = KEYWORD;
                        break;
                    case 3:
                        type = IDENTIFIER;
                        break;
                    case 4:
                        type = LITERALS;
                        break;
                    case 5:
                        type = OTHERS;
                        break;
                }
                auto t_token = new Token(type, line_num, tmp);
                tokens.push_back(t_token);
                acc = true;
                break;
            }
        }
        if(!acc) {
            auto t_token = new Token(ERROR, line_num, tmp);
            tokens.push_back(t_token);
        }
    };

    int code_length = (int)code.size();
    for(int i = 0; i < code_length; ++i) {
        auto c = code[i];
        std::string tt = ""; tt += c;

        if(dfas[0]->accept(tt)) {
            if(temp != "") {
                check(temp);
                temp = "";
            }

            auto t_token = new Token(SEGMENT, line_num, tt);
            tokens.push_back(t_token);
            continue;
        }
        if((i > 0 && i < code_length-1 && code[i-1] == ' ' && code[i+1] == ' ') && dfas[1]->accept(tt)) {
            if(temp != "") {
                check(temp);
                temp = "";
            }
            auto t_token = new Token(OPERATOR, line_num, tt);
            tokens.push_back(t_token);
            continue;
        }
        if('\n' == c || ' ' == c) {
            if(temp != "") {
                check(temp);
                temp = "";
            }
            if('\n' == c)
            line_num++;
            continue;
        }
        temp += c;
    }

    std::ofstream outputFile("../../demo/token.txt");
    if (!outputFile) {
        std::cerr << "couldn't open the file!" << std::endl;
        return 1;
    }
    outputFile.clear();
    outputFile << tokens.size() << std::endl;
    for (const auto& token : tokens) {
        outputFile << token->toString() << std::endl;
    }


    std::ofstream outputjson("../../demo/token.json");
    if (!outputjson) {
        std::cerr << "couldn't open the file!" << std::endl;
        return 1;
    }
    outputjson.clear();
    for (const auto& token : tokens) {
        outputjson << token->toJSON() << std::endl;
    }

}
