/*************************************************************************
	> File Name: RegLex.hpp
	> Author: H.M. Lau
	> Mail: hm.lau@outlook.com
	> Created Time: 2023/10/14 15:29
 ************************************************************************/

#pragma once

#include <cassert>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>
#include "log.hpp"

namespace lex
{
    // Size of the char -> Token lookup table built by RegLexer.
    // 256 covers the full unsigned-char range; the previous value of 128
    // left bytes >= 0x80 indexing past the end of the table.
    inline constexpr size_t TOKEN_SIZE = 256;

    // Printable names for the Token enum below, indexed by Token value --
    // the order here must stay in sync with the enum.
    // `inline` (C++17) gives this header-defined variable a single
    // definition program-wide; the previous plain definition violated the
    // ODR as soon as the header was included in more than one TU.
    inline const std::vector<std::string> _gTokenMap = {
        "EOS",
        "ANY",
        "AT_BOL",
        "AT_EOL",
        "CCL_END",
        "CCL_START",
        "CLOSE_CURLY",
        "CLOSE_PAREN",
        "CLOSURE",
        "DASH",
        "END_OF_INPUT",
        "L",
        "OPEN_CURLY",
        "OPEN_PAREN",
        "OPTIONAL",
        "OR",
        "PLUS_CLOSE"
    };

    // Token kinds emitted by RegLexer. The enumerator order must match the
    // name table _gTokenMap above, which is indexed by Token value.
    enum Token {
        EOS, // end of the regular expression
        ANY, // '.' wildcard
        AT_BOL, // '^' anchor (beginning of line)
        AT_EOL, // '$' anchor (end of line)
        CCL_END, // ']' character-class close
        CCL_START, // '[' character-class open
        CLOSE_CURLY, // '}'
        CLOSE_PAREN, // ')'
        CLOSURE, // '*' Kleene closure
        DASH, // '-' range separator inside a class
        END_OF_INPUT, // end of the whole input stream
        L, // literal character (including escaped metacharacters)
        OPEN_CURLY, // '{'
        OPEN_PAREN, // '('
        OPTIONAL, // '?'
        OR, // '|' alternation
        PLUS_CLOSE, // '+' positive closure
    };

    class RegLexer
    {
    public:
        RegLexer () 
        { _initTokenMap(); }

        void start(const std::string& input)
        {
            assert(!input.empty());
            _input = input;
            _it = _input.cbegin();
            if (*_it == '\\') _handleEsc();
            else _cur_char = *_it;
            _cur_token = _token_map[_cur_char];
        }

        void clear()
        {
            RegLexer temp;
            std::swap(*this, temp);
        }

        Token getCurToken()
        { return _cur_token; }

        Token advance()
        {
            if (_it != _input.cend()) {
                if (++_it != _input.cend()) {
                    if (*_it == '\\') {
                        _handleEsc();
                    } else {
                        _cur_char = *_it;
                        _cur_token = _token_map[_cur_char];
                    }
                } else {
                    _cur_token = Token::EOS;
                }
            }
#ifdef DEBUG_SHOW
            std::cout << getLexeme() << " : " << getLexSym() << std::endl;
#endif
            return _cur_token;
        }

        uint8_t getLexeme() const
        {
            if (_cur_token == Token::EOS) {
                log::logMessage(log::LOG_LEVEL::_INFO, "lexing finished\n");
                return 1;
            }
            return _cur_char;
        }

        bool matchToken(Token token) const
        { return _cur_token == token; }

        static std::string mapLexToken(size_t token)
        { return _gTokenMap[token]; }

        std::string getLexSym() const
        { return _gTokenMap[_cur_token]; }

    private:
        void _showCur() const
        { printf("%c:%s\n", _cur_char, _gTokenMap[_cur_token].c_str()); }

        void _handleEsc()
        {
            if (_it + 1 != _input.cend() && _esc_map.find(*(_it + 1)) != _esc_map.end()) _cur_char = _esc_map[*(++_it)];
            else _cur_char = '\\';
            _cur_token = Token::L;
        }

        void _initTokenMap()
        {
            std::vector<Token> temp(TOKEN_SIZE, Token::L);
            _token_map.swap(temp);
            _token_map['.'] = Token::ANY; 
            _token_map['^'] = Token::AT_BOL; 
            _token_map['$'] = Token::AT_EOL; 
            _token_map[']'] = Token::CCL_END; 
            _token_map['['] = Token::CCL_START; 
            _token_map['}'] = Token::CLOSE_CURLY; 
            _token_map[')'] = Token::CLOSE_PAREN; 
            _token_map['*'] = Token::CLOSURE; 
            _token_map['-'] = Token::DASH; 
            _token_map['{'] = Token::OPEN_CURLY; 
            _token_map['('] = Token::OPEN_PAREN; 
            _token_map['?'] = Token::OPTIONAL; 
            _token_map['|'] = Token::OR; 
            _token_map['+'] = Token::PLUS_CLOSE; 
            _token_map['\n'] = Token::EOS;
        }

        Token _cur_token;
        std::string _input;
        char _cur_char;
        std::string::const_iterator _it;
        std::vector<Token> _token_map;
        std::unordered_map<char, char> _esc_map = {
            {'0', '\\'},
            {'b', '\b'},
            {'f', '\f'},
            {'n', '\n'},
            {'r', '\r'},
            {'s', ' '},
            {'t', '\t'},
            {'.', '.'},
            {'^', '^'},
            {'$', '$'},
            {']', ']'},
            {'[', '['},
            {'}', '}'},
            {')', ')'},
            {'*', '*'},
            {'-', '-'},
            {'{', '{'},
            {'(', '('},
            {'?', '?'},
            {'|', '|'},
            {'+', '+'}
        };      
    };
}

