﻿#include "tokenizer.h"

#include <format>
#include <iostream>
#include <regex>

namespace Pomian
{
	std::vector<IToken *> Tokenizer::tokenize(int lineNumber, const std::string& code)
	{

        std::string KEYWORD_PATTERN = "(if|else|for|while|int|float|return)";
        std::string NUMBER_PATTERN = "[0-9]+";
        std::string IDENTIFIER_PATTERN = "[a-zA-Z][a-zA-Z0-9]*";
        std::string OPERATOR_PATTERN = "(==|>|<|!=|=|\\+|\\-|\\*|/)";
        std::string SEPERATOR_PATTERN = R"([()\{\}".;])";
        std::string STRING_PATTERN = R"("((?:[^"\\]|\\.)*)"")"; // 匹配字符串常量，包括转义字符

        std::regex expr(
            std::format(R"(({})|({})|({})|({})|({})|{})",
                KEYWORD_PATTERN,
                IDENTIFIER_PATTERN, 
                NUMBER_PATTERN, 
                OPERATOR_PATTERN,                
                STRING_PATTERN,
                SEPERATOR_PATTERN)
        );
        std::vector<IToken *> tokens;

        std::smatch match;
        std::string s = code;
        while (std::regex_search(s, match, expr)) {
            std::string word = match[0].str();
            if (std::regex_match(word, std::regex(KEYWORD_PATTERN))) {
                tokens.push_back(new KeywordToken(lineNumber, word));
            } else if (std::regex_match(word, std::regex(IDENTIFIER_PATTERN))) {
                tokens.push_back(new IdentifierToken(lineNumber, word));
            } else if (std::regex_match(word, std::regex(NUMBER_PATTERN))) {
                tokens.push_back(new NumberToken(lineNumber, word));
            }
            else if (std::regex_match(word, std::regex(STRING_PATTERN))) {
                tokens.push_back(new StringToken(lineNumber, word));
            }
            else if (std::regex_match(word, std::regex(OPERATOR_PATTERN))) {
                tokens.push_back(new OperatorToken(lineNumber, word));
            } else if (std::regex_match(word, std::regex(SEPERATOR_PATTERN))) {
                tokens.push_back(new SeperatorToken(lineNumber, word));
            }
            s = match.suffix().str();
        }
        tokens.push_back(FlagToken::EoL);
		return tokens;
	}
    Tokenizer::Tokenizer(const std::string& filename)
        : m_doNumber(0)
        , m_lineNumber(0)
        , m_tokenNumber(0)
        , m_stream(std::ifstream(filename))
    {
        
        if (!m_stream.is_open()) {
            return;
        }
        




    }
    void Tokenizer::redo()
    {
        m_doNumber = m_tokenNumber;
    }
    void Tokenizer::undo()
    {
        m_tokenNumber = m_doNumber;
    }
    bool Tokenizer::has(int number)
    {        
        while ((m_tokenNumber + number - 1) >= m_tokens.size())
        {      
            std::string line;
            if (!std::getline(m_stream, line)) 
            {
                return false;
            }
            std::vector<IToken*> tokens = tokenize(++m_lineNumber, line);
            m_tokens.insert(m_tokens.end(), tokens.begin(), tokens.end());
        }
        return true;
    }
    void Tokenizer::move(int number)
    {
        m_tokenNumber += number;
    }
    IToken* Tokenizer::current(int index)
    {
        if (has(index + 1) == false)
        {
            return FlagToken::EoF;
        }
        return m_tokens.at(m_tokenNumber + index);
    }
    
}