package model.parser;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import util.resources.ResourceManager;

import model.tokens.Token;
import model.tokens.TokenFactory;

public class Lexer<T> {
    private static ResourceManager manager = ResourceManager
            .getManager("model");
    private static final String COMMENT_PATTERN = manager
            .getString("COMMENT_PATTERN");
    private static final Character COMMENT_CHAR = manager.getString(
            "COMMENT_CHAR").charAt(0);

    private final String tokenRegex;
    // Compiled once at construction; the old code re-ran Pattern.compile on
    // every call to makeTokensFromExpressionString, which is wasteful since
    // tokenRegex never changes after the constructor runs.
    private final Pattern tokenPattern;
    private final TokenFactory<T> tokenFactory;

    /**
     * Constructs the lexer, given a resource file mapping patterns to tokens.
     *
     * @param resourceFile name of the resource file mapping token regexes to
     *        token types, passed through to the token factory
     * @param functionTable function table handed to the token factory for
     *        token construction
     */
    public Lexer(String resourceFile, FunctionTable<T> functionTable) {
        tokenFactory = new TokenFactory<T>(resourceFile, functionTable);
        tokenRegex = makeTokenRegex(tokenFactory.getRegexTokenMap());
        tokenPattern = Pattern.compile(tokenRegex);
    }

    /**
     * Builds a single alternation regex ("p1|p2|...|COMMENT_PATTERN") from
     * every token pattern in the given map plus the comment pattern.
     *
     * @param regexTokenMap map whose keys are the token regex patterns
     * @return the combined alternation pattern
     */
    private String makeTokenRegex(Map<String, ?> regexTokenMap) {
        // String.join is also safe for an empty map, whereas the previous
        // deleteCharAt(0) approach threw StringIndexOutOfBoundsException
        // when regexTokenMap had no entries.
        List<String> patterns = new ArrayList<String>(regexTokenMap.keySet());
        patterns.add(COMMENT_PATTERN);
        return String.join("|", patterns);
    }

    /**
     * Scans the given expression string and produces the matched tokens in
     * order of appearance. Matches beginning with the comment character are
     * skipped; text matching no pattern is silently ignored by the matcher.
     *
     * @param expression the raw expression text to tokenize
     * @return a FIFO queue of the tokens found, in input order
     */
    public Queue<Token<T>> makeTokensFromExpressionString(String expression) {
        Queue<Token<T>> tokens = new LinkedList<Token<T>>();

        Matcher matcher = tokenPattern.matcher(expression);

        // The previous token is passed to the factory so it can disambiguate
        // context-sensitive tokens (e.g. unary vs. binary operators).
        Token<T> lastToken = null;
        while (matcher.find()) {
            String label = matcher.group();
            // Skip comments.
            if (label.charAt(0) == COMMENT_CHAR) {
                continue;
            }
            Token<T> token = tokenFactory.makeToken(label, lastToken);
            lastToken = token;
            tokens.add(token);
        }

        return tokens;
    }

}
