package model.parser;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import model.tokens.Token;
import model.tokens.TokenFactory;

public class Lexer<T> {
	// Compiled once in the constructor: the alternation never changes, and
	// Pattern.compile is documented as expensive to run per call.
	private final Pattern tokenPattern;
	private final TokenFactory tokenFactory;

	/**
	 * Constructs the lexer, given a resource file mapping patterns to tokens.
	 *
	 * @param resourceFile resource file handed to the {@link TokenFactory}
	 * @param syntaxFile   syntax file handed to the {@link TokenFactory}
	 * @throws IllegalArgumentException if the factory yields no token regexes
	 */
	public Lexer(String resourceFile, String syntaxFile) {
		tokenFactory = new TokenFactory(resourceFile, syntaxFile);
		tokenPattern = Pattern.compile(makeTokenRegex(tokenFactory.getRegexTokenMap()));
	}

	/**
	 * Joins every token regex into one alternation ("a|b|c").
	 *
	 * @param regexTokenMap map whose keys are the token regexes
	 * @return the combined alternation regex
	 * @throws IllegalArgumentException if the map is empty — an empty regex
	 *         would otherwise match at every position of any input
	 */
	private String makeTokenRegex(Map<String, ?> regexTokenMap) {
		if (regexTokenMap.isEmpty()) {
			throw new IllegalArgumentException("No token regexes defined; cannot build lexer pattern.");
		}
		return String.join("|", regexTokenMap.keySet());
	}

	/**
	 * Splits the expression into tokens by scanning for successive regex
	 * matches. Text between matches is skipped silently (Matcher.find
	 * semantics), so unrecognized characters are dropped rather than reported.
	 *
	 * @param expression raw expression text to tokenize
	 * @return tokens in the order they appear in the expression
	 */
	public List<Token<T>> makeTokensFromExpressionString(String expression) {
		List<Token<T>> tokens = new ArrayList<>();

		Matcher matcher = tokenPattern.matcher(expression);
		while (matcher.find()) {
			Token<T> token = tokenFactory.makeToken(matcher.group());
			tokens.add(token);
		}

		return tokens;
	}
}
