package lexical;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import lexical.domain.CT;
import lexical.domain.ErrorLogger;
import lexical.domain.PIF;
import lexical.domain.ST;

public class LexicalAnalyzer {

	/** Token-class table: maps known token text to its numeric token code. */
	private CT ct = new CT();
	/** Program Internal Form: the (token code, symbol-table position) output. */
	private PIF pif = new PIF();
	/** Symbol table holding identifiers and constants. */
	private ST st = new ST();
	/** Collects tokens that do not match any known token class. */
	private ErrorLogger errorLogger = new ErrorLogger();

	// Line currently being analyzed. Starts at -1 and is incremented before
	// each line is processed, so the first line is reported as line 0.
	// NOTE(review): 0-based line numbers in error messages — confirm intended.
	private int currentLineNo = -1;

	// Character class allowed inside string/char literals.
	private static final String LITERAL_CHAR_REGEX = "[a-zA-Z0-9 \\.\\,\\;\\:\\!\\?\\-\\/]";

	/**
	 * Separator/operator pattern used by {@link #getTokensFromLine(String)},
	 * compiled once instead of on every call. Alternatives, in order:
	 * two-character relational operators, single-character operators,
	 * "+"/"-" when not preceded by "=" (so a signed value after an assignment
	 * stays attached to its number), string and char literals (possibly
	 * unterminated), and bracket/brace/semicolon separators.
	 *
	 * Fix: the original used the character class [+|-], which also matched a
	 * literal '|' — '|' is not an operator in this language, so the class is
	 * now [+-].
	 */
	private static final Pattern TOKEN_PATTERN = Pattern.compile(
			"==|<=|>=|<>|[\\=\\<\\>\\*/%]|(?<!\\=[ ]{0,30})[+-]|\""
					+ LITERAL_CHAR_REGEX + "*\"?|\'"
					+ LITERAL_CHAR_REGEX + "*\'?|[\\[\\]\\(\\)\\{\\};]");

	/**
	 * Performs lexical analysis on a file. The results can be seen by calling
	 * {@link #printResult(PrintStream)}.
	 *
	 * @param sourceFile
	 *            path of the file that will be analyzed
	 */
	public void analyze(String sourceFile) {
		Path path = Paths.get(sourceFile);

		// Reset the counter so a second analyze() call does not continue
		// numbering from the previous file (the original never reset it).
		currentLineNo = -1;

		/*
		 * Open the source file and read it line by line.
		 */
		try (BufferedReader reader = Files.newBufferedReader(path,
				StandardCharsets.UTF_8)) {

			String line;
			while ((line = reader.readLine()) != null) {
				currentLineNo++;
				line = stripEndOfLineComment(line).trim();
				if (!line.isEmpty()) {
					List<String> tokens = getTokensFromLine(line);
					// Debug trace of the token stream for each line.
					System.out.println(tokens);

					processTokens(tokens);
				}
			}
		} catch (IOException e) {
			// Best-effort: report and return with whatever was analyzed so far.
			e.printStackTrace();
		}
	}

	/**
	 * Prints the accumulated errors, the PIF and the symbol table.
	 *
	 * @param stream destination stream (e.g. {@code System.out})
	 */
	public void printResult(PrintStream stream) {
		errorLogger.print(stream);
		pif.print(stream);
		st.print(stream);
	}

	/**
	 * Processes every token in the list by calling
	 * {@link #processToken(String)} on each element.
	 */
	private void processTokens(List<String> tokens) {
		for (String token : tokens) {
			processToken(token);
		}
	}

	/**
	 * Classifies a single token: unknown tokens are logged as errors,
	 * identifiers/constants are added to the symbol table, and every valid
	 * token is recorded in the PIF.
	 */
	private void processToken(String token) {
		Integer code = ct.getCode(token);

		// code is null if the token is not a valid token;
		// the token is logged as an error with its line number.
		if (code == null) {
			errorLogger.log(token, currentLineNo);
			return;
		}

		// Only identifiers and constants live in the symbol table; all other
		// tokens get the sentinel position -1 in the PIF.
		int stPosition;
		if (code == CT.IDENTIFIER || code == CT.CONSTANT) {
			stPosition = st.add(token);
		} else {
			stPosition = -1;
		}

		pif.add(code, stPosition);
	}

	/**
	 * Tokenizes the input string. Text between separator/operator matches is
	 * split on whitespace into plain tokens; each separator/operator is
	 * emitted as its own token, in source order.
	 *
	 * @param line input string
	 * @return a list of tokens
	 */
	private List<String> getTokensFromLine(String line) {
		List<String> tokenList = new ArrayList<String>();
		// The matcher iterates over the original line; `line` below is
		// shortened in step with the matches.
		Matcher matcher = TOKEN_PATTERN.matcher(line);

		while (line.length() != 0) {
			String group = null;
			String substring;

			if (matcher.find()) {
				group = matcher.group();
				int at = line.indexOf(group);
				substring = line.substring(0, at);
				line = line.substring(at + group.length());
			} else {
				// No more separators: the rest of the line is plain tokens.
				substring = line;
				line = "";
			}

			for (String s : substring.split("\\s")) {
				if (!s.isEmpty()) {
					tokenList.add(s);
				}
			}

			// The separator itself is a token and follows the text before it.
			if (group != null) {
				tokenList.add(group);
			}
		}
		return tokenList;
	}

	/**
	 * Deletes everything after "//", including "//".
	 *
	 * @param line the string from which the comment will be stripped
	 * @return the line without the comment
	 */
	private String stripEndOfLineComment(String line) {
		int index = line.indexOf("//");
		if (index >= 0) {
			return line.substring(0, index);
		}
		return line;
	}

	/**
	 * Development helper: tokenizes {@code string} with a simplified pattern
	 * and prints each token to standard output.
	 */
	public void test(String string) {
		String patternString = "==|<=|>=|<>|[\\+\\-\\=\\<\\>\\*/%]|\"[a-zA-Z ]*\"?|\'[a-zA-Z]*\'?|[\\[\\]\\(\\)\\{\\};]";

		Pattern pattern = Pattern.compile(patternString);
		Matcher matcher = pattern.matcher(string);
		String string2 = string;

		while (string2.length() != 0) {
			if (matcher.find()) {
				String group = matcher.group();
				String id = string2.substring(0, string2.indexOf(group));
				for (String s : id.split("\\s")) {
					if (!s.isEmpty()) {
						print(s);
					}
				}
				print(group);
				string2 = string2.substring(string2.indexOf(group)
						+ group.length());
			} else {
				for (String s : string2.split("\\s")) {
					if (!s.isEmpty()) {
						print(s);
					}
				}
				string2 = "";
			}
		}
	}

	/** Prints a single line to standard output. */
	public static void print(String s) {
		System.out.println(s);
	}

	/**
	 * @return the PIF token codes rendered as strings, in insertion order
	 */
	public ArrayList<String> getPifTokenCodes() {
		ArrayList<String> list = new ArrayList<>();
		for (Integer code : pif.getTokenCodes()) {
			list.add(code.toString());
		}
		return list;
	}

}
