package edu.uba.fcen.estimacion.estimacion;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.log4j.Logger;

import edu.uba.fcen.estimacion.database.QueryWordsDB;
import edu.uba.fcen.estimacion.word.selection.constants.Constants;
import edu.uba.fcen.estimacion.word.selection.filter.FilterWords;
import edu.uba.fcen.estimacion.word.selection.filter.FilterWordsFactory;

public class Estimacion {

	private static final Logger logger = Logger.getLogger(Estimacion.class);

	// Compiled once: Pattern.compile is expensive and isWord() is invoked for
	// every token of the input file (was compiled per call in isWord()).
	private static final Pattern PUNCTUATION_PATTERN =
			Pattern.compile(Constants.PUNCTUATION_SYMBOL, Pattern.UNICODE_CASE);

	private QueryWordsDB db;
	// lemma_T -> database means, filled by calculateRate() for valid entries
	private Map<String, Means> allCoveredWords;
	// lemma_T -> lemma, every word extracted from the Freeling output
	private Map<String, String> allWords;
	private String pathToCSV;

	/**
	 * Creates an estimator backed by the word database at the given CSV path.
	 *
	 * @param pathToCSV path to the CSV word database
	 */
	public Estimacion(String pathToCSV) {
		logger.info("El path es: " + pathToCSV);
		this.pathToCSV = pathToCSV;
		this.db = new QueryWordsDB(pathToCSV);
	}

	/**
	 * Creates an estimator backed by the word database at the given CSV path.
	 *
	 * @param pathToCSV path to the CSV word database
	 * @param i         extra value forwarded verbatim to {@code QueryWordsDB};
	 *                  its meaning is defined by that class — TODO confirm
	 */
	public Estimacion(String pathToCSV, int i) {
		logger.info("El path es: " + pathToCSV);
		this.pathToCSV = pathToCSV;
		this.db = new QueryWordsDB(pathToCSV, i);
	}

	/**
	 * Processes a Freeling output file and computes the average {@link Means}
	 * over all covered words together with the coverage rate.
	 *
	 * @param pathToFileIn path to the Freeling output file to analyze
	 * @return averaged means plus coverage (a fraction in [0, 1])
	 */
	public EstimacionData runOver(String pathToFileIn) {
		processOutputFreeling(pathToFileIn);
		Means total = new Means(0d, 0d, 0d, 0d, 0d, 0d);

		double coverage = this.calculateRate();

		for (Means covered : allCoveredWords.values()) {
			total.add(covered);
		}
		// Guard: avoid dividing by zero when no word was covered at all.
		if (!allCoveredWords.isEmpty()) {
			total.div(allCoveredWords.size());
		}
		logger.info(total.toString());

		logger.info("cubrimiento: " + (coverage * 100) + "%");
		// coverage is a fraction, so its valid range is [0, 1]; the original
		// compared against 100 and therefore could never detect a bad value.
		if (coverage < 0d || coverage > 1d) {
			logger.error("Error in file: " + pathToFileIn);
		}
		return new EstimacionData(total, coverage);
	}

	public static void main(String[] args) {
		// Fail fast with a usage hint instead of ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: Estimacion <pathToCSV> <pathToFileIn>");
			return;
		}
		String pathToCSV = args[0];
		String pathToFileIn = args[1];

		Estimacion estima = new Estimacion(pathToCSV);
		estima.runOver(pathToFileIn);
	}

	/**
	 * Looks up every collected word in the database, stores the valid results
	 * in {@link #allCoveredWords} and returns the fraction of covered words.
	 *
	 * @return covered words / total words, or 0 when no words were collected
	 *         (the original returned NaN from a 0/0 division in that case)
	 */
	private double calculateRate() {
		long wordsInSD = 0L;
		for (String key : allWords.keySet()) {
			Means mean = db.means(key);
			if (mean.isValid()) {
				allCoveredWords.put(key, mean);
				wordsInSD++;
			}
		}
		if (allWords.isEmpty()) {
			return 0d;
		}
		return (double) wordsInSD / allWords.size();
	}

	/**
	 * Parses a Freeling output file line by line and fills {@link #allWords}
	 * with every lemma, keyed as {@code lemma_T} where {@code T} is the first
	 * character of the POS tag that follows the lemma.
	 *
	 * @param pathToFileIn path to the Freeling output file
	 */
	private void processOutputFreeling(String pathToFileIn) {
		FilterWords fw = FilterWordsFactory.getNormalLineByLineFilter(pathToFileIn);
		allWords = new HashMap<String, String>();
		allCoveredWords = new HashMap<String, Means>();
		String line;
		while ((line = fw.readLine()) != null) {
			// Only lines delimited by "+(" ... "-)" carry token information.
			if (line.contains("+(") && line.contains("-)")) {
				String[] splited = line.split(Constants.WHITE_SPACE);
				for (int i = 0; i < splited.length; i++) {
					// The lemma and its tag are the two tokens after "+(";
					// the bounds check prevents the ArrayIndexOutOfBounds the
					// original could throw on a malformed/truncated line.
					if (splited[i].startsWith("+(") && i + 2 < splited.length) {
						String lemma = splited[i + 1];
						String tag = splited[i + 2];
						String lemmaWithTag = lemma + "_" + tag.charAt(0);
						if (isWord(lemma)) {
							allWords.put(lemmaWithTag, lemma);
						}
					}
				}
			}
		}
	}

	/** Releases the underlying word database. */
	public void closeDB() {
		this.db.close();
	}

	/**
	 * Decides whether a lemma is a real word.
	 *
	 * @param lemma candidate lemma
	 * @return {@code true} iff the lemma contains no punctuation symbol
	 */
	private boolean isWord(String lemma) {
		return !PUNCTUATION_PATTERN.matcher(lemma).find();
	}

}
