package edu.uba.fcen.estimacion.word.selection;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.log4j.Logger;

import edu.uba.fcen.estimacion.database.QueryDB;
import edu.uba.fcen.estimacion.word.selection.cleaner.CleanGarbage;
import edu.uba.fcen.estimacion.word.selection.constants.Constants;
import edu.uba.fcen.estimacion.word.selection.filter.FilterWords;
import edu.uba.fcen.estimacion.word.selection.filter.FilterWordsFactory;

/**
 * Builds and filters a word dictionary extracted from Wikipedia dump chunks.
 * The current entry point reads a previously generated dictionary file and
 * removes every word that exists in the database (names, cities, countries,
 * regions), writing the surviving words back to disk.
 */
public class GetWikipediaWords {
	
	/**
	 * commons-collections predicate that keeps only Wikipedia chunk files,
	 * i.e. files whose name starts with "wiki".
	 */
	private final class AreWikiFilesPredicate implements Predicate {
		@Override
		public boolean evaluate(Object arg0) {
			File file = (File) arg0;
			// Bug fix: the original evaluated startsWith("wiki") but discarded
			// the result and always returned false, so no file was ever selected.
			return file.getName().startsWith("wiki");
		}
	}
	
	static Logger logger = Logger.getLogger(GetWikipediaWords.class); 
	QueryDB db;
	// word -> number of occurrences accumulated so far
	Map<String, Integer> map = new HashMap<String, Integer>();
	
	/**
	 * Lists the Wikipedia chunk files (names starting with "wiki") found in
	 * the chunks directory configured in {@code Constants}.
	 *
	 * @return the matching files; empty when the directory cannot be listed
	 */
	public Collection getFiles() {
		File directory = new File(Constants.PATH_WIKIPEDIA_CHUNKS_DIRECTORY);
		File[] listed = directory.listFiles();
		// listFiles() returns null when the path is not an accessible directory;
		// return an empty collection instead of letting Arrays.asList throw NPE.
		if (listed == null) {
			return Collections.emptyList();
		}
		Collection files = Arrays.asList(listed);
		return CollectionUtils.select(files, new AreWikiFilesPredicate());
	}
	
	/**
	 * Entry point: filters the previously generated dictionary file against
	 * the database, dropping words that are names, cities, countries or regions.
	 */
	public static void main(String[] args) {
		GetWikipediaWords gw = new GetWikipediaWords();
		logger.info("Ready to eliminate words that are names, cities, countries or regions");
		gw.filterWithTheDatabase();
		logger.info("The process finish success");
	}
	

	/**
	 * Reads the dictionary file (one {@code word\t\t\tcount} entry per line),
	 * keeps only words not present in the database, and writes the surviving
	 * words to two result files (with and without their counts).
	 */
	public void filterWithTheDatabase() {
		this.db = new QueryDB();
		BufferedReader br = null;
		try {
			InputStreamReader in = new InputStreamReader(
					new FileInputStream(Constants.PATH_DIC_WORDS + Constants.FILE_DIC_NAME),
					Constants.ENCODING);
			br = new BufferedReader(in);
			String line;
			while ((line = br.readLine()) != null) {
				// Split once per line; the original split the same line twice.
				String[] parts = line.split("\t\t\t");
				String word = parts[0];
				String number = parts[1];
				if (this.isAnAllowedWord(word)) {
					this.map.put(word.toLowerCase(), Integer.valueOf(number));
				}
			}
			this.writeResult("WordsFilteredWithDB.txt", true);
			this.writeResult("WordsFilteredWithDBWithoutNum.txt", false);
		} catch (IOException e) {
			// Also covers FileNotFoundException and UnsupportedEncodingException,
			// which are IOException subclasses the original caught separately.
			logger.error("Could not filter the dictionary with the database", e);
		} finally {
			if (br != null) {
				try {
					// Closing the BufferedReader closes the underlying reader and
					// stream too; the original closed only "in" and leaked the buffer.
					br.close();
				} catch (IOException e) {
					logger.error("Could not close the dictionary file", e);
				}
			}
		}
	}


	/**
	 * A word is allowed when it does not exist in the database of names,
	 * cities, countries and regions.
	 */
	private boolean isAnAllowedWord(String word) {
		return !this.db.exist(word);
	}

	/**
	 * Prunes rare words to keep memory bounded: once the map grows beyond
	 * 100000 entries, every word seen fewer than 10 times is dropped.
	 */
	public void removeSomeWords() {
		if (logger.isDebugEnabled()) {
			logger.debug("The size of the map is: " + map.size());
		}
		if (map.size() > 100000) {
			// Iterate over a snapshot of the keys so entries can be removed
			// without a ConcurrentModificationException.
			Set<String> keys = new HashSet<String>(map.keySet());
			for (String key : keys) {
				if (map.get(key) < 10) {
					map.remove(key);
				}
			}
		}
	}

	/**
	 * Writes the current word map, sorted by descending count, to the given
	 * file under the dictionary directory configured in {@code Constants}.
	 *
	 * @param fileName name of the output file
	 * @param withNum  when true each line is {@code word\t\t\tcount};
	 *                 otherwise just the word
	 */
	public void writeResult(String fileName, boolean withNum) {
		BufferedWriter dicFile = null;
		try {
			Writer out = new OutputStreamWriter(
					new FileOutputStream(Constants.PATH_DIC_WORDS + fileName),
					Constants.ENCODING);
			dicFile = new BufferedWriter(out);
			// Hoisted out of the loop: the separator never changes per line.
			String newline = System.getProperty("line.separator");
			for (Pair<String, Integer> pair : this.getSortedList()) {
				if (withNum) {
					dicFile.write(pair.getX() + "\t\t\t" + pair.getY() + newline);
				} else {
					dicFile.write(pair.getX() + newline);
				}
			}
			dicFile.flush();
		} catch (IOException e) {
			logger.error("Could not write " + fileName, e);
		} finally {
			if (dicFile != null) {
				try {
					// The original never closed the writer, leaking a file descriptor
					// per call; close() also flushes and closes the underlying stream.
					dicFile.close();
				} catch (IOException e) {
					logger.error("Could not close " + fileName, e);
				}
			}
		}
	}

	/**
	 * Builds a list of (word, count) pairs sorted by descending count.
	 *
	 * @return a new list, never null
	 */
	private List<Pair<String, Integer>> getSortedList() {
		List<Pair<String, Integer>> sortedList =
				new ArrayList<Pair<String, Integer>>(map.size());
		// entrySet() avoids the extra map.get() lookup per key.
		for (Map.Entry<String, Integer> entry : map.entrySet()) {
			sortedList.add(new Pair<String, Integer>(entry.getKey(), entry.getValue()));
		}
		Collections.sort(sortedList, new Comparator<Pair<String, Integer>>() {

			@Override
			public int compare(Pair<String, Integer> o1, Pair<String, Integer> o2) {
				// Reversed comparison => descending order by occurrence count.
				return o2.getY().compareTo(o1.getY());
			}
		});
		return sortedList;
	}
	
}
