package edu.ncsu.contractminer.event.topic;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import com.aliasi.tokenizer.IndoEuropeanTokenizerFactory;
import com.aliasi.tokenizer.Tokenizer;

import edu.ncsu.contractminer.utilities.Constants;
import edu.ncsu.contractminer.utilities.Dictionaries;
import edu.ncsu.contractminer.utilities.Stemmer;
import edu.stanford.nlp.objectbank.TokenizerFactory;

/**
 * Prepares plain-text documents for LDA topic modeling in the format expected
 * by the R/lda-c toolchain: reads every {@code *.txt} file in a folder as one
 * document, builds a vocabulary and per-document word counts (stop words,
 * punctuation-only and digit-only tokens removed; remaining tokens stemmed),
 * and writes the standard {@code mult.dat} / {@code vocab.dat} pair.
 *
 * The {@code main} entry point additionally extracts event-candidate phrases
 * from the database table {@code Constants.eventTableName} into one file per
 * row under {@code Constants.LDADir} before running the preprocessing.
 */
public class LDAPreprocessorFileToRRequired {

	// Vocabulary; a word's index in this list is the integer id written to mult.dat.
	private List<String> vocabulary = null;

	// One map per document: word -> occurrence count within that document.
	List<Map<String, Integer>> docWordCounts = null;

	// NOTE(review): the two fields below are never read or written anywhere in
	// this file; kept only so any external code referencing them still compiles.
	List<Map<String, Integer>> testWordCounts = null;
	Map<String, Integer> wordDocCount = null;

	// Stop words filtered out before counting (shared singleton dictionary).
	HashSet<String> stopWordSet = Dictionaries.getInstance().getStopWordSet();

	// Porter-style stemmer applied to every surviving token.
	Stemmer stemmer = new Stemmer();

	/**
	 * Replaces the vocabulary used by {@link #writeFiles}.
	 *
	 * @param vocabulary ordered word list; index = word id in mult.dat
	 */
	public void setVocabulary(List<String> vocabulary) {
		this.vocabulary = vocabulary;
	}

	/**
	 * Loads all {@code *.txt} documents from {@code folder} immediately.
	 *
	 * @param folder directory containing one document per .txt file
	 */
	public LDAPreprocessorFileToRRequired(String folder) {
		loadDocuments(folder);
	}

	/**
	 * Load all document files (*.txt) in a folder; take each file as a document.
	 * Populates {@link #docWordCounts} (one word-count map per file, in
	 * filename order) and {@link #vocabulary} (every distinct stemmed word).
	 *
	 * @param folder directory to scan
	 */
	private void loadDocuments(String folder) {
		File dir = new File(folder);

		try {
			// Only .txt files; listFiles returns null if the folder is missing.
			File[] files = dir.listFiles(new FilenameFilter() {
				public boolean accept(File d, String name) {
					return name.endsWith(".txt");
				}
			});

			if (files == null || files.length == 0) {
				System.out.println("no files to load!");
				return;
			}

			// Sort documents by filename so document order is deterministic.
			Arrays.sort(files, new Comparator<File>() {
				public int compare(File f1, File f2) {
					return f1.getName().compareTo(f2.getName());
				}
			});

			docWordCounts = new LinkedList<Map<String, Integer>>();
			Set<String> vocab = new HashSet<String>();

			for (int i = 0; i < files.length; i++) {
				// Progress logging: "(k/n) filename".
				System.out.println("(" + (i + 1) + "/" + files.length + ") "
						+ files[i].getName());

				Map<String, Integer> docWordCount = new HashMap<String, Integer>();
				// try-with-resources: the original leaked one reader per file.
				try (BufferedReader in = new BufferedReader(
						new FileReader(files[i]))) {
					String line;
					while ((line = in.readLine()) != null) {
						// Split on single spaces; filter stop words, pure
						// punctuation and pure digits, then stem.
						for (String token : line.split(" ")) {
							if (stopWordSet.contains(token.trim())
									|| isPunctuation(token) || isDigit(token)) {
								continue;
							}
							String word = stemmer.stemWord(token);

							vocab.add(word);

							// Count occurrences of the word in this document.
							Integer prev = docWordCount.get(word);
							docWordCount.put(word, prev == null ? 1 : prev + 1);
						}
					}
				}

				docWordCounts.add(docWordCount);
			}

			vocabulary = new ArrayList<String>(vocab);

		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** @return true if the token consists solely of punctuation characters. */
	private boolean isPunctuation(String token) {
		return token.replaceAll("\\p{Punct}", "").trim().isEmpty();
	}

	/** @return true if the token consists solely of decimal digits. */
	private boolean isDigit(String token) {
		return token.replaceAll("\\d+", "").trim().isEmpty();
	}

	/**
	 * Write "mult.dat" and "vocab.dat".
	 *
	 * mult.dat per line: N word1:count1 word2:count2 word3:count3 ...
	 * N: (int) number of unique words; wordK:countK: a word id (int) and its
	 * count (int) in the document.
	 *
	 * vocab.dat per line: a word (string); the line number is the word id used
	 * in mult.dat.
	 *
	 * @param folder    output directory
	 * @param multname  file name for the per-document counts ("mult.dat")
	 * @param vocabname file name for the vocabulary ("vocab.dat")
	 */
	public void writeFiles(String folder, String multname, String vocabname) {

		if (vocabulary == null || vocabulary.isEmpty()) {
			System.out.println("no vocabulary!");
			return;
		}
		// Guard: loadDocuments may have failed before populating this.
		if (docWordCounts == null) {
			System.out.println("no documents loaded!");
			return;
		}

		// word -> id; ids are vocabulary indices, assigned while writing vocab.
		Map<String, Integer> wordToIndex = new HashMap<String, Integer>();

		String vocabPath = folder + File.separator + vocabname;
		String multPath = folder + File.separator + multname;

		try {
			System.out.print("Writing " + vocabPath + "...");
			try (BufferedWriter vocab = new BufferedWriter(
					new FileWriter(vocabPath))) {
				for (int i = 0; i < vocabulary.size(); i++) {
					String word = vocabulary.get(i);
					vocab.write(word);
					vocab.newLine();
					wordToIndex.put(word, i);
				}
			}
			System.out.println("done!");

			System.out.print("Writing " + multPath + "...");
			try (BufferedWriter mult = new BufferedWriter(
					new FileWriter(multPath))) {
				for (Map<String, Integer> docWordCount : docWordCounts) {
					// N = |words of this doc that are also in the vocabulary|
					// (a word may be missing if the vocabulary was replaced
					// via setVocabulary).
					Set<String> docWords = new HashSet<String>(
							docWordCount.keySet());
					docWords.retainAll(wordToIndex.keySet());
					mult.write("" + docWords.size());

					for (Entry<String, Integer> entry : docWordCount.entrySet()) {
						Integer id = wordToIndex.get(entry.getKey());
						if (id != null) {
							mult.write(" " + id + ":" + entry.getValue());
						}
					}

					mult.newLine();
				}
			}
			System.out.println("done!");

		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Clears {@code Constants.LDADir}, exports one lower-cased event-candidate
	 * phrase per .txt file from the database, then preprocesses the folder and
	 * writes mult/vocab files ("a"/"b").
	 *
	 * NOTE(review): the JDBC-ODBC bridge driver ("sun.jdbc.odbc.JdbcOdbcDriver")
	 * was removed in Java 8; this entry point only works on Java 7 or earlier,
	 * or after migrating to a vendor JDBC driver.
	 */
	public static void main(String[] args) throws SQLException, IOException,
			ClassNotFoundException, ParseException {

		try {
			String dir = Constants.LDADir;
			File directory = new File(dir);
			// Delete any leftover files from a previous run; listFiles() is
			// null when the directory does not exist (original NPE'd here).
			File[] files = directory.listFiles();
			if (files != null) {
				for (File file : files) {
					if (!file.delete()) {
						System.out.println("Failed to delete " + file);
					}
				}
			}

			int i = 0;
			Class.forName("sun.jdbc.odbc.JdbcOdbcDriver");
			String dbURL = "jdbc:odbc:" + Constants.DBName;
			String query0 = "SELECT Subject, EventCandidate, ClauseSignal,CounterClauseSignal FROM "
					+ Constants.eventTableName;
			// try-with-resources: the original leaked the connection,
			// statement and result set.
			try (Connection con = DriverManager.getConnection(dbURL, "", "");
					Statement statement = con.createStatement();
					ResultSet result = statement.executeQuery(query0)) {
				while (result.next()) {
					String event = result.getString("EventCandidate");
					// Guard against NULL column values and values too short
					// to strip the surrounding bracket characters from.
					if (event == null) {
						continue;
					}
					event = event.replaceAll(",", "");
					if (event.length() < 2) {
						continue;
					}
					// Strip enclosing delimiter characters (e.g. "[...]").
					event = event.substring(1, event.length() - 1);
					// Only phrases with at least two tokens are exported;
					// the leading token (the subject) is dropped.
					if (event.indexOf(" ") != -1) {
						i++;
						File f = new File(dir + File.separator + i + ".txt");
						try (BufferedWriter bw = new BufferedWriter(
								new FileWriter(f))) {
							bw.write(event.substring(event.indexOf(" "))
									.trim().toLowerCase());
						}
					}
				}
			}

		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		} catch (SQLException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}

		LDAPreprocessorFileToRRequired dc = new LDAPreprocessorFileToRRequired(
				Constants.LDADir);
		dc.writeFiles(Constants.LDADir, "a", "b");
	}
}
