package de.connecttext.dao.sql;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.w3c.dom.stylesheets.DocumentStyle;

import static de.connecttext.application.Constants.*;
import de.connecttext.dao.DocumentTagIdGenerator;
import de.connecttext.dao.IdGenerator;
import de.connecttext.dao.TagIdGenerator;
import de.connecttext.dao.solr.WikipediaSolrDao;
import de.connecttext.exception.SqlAccessException;
import de.connecttext.model.TagVector;
import de.connecttext.model.TextDocument;

/**
 * Data access object for communicating with the SQL database. Singleton —
 * obtain the instance via {@link #getInstance()}.
 * 
 * @author conrad
 * 
 */
public class SqlDao {

	/** Lazily created singleton instance; access only via {@link #getInstance()}. */
	private static SqlDao instance;

	/** Documents waiting to be written to the database in one batch. */
	private final List<TextDocument> bufferedDocuments;

	/** Number of documents successfully written so far. */
	private int documentsWritten = 0;
	/** Number of documents skipped because they carried no tags. */
	private int documentsSkipped = 0;

	/**
	 * Opens a connection once to (re-)create the required tables, then closes
	 * it again. The connection is closed even if table creation fails.
	 */
	private SqlDao() throws ClassNotFoundException, SQLException,
			SqlAccessException {
		Connection db = SqlConnection.open();
		try {
			prepareTables(db);
		} finally {
			// Close even on failure — the original leaked the connection here.
			db.close();
		}
		this.bufferedDocuments = new ArrayList<TextDocument>();
	}

	/**
	 * Executes a CREATE statement on the given connection.
	 * 
	 * @param query the SQL; must start with "CREATE"
	 * @param db open connection (not closed by this method)
	 * @throws SqlAccessException if the query is not a CREATE statement
	 */
	private void create(String query, Connection db) throws SqlAccessException,
			SQLException {
		// Validate before allocating a statement (the original leaked one here).
		if (!query.trim().startsWith("CREATE")) {
			throw new SqlAccessException(
					"Kein gültiges CREATE-Statement übergeben");
		}
		Statement s = db.createStatement();
		try {
			s.execute(query);
		} finally {
			s.close();
		}
	}

	/**
	 * Executes a SELECT statement and materializes every row as a
	 * column-name-to-string map.
	 * 
	 * @param query the SQL; must start with "SELECT"
	 * @param db open connection (not closed by this method)
	 * @return one map per result row; empty list if nothing matched
	 * @throws SqlAccessException if the query is not a SELECT statement
	 */
	private List<Map<String, String>> retrieve(String query, Connection db)
			throws SQLException, SqlAccessException {

		if (!query.trim().startsWith("SELECT")) {
			throw new SqlAccessException(
					"0.Kein gültiges SELECT-Statement übergeben");
		}

		List<Map<String, String>> result = new ArrayList<Map<String, String>>();
		Statement s = db.createStatement();
		try {
			ResultSet rs = s.executeQuery(query);
			while (rs.next()) {
				result.add(buildDataMap(rs));
			}
		} finally {
			// Closing the statement also closes its result set.
			s.close();
		}
		return result;
	}

	/**
	 * Executes an INSERT statement on the given connection.
	 * 
	 * @param query the SQL; must start with "INSERT"
	 * @param db open connection (not closed by this method)
	 * @throws SqlAccessException if the query is not an INSERT statement
	 */
	private void insert(String query, Connection db) throws SQLException,
			SqlAccessException {
		// Validate before preparing (the original prepared first and leaked
		// the statement on both paths).
		if (!query.trim().startsWith("INSERT")) {
			throw new SqlAccessException(
					"Kein gültiges INSERT-Statement übergeben");
		}
		PreparedStatement s = db.prepareStatement(query);
		try {
			s.executeUpdate();
		} finally {
			s.close();
		}
	}

	/** Not yet implemented — placeholder kept for interface stability. */
	private void update() {

	}

	/**
	 * Copies the current row of the given result set into a map keyed by
	 * column name, with all values rendered as strings.
	 */
	private Map<String, String> buildDataMap(ResultSet rs) throws SQLException {

		Map<String, String> data = new HashMap<String, String>();
		ResultSetMetaData rsmd = rs.getMetaData();

		// JDBC columns are 1-based.
		for (int i = 1; i <= rsmd.getColumnCount(); i++) {
			data.put(rsmd.getColumnName(i), rs.getString(i));
		}

		return data;
	}

	/**
	 * Drops and re-creates the document, tag and document-tag tables.
	 * NOTE: this is destructive — any existing data in these tables is lost.
	 */
	private void prepareTables(Connection db) throws SQLException,
			SqlAccessException {

		execute("DROP TABLE IF EXISTS `" + DOCUMENT_TABLE_NAME + "`;", db);
		String createString = "CREATE TABLE IF NOT EXISTS `"
			+ DOCUMENT_TABLE_NAME
			+ "` "
			+ "(`"
			+ DOCUMENT_ID_NAME
			+ "` INT NOT NULL PRIMARY KEY ,"
			+ "`"
			+ DOCUMENT_WIKI_ID_NAME
			+ "` TEXT NOT NULL ,"
			+ "`"
			+ DOCUMENT_AUTHOR_NAME
			+ "` TEXT,"
			+ "`"
			+ DOCUMENT_TAG_COUNT_NAME
			+ "` INT,"
			+ "`"
			+ DOCUMENT_TITLE_NAME
			+ "` TEXT) ENGINE = MYISAM CHARACTER SET utf8 COLLATE utf8_general_ci;";
		System.out.println(createString);
		create(createString, db);

		execute("DROP TABLE IF EXISTS `" + TAG_TABLE_NAME + "`;", db);
		create("CREATE TABLE IF NOT EXISTS `"
				+ TAG_TABLE_NAME
				+ "` "
				+ "(`"
				+ TAG_ID_NAME
				+ "` INT NOT NULL PRIMARY KEY ,"
				+ "`"
				+ TAG_LEMMA_NAME
				+ "` VARCHAR( 255 ) NOT NULL, "
				+ "INDEX ( `"
				+ TAG_LEMMA_NAME
				+ "` )) ENGINE = MYISAM CHARACTER SET utf8 COLLATE utf8_general_ci;",
				db);

		execute("DROP TABLE IF EXISTS `" + DOCUMENT_TAG_TABLE_NAME + "`;", db);
		create("CREATE TABLE IF NOT EXISTS `"
				+ DOCUMENT_TAG_TABLE_NAME
				+ "` "
				+ "(`"
				+ DOCUMENT_TAG_ID_NAME
				+ "` INT NOT NULL PRIMARY KEY ,"
				+ "`"
				+ DOCUMENT_TAG_DOCUMENT_ID_NAME
				+ "` INT NOT NULL ,`"
				+ DOCUMENT_TAG_TAG_ID_NAME
				+ "` INT NOT NULL ,"
				+ "`"
				+ DOCUMENT_TAG_TF_NAME
				+ "` INT NOT NULL ,"
				+ "`"
				+ DOCUMENT_TAG_TFIDF_NAME
				+ "` FLOAT NOT NULL ,"
				+ "`"
				+ DOCUMENT_TAG_TAG_ALT_NAME
				+ "` VARCHAR( 255 ) ,"
				+ "INDEX (  `"
				+ DOCUMENT_TAG_DOCUMENT_ID_NAME
				+ "` ,  `"
				+ DOCUMENT_TAG_TAG_ID_NAME
				+ "` )) ENGINE = MYISAM CHARACTER SET utf8 COLLATE utf8_general_ci;",
				db);
		System.out.println("All Tables are created equal!");
	}

	/**
	 * Executes an arbitrary statement (used for the DROP TABLE calls).
	 */
	private void execute(String query, Connection db) throws SQLException {
		Statement s = db.createStatement();
		try {
			s.execute(query);
		} finally {
			s.close();
		}
	}

	/**
	 * Returns the singleton instance, creating it (and the database tables) on
	 * first use. Synchronized so concurrent first calls cannot create two
	 * instances — the rest of this class already assumes multi-threaded use.
	 */
	public static synchronized SqlDao getInstance()
			throws ClassNotFoundException, SQLException, SqlAccessException {
		if (instance == null)
			instance = new SqlDao();
		return instance;
	}

	/**
	 * Buffers Documents that are to be written. When the Limit is reached, the
	 * Buffer is flushed to the Database. flushBufferedDocuments() has to be
	 * called after iterating over the documents to not forget the remaining
	 * documents. The Buffer is used to set up a compromise between Performance
	 * and Memory Usage: When every Document uses its own SqlConnection, the
	 * Performance is extremely low. When all use the same SqlConnection, the
	 * heap size blows.
	 * 
	 * @param document the document to enqueue for writing
	 * @throws Exception if flushing the full buffer fails
	 */
	public void writeDocument(TextDocument document) throws Exception {
		this.bufferedDocuments.add(document);
		if (this.bufferedDocuments.size() >= SQL_DOCUMENTS_BUFFER_SIZE) {
			this.writeDocuments(this.bufferedDocuments);
			this.bufferedDocuments.clear();
		}
	}

	/**
	 * Writes documents and their Tags to the database. Useful if documents are
	 * to be added which are certainly NOT in the database yet. Does NOT check
	 * if there already is a document with that path, but just adds them to the
	 * table. DOES check if there is already a Tag matching each tag lemma,
	 * because that is the primary way of linking the Documents.
	 * 
	 * @param documents documents to write; tag-less documents are skipped
	 * @throws Exception on database errors or duplicate tag rows
	 */
	public void writeDocuments(List<TextDocument> documents) throws Exception {
		Connection db = SqlConnection.open();
		try {
			for (TextDocument document : documents) {

				// Documents without tags cannot be linked to anything — skip.
				if (document.getTagCount() < 1) {
					documentsSkipped++;
					continue;
				}

				PreparedStatement psDoc = db
						.prepareStatement("INSERT INTO  `" + DOCUMENT_TABLE_NAME
								+ "` (`" + DOCUMENT_ID_NAME + "`,`"
								+ DOCUMENT_WIKI_ID_NAME + "` ,`"
								+ DOCUMENT_AUTHOR_NAME + "`,`"
								+ DOCUMENT_TITLE_NAME + "`,`"
								+ DOCUMENT_TAG_COUNT_NAME
								+ "`) VALUES ( ? , ? , ? , ? , ? ) ");
				try {
					psDoc.setInt(1, document.getUniqueDocId());
					psDoc.setString(2, document.getSolrId());
					psDoc.setString(3, document.getMetaData().getAuthor());
					psDoc.setString(4, document.getMetaData().getTitle());
					psDoc.setInt(5, document.getTagCount());
					psDoc.execute();
				} finally {
					psDoc.close();
				}

				// Every tag is also written to the database; we first have to
				// check whether it is already present as a Tag row.
				for (TagVector tagVector : document.getTagVectors()) {

					int tagId = -1;

					// Check-then-insert on the tag table must be atomic across
					// threads, otherwise two threads could insert the same
					// lemma twice.
					synchronized (this) {

						PreparedStatement psTagInDataBase = db
								.prepareStatement("SELECT `" + TAG_ID_NAME
										+ "` FROM `" + TAG_TABLE_NAME + "` WHERE `"
										+ TAG_LEMMA_NAME + "` = ? ");
						try {
							psTagInDataBase.setString(1,
									tagVector.getLemmatizedTerm());
							ResultSet rsTagInDataBase = psTagInDataBase
									.executeQuery();

							while (rsTagInDataBase.next()) {
								// Only one row may come back, since tag lemmas
								// must be unique. If tagId is already set and
								// the loop keeps running, there are duplicates.
								if (tagId != -1)
									throw new Exception(
											"Fehler beim Schreiben der Tags in die Datenbank. Das zu schreibende Tag ist mehrfach vorhanden. Oder ich habe die Funktionsweise von Reulstset nicht richtig kapiert. anywho, in dieser zeiel nachgucken!");
								// Use the selected column name rather than a
								// hardcoded "id" literal.
								tagId = rsTagInDataBase.getInt(TAG_ID_NAME);
							}
							if (tagId == -1) {
								// The tag does not exist yet — insert it.
								tagId = TagIdGenerator.getInstance().generateId();
								PreparedStatement psTag = db
										.prepareStatement("INSERT INTO  `"
												+ TAG_TABLE_NAME + "` (`" + TAG_ID_NAME
												+ "`,`" + TAG_LEMMA_NAME
												+ "`) VALUES ( ? , ? ) ");
								try {
									psTag.setInt(1, tagId);
									psTag.setString(2,
											tagVector.getLemmatizedTerm());
									psTag.execute();
								} catch (Exception e) {
									System.out
											.println("ERROR: Konnte folgendes nicht in die Datenbank schreiben: "
													+ tagVector.getLemmatizedTerm());
								} finally {
									psTag.close();
								}
							}
						} finally {
							// Also closes rsTagInDataBase.
							psTagInDataBase.close();
						}

					} // end synchronized

					PreparedStatement psDocTag = db
							.prepareStatement("INSERT INTO  `"
									+ DOCUMENT_TAG_TABLE_NAME + "` (`"
									+ DOCUMENT_TAG_ID_NAME + "`,`"
									+ DOCUMENT_TAG_DOCUMENT_ID_NAME + "`, `"
									+ DOCUMENT_TAG_TAG_ID_NAME + "`, `"
									+ DOCUMENT_TAG_TF_NAME + "`, `"
									+ DOCUMENT_TAG_TFIDF_NAME + "`, `"
									+ DOCUMENT_TAG_TAG_ALT_NAME
									+ "`) VALUES ( ? , ? , ? , ? , ? , ? ) ");
					try {
						psDocTag.setInt(1, DocumentTagIdGenerator.getInstance()
								.generateId());
						psDocTag.setInt(2, document.getUniqueDocId());
						psDocTag.setInt(3, tagId);
						psDocTag.setInt(4, (int) tagVector.getTf());
						psDocTag.setFloat(5, (float) tagVector.getTfidf());
						psDocTag.setString(6, tagVector.getOriginalTerm());
						psDocTag.execute();
					} finally {
						psDocTag.close();
					}

				} // end for tagVector
				documentsWritten++;

			} // end for documents
		} finally {
			// Close even on failure — the original leaked the connection on
			// any exception in the loop.
			db.close();
		}
	}

	/**
	 * For each given document, fills in the missing "alternative tag"
	 * (original surface form) column of the document-tag table by asking Solr
	 * for the original term of every lemmatized tag.
	 * 
	 * @param documents documents whose tags should be refined
	 * @return always {@code true}
	 */
	public boolean refineDocuments(List<TextDocument> documents)
			throws SQLException, ClassNotFoundException, IOException {

		Connection db = SqlConnection.open();
		try {
			WikipediaSolrDao solrDao = WikipediaSolrDao.getInstance();

			for (TextDocument document : documents) {

				PreparedStatement documentStatement = db.prepareStatement("SELECT "
						+ DOCUMENT_TAG_TABLE_NAME + "." + DOCUMENT_TAG_ID_NAME
						+ ", " + DOCUMENT_WIKI_ID_NAME + ", " + TAG_LEMMA_NAME
						+ ", " + DOCUMENT_TAG_TAG_ALT_NAME + " FROM "
						+ DOCUMENT_TABLE_NAME + " LEFT JOIN "
						+ DOCUMENT_TAG_TABLE_NAME + " ON " + DOCUMENT_TABLE_NAME
						+ "." + DOCUMENT_ID_NAME + " = "
						+ DOCUMENT_TAG_DOCUMENT_ID_NAME + " LEFT JOIN "
						+ TAG_TABLE_NAME + " ON " + TAG_TABLE_NAME + "."
						+ TAG_ID_NAME + " = " + DOCUMENT_TAG_TAG_ID_NAME
						+ " WHERE " + DOCUMENT_TAG_DOCUMENT_ID_NAME + " = ?");
				try {
					documentStatement.setInt(1, document.getUniqueDocId());
					ResultSet documentResultSet = documentStatement.executeQuery();

					// Iterate over all Tags attributed to the document.
					String path = null;
					while (documentResultSet.next()) {
						// The path is the same for every row of this document.
						if (path == null)
							path = documentResultSet
									.getString(DOCUMENT_WIKI_ID_NAME);

						// Read the column that was actually selected: the
						// document-tag row id used in the UPDATE below. The
						// original read DOCUMENT_ID_NAME here, which is not
						// part of the select list.
						int documentTagId = documentResultSet
								.getInt(DOCUMENT_TAG_ID_NAME);
						String tag = documentResultSet.getString(TAG_LEMMA_NAME);

						// Some Tags have encoding errors ("?" placeholders) —
						// throw those away.
						if (tag != null && tag.contains("?"))
							continue;
						String tagAlt = documentResultSet
								.getString(DOCUMENT_TAG_TAG_ALT_NAME);

						// Only fill in the alternative term when it is missing.
						if ((tag != null && !tag.isEmpty())
								&& (tagAlt == null || tagAlt.isEmpty())) {

							tagAlt = solrDao.retrieveOriginalTerm(path, tag);

							PreparedStatement documentUpdateStatement = db
									.prepareStatement("UPDATE "
											+ DOCUMENT_TAG_TABLE_NAME + " SET "
											+ DOCUMENT_TAG_TAG_ALT_NAME + " = ? WHERE "
											+ DOCUMENT_TAG_ID_NAME + " = ?");
							try {
								documentUpdateStatement.setString(1, tagAlt);
								documentUpdateStatement.setInt(2, documentTagId);
								documentUpdateStatement.executeUpdate();
							} finally {
								// Was never closed before — leaked one
								// statement per updated tag.
								documentUpdateStatement.close();
							}
						}
					}
				} finally {
					documentStatement.close();
				}
			} // end for documents
		} finally {
			db.close();
		}

		return true;
	}

	/**
	 * Empties the buffer if it still contains documents. Must be called after
	 * the last {@link #writeDocument(TextDocument)} so no document is lost.
	 * 
	 * @throws Exception if writing the remaining documents fails
	 */
	public void flushBufferedDocuments() throws Exception {
		if (bufferedDocuments.size() > 0) {
			writeDocuments(bufferedDocuments);
			// Clear after writing — the original left the buffer full, so a
			// second flush (or the next writeDocument overflow) would write
			// every buffered document again.
			bufferedDocuments.clear();
		}
		System.out.println("Buffer emptied!");
	}

	/** @return number of documents written so far */
	public int getDocumentsWritten() {
		return documentsWritten;
	}

	/** @return number of documents skipped (no tags) so far */
	public int getDocumentsSkipped() {
		return documentsSkipped;
	}

	/**
	 * Stub: returns an empty list. NOTE(review): presumably intended to load
	 * documents for clustering from the database — not implemented yet.
	 */
	public List<TextDocument> getClusteringDocuments() {
		return new ArrayList<TextDocument>();
	}

}
