package kisti.lod.classification;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;

import kisti.lod.classification.db.DBConnector;
import kisti.lod.classification.db.Domain;
import kisti.lod.classification.db.Keyword;
import kisti.lod.classification.db.URI;
import kisti.lod.sindice.MySindice;

import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.dao.DaoManager;

/**
 * Reads the keyword files in the keyword folder, issues search queries to sindice
 * for each keyword and stores the first 100 returned URIs in a mysql database.
 * @author Sebastian
 *
 */
public class URICollector {
	//number of results to be returned per keyword
	static double numberOfResults = 100;
	//number of results per page
	static double perpage = 10;
	//folder the keyword files are stored in
	static String keywordFolder = "keywords";
	//filters to be applied for the queries
	static String[] filters = {"format:RDF"};
	
	public static void main (String[] args) throws Exception {
		
		//create ORMLite DAOs
		Dao<Domain, String> domainDao = DaoManager.createDao(DBConnector.getConnectionSource(), Domain.class);
		Dao<Keyword, String> keywordDao = DaoManager.createDao(DBConnector.getConnectionSource(), Keyword.class);
		Dao<URI, String> uriDao = DaoManager.createDao(DBConnector.getConnectionSource(), URI.class);

		//TODO: implement proper logging using log4j
		//setup quirks mode logging for sindice errors and db errors.
		//try-with-resources keeps both logs open for the WHOLE run and closes
		//them exactly once; the original closed them inside the per-file loop,
		//so every append after the first keyword file hit a closed stream
		try (BufferedWriter sindiceLog = new BufferedWriter(new FileWriter(new File("logs/sindiceErrors.txt"), true));
		     BufferedWriter dbLog = new BufferedWriter(new FileWriter(new File("logs/dbErrors.txt"), true))) {

			MySindice sindice = new MySindice();

			//iterate over keyword files
			File[] files = FileFactory.getKeywordFolder().listFiles();
			int x = 1;
			for (File file : files) {

				//create domain object with filename (equals filename without file extension);
				//skip anything that is not a .txt keyword file instead of
				//crashing on substring(0, -1)
				String fileName = file.getName();
				int ext = fileName.indexOf(".txt");
				if (ext < 0) {
					continue;
				}
				Domain domain = new Domain(fileName.substring(0, ext));
				//store domain object in database
				domainDao.create(domain);

				//read keyword file; closed even if a DB or sindice call throws
				try (BufferedReader br = new BufferedReader(new FileReader(file))) {
					String line;

					//iterate over each line, ergo each keyword
					while ((line = br.readLine()) != null) {
						Keyword keyword = new Keyword(line, domain);
						keywordDao.create(keyword);
						//Show current status
						System.out.println("processing: " + line + "(" + x + "/5000)");
						x++;

						//iterate over the results for each keyword page per page
						double iterations = Math.ceil(numberOfResults / perpage);
						com.sindice.result.SearchResults searchResults = null;

						for (int i = 1; i <= iterations; i++) {
							try {
								searchResults = sindice.termSearch(keyword.getName(), i, filters);
							}
							catch (Exception e) {
								//log the failed page and continue with the next one
								sindiceLog.append(keyword.getName() + " page " + i + "\n");
								sindiceLog.append(e.getMessage() + "\n\n");
								continue;
							}

							//jump to next keyword if no further hits for current keyword are found
							if (searchResults.size() == 0) {
								break;
							}

							int n = 1;
							//iterate over search results and store uris in database
							for (com.sindice.result.SearchResult searchResult : searchResults) {
								//1-based rank of this hit across all pages
								double position = (i - 1) * perpage + n;
								URI uri = new URI(searchResult.getLink(), keyword, (int) position);
								try {
									uriDao.create(uri);
								}
								catch (Exception e) {
									dbLog.append(searchResult.getLink() + "(keywordId: " + keyword.getKeywordId() + ")\n");
									dbLog.append(e.getMessage() + "\n\n");
								}
								//stop once the configured maximum number of results is stored
								if ((i - 1) * perpage + n >= numberOfResults) {
									break;
								}
								n++;
							}
						}
					}
				}
			}
		}
	}
}