/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package pl.tom.social.analyzer.analysis;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Hit;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.getopt.stempel.lucene.StempelAnalyzer;
import pl.tom.social.analyzer.analysis.SocialAnalyzer;
import pl.tom.social.common.sitetool.ISiteTools;
import pl.tom.social.common.sitetool.SiteToolsFactory;
import pl.tom.social.dal.SocialDatabaseFacade;
import pl.tom.social.dal.entity.Site;
import pl.tom.social.dal.entity.SiteEntry;
import pl.tom.social.dal.utils.EntryIterator;

/**
 *
 * @author Tom
 */
public class QuerySearch {

	private static final Logger logger = Logger.getLogger(QuerySearch.class);

	/**
	 * Scores whole sites against a free-text query.
	 * <p>
	 * Builds (overwriting) a Lucene index under {@code "luceneDir"} with one
	 * document per site — the concatenated text of all its entries — then runs
	 * the query and returns the per-site relevance scores.
	 *
	 * @param query free-text query in Lucene query syntax
	 * @param sites sites whose entries are indexed and scored
	 * @return map from site URL to Lucene score, or {@code null} when the
	 *         index directory cannot be opened
	 */
	public static Map<String, Float> getSitesScore(String query, List<Site> sites) {
		Directory dir = null;
		try {
			dir = FSDirectory.getDirectory("luceneDir");
			// NOTE(review): indexing uses StempelAnalyzer but getScore() parses
			// the query with SocialAnalyzer — confirm they tokenize compatibly.
			Analyzer analyzer = new StempelAnalyzer();

			createSiteDocument(dir, analyzer, sites);
			return getScore(dir, query);
		} catch(IOException ex) {
			logger.log(Level.ERROR, ex);
		} finally {
			// Release the directory once searching is done (was leaked before).
			closeQuietly(dir);
		}
		return null;
	}

	/**
	 * Scores individual site entries against a free-text query.
	 * <p>
	 * Builds (overwriting) a Lucene index under {@code "luceneDir"} with one
	 * document per entry, then runs the query and returns per-entry scores.
	 *
	 * @param query free-text query in Lucene query syntax
	 * @param sites sites whose entries are indexed and scored
	 * @return map from entry URL to Lucene score, or {@code null} when the
	 *         index directory cannot be opened
	 */
	public static Map<String, Float> getEntriesScore(String query, List<Site> sites) {
		SocialDatabaseFacade db = new SocialDatabaseFacade();
		Directory dir = null;
		try {
			dir = FSDirectory.getDirectory("luceneDir");
			Analyzer analyzer = new SocialAnalyzer();

			createEntryDocument(db, dir, analyzer, sites);
			return getScore(dir, query);
		} catch(IOException ex) {
			logger.log(Level.ERROR, ex);
		} finally {
			// Release the directory once searching is done (was leaked before).
			closeQuietly(dir);
		}
		return null;
	}

	/**
	 * Indexes one Lucene document per site entry: the entry URL is stored
	 * unindexed, the entry body (HTML stripped to text) is analyzed unstored.
	 * The index is recreated from scratch ({@code create=true}).
	 */
	private static void createEntryDocument(SocialDatabaseFacade db, Directory dir, Analyzer analyzer, List<Site> sites) {
		IndexWriter iw = null;
		try {
			iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
			for(Site site : sites) {
				ISiteTools tools = SiteToolsFactory.getTools(site);
				for(Iterator iter = new EntryIterator(site); iter.hasNext();) {
					SiteEntry entry = (SiteEntry) iter.next();
					Document doc = new Document();
					String body = tools.getHtmlAsText(entry.getBody());
					doc.add(new Field("url", entry.getUrl(), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
					doc.add(new Field("body", body, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
					iw.addDocument(doc);
				}
			}
		} catch(CorruptIndexException ex) {
			logger.log(Level.ERROR, ex);
		} catch(LockObtainFailedException ex) {
			logger.log(Level.ERROR, ex);
		} catch(IOException ex) {
			logger.log(Level.ERROR, ex);
		} finally {
			closeQuietly(iw);
		}
	}

	/**
	 * Indexes one Lucene document per site: the site URL is stored unindexed,
	 * the bodies of all its entries are concatenated (newline-separated) into
	 * a single analyzed, unstored "body" field. The index is recreated from
	 * scratch ({@code create=true}).
	 */
	private static void createSiteDocument(Directory dir, Analyzer analyzer, List<Site> sites) {
		IndexWriter iw = null;
		try {
			iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
			for(Site site : sites) {
				ISiteTools tools = SiteToolsFactory.getTools(site);
				Document doc = new Document();
				doc.add(new Field("url", site.getUrl(), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
				StringBuffer buffer = new StringBuffer();
				for(Iterator iter = new EntryIterator(site); iter.hasNext();) {
					SiteEntry entry = (SiteEntry) iter.next();
					String body = tools.getHtmlAsText(entry.getBody());
					// Append without intermediate concatenation (was body + "\n").
					buffer.append(body).append('\n');
				}
				doc.add(new Field("body", buffer.toString(), Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
				iw.addDocument(doc);
			}
		} catch(CorruptIndexException ex) {
			logger.log(Level.ERROR, ex);
		} catch(LockObtainFailedException ex) {
			logger.log(Level.ERROR, ex);
		} catch(IOException ex) {
			logger.log(Level.ERROR, ex);
		} finally {
			closeQuietly(iw);
		}
	}

	/**
	 * Runs {@code queryString} against the "body" field of the given index
	 * and collects hit scores keyed by each hit's stored "url" field.
	 * Errors are logged and yield a (possibly partial) result map.
	 */
	private static Map<String, Float> getScore(Directory dir, String queryString) {
		Map<String, Float> result = new HashMap<String, Float>();
		Searcher searcher = null;
		try {
			searcher = new IndexSearcher(dir);
			QueryParser parser = new QueryParser("body", new SocialAnalyzer());
			Query query = parser.parse(queryString);
			Hits hits = searcher.search(query);
			for(Iterator iter = hits.iterator(); iter.hasNext();) {
				Hit hit = (Hit) iter.next();
				result.put(hit.getDocument().get("url"), hit.getScore());
			}
		} catch(ParseException ex) {
			logger.log(Level.ERROR, ex);
		} catch(CorruptIndexException ex) {
			logger.log(Level.ERROR, ex);
		} catch(IOException ex) {
			logger.log(Level.ERROR, ex);
		} finally {
			// Close the searcher (was leaked before).
			closeQuietly(searcher);
		}
		// Drop any hit whose document had no stored URL.
		result.remove(null);
		return result;
	}

	/** Closes the writer, logging rather than propagating any failure. */
	private static void closeQuietly(IndexWriter iw) {
		if(iw != null) {
			try {
				iw.close();
			} catch(IOException ex) { // covers CorruptIndexException too
				logger.log(Level.ERROR, ex);
			}
		}
	}

	/** Closes the searcher, logging rather than propagating any failure. */
	private static void closeQuietly(Searcher searcher) {
		if(searcher != null) {
			try {
				searcher.close();
			} catch(IOException ex) {
				logger.log(Level.ERROR, ex);
			}
		}
	}

	/** Closes the directory, logging rather than propagating any failure. */
	private static void closeQuietly(Directory dir) {
		if(dir != null) {
			try {
				dir.close();
			} catch(IOException ex) {
				logger.log(Level.ERROR, ex);
			}
		}
	}
}
