package edu.unika.aifb.graphindex.searcher.keyword.evaluation;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeSet;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.util.Version;

import edu.unika.aifb.graphindex.index.IndexDirectory;
import edu.unika.aifb.graphindex.model.IEntity;
import edu.unika.aifb.graphindex.model.impl.Entity;
import edu.unika.aifb.graphindex.query.KeywordQuery;
import edu.unika.aifb.graphindex.query.QNode;
import edu.unika.aifb.graphindex.query.QueryGraph;
import edu.unika.aifb.graphindex.searcher.hybrid.exploration.ExploringIndexMatcher;
import edu.unika.aifb.graphindex.searcher.keyword.KeywordSearcher;
import edu.unika.aifb.graphindex.searcher.keyword.model.Constant;
import edu.unika.aifb.graphindex.searcher.keyword.model.EntropyModel;
import edu.unika.aifb.graphindex.searcher.keyword.model.KeywordElement;
import edu.unika.aifb.graphindex.searcher.keyword.model.KeywordSegment;
import edu.unika.aifb.graphindex.searcher.keyword.model.RelevanceModel;
import edu.unika.aifb.graphindex.storage.StorageException;
import edu.unika.aifb.graphindex.storage.lucene.LuceneUtil;

/**
 * Evaluates keyword queries over the graph index. A keyword query is first
 * translated into ranked structured interpretations (query graphs) via the
 * exploration matcher; the graphs are deduplicated, their nodes are annotated
 * with entropy-based score bounds, and the small-enough interpretations are
 * executed top-k against the underlying Lucene indexes.
 *
 * <p>Not thread-safe: the relevance/entropy model fields are (re)assigned per
 * evaluation and the Lucene searchers are shared.
 */
public class TopKQueryEvaluator {
	/** Project-level index reader providing access to all index directories. */
	protected edu.unika.aifb.graphindex.index.IndexReader indexReader;
	/** Relevance model of the current query; set in {@link #getTopQueries}. */
	private RelevanceModel queryRelevanceModel;
	/** Entropy model scoring entities against the query relevance model. */
	private EntropyModel entropyModel;
	private ExploringIndexMatcher matcher;
	private KeywordSearcher searcher;
	// Lucene reader/searcher pairs: entity documents (db) plus the
	// subject-property ("sp") and property-object ("po") vertical partitions.
	private IndexReader dbIndexReader;
	private IndexSearcher dbIndexSearcher;
	private IndexReader spoReader;
	private IndexSearcher spoSearcher;
	private IndexReader posReader;
	private IndexSearcher posSearcher;
	
	/** Cache of background (corpus-wide) term probabilities, keyed by term. */
	private Map<String, Double> backgroundTermsProb = new HashMap<String, Double>();
	
	/** Default/maximum number of query interpretations to explore. */
	private static final int MAX_INTERPRETATIONS = 30;
	private static final Logger log = Logger.getLogger(TopKQueryEvaluator.class);
	
	/**
	 * Opens all Lucene readers and searchers needed for evaluation.
	 *
	 * @param indexReader project index reader locating the index directories
	 * @throws IOException if a Lucene index cannot be opened
	 * @throws StorageException if the matcher or keyword searcher fails to initialize
	 */
	public TopKQueryEvaluator(edu.unika.aifb.graphindex.index.IndexReader indexReader) throws IOException, StorageException{
		this.indexReader = indexReader;
		matcher = new ExploringIndexMatcher(indexReader);
		matcher.initialize();
		searcher = new KeywordSearcher(indexReader);
		dbIndexReader = IndexReader.open(FSDirectory.open(indexReader.getIndexDirectory().getDirectory(IndexDirectory.DB_IDX_DIR)));
		dbIndexSearcher = new IndexSearcher(dbIndexReader);
		// Vertical-partition indexes are opened read-only with the project's
		// term-info index divisor (trades lookup speed for memory).
		spoReader = IndexReader.open(NIOFSDirectory.open(new File(indexReader.getIndexDirectory().getDirectory(IndexDirectory.VP_DIR).getAbsoluteFile() + "/sp")), new KeepOnlyLastCommitDeletionPolicy(), true, LuceneUtil.LUCENE_TERMINFO_INDEX_DIVISOR);
		spoSearcher = new IndexSearcher(spoReader);
		posReader = IndexReader.open(NIOFSDirectory.open(new File(indexReader.getIndexDirectory().getDirectory(IndexDirectory.VP_DIR).getAbsoluteFile() + "/po")), new KeepOnlyLastCommitDeletionPolicy(), true, LuceneUtil.LUCENE_TERMINFO_INDEX_DIVISOR);
		posSearcher = new IndexSearcher(posReader);
	}
	
	/**
	 * Evaluates the keyword query end-to-end: computes the top structured
	 * interpretations, executes each sufficiently small query graph, and
	 * post-processes the result rows.
	 *
	 * @param query the keyword query
	 * @param numberOfQueries maximum number of interpretations; negative means
	 *        the internal default ({@code MAX_INTERPRETATIONS})
	 * @param topK number of rows requested from each query execution
	 * @return sorted set of result rows (natural ordering of {@code Row})
	 */
	public Set<Row> evaluate(KeywordQuery query, int numberOfQueries, int topK) throws StorageException, IOException{
		List<QueryGraph> queryGraphs = getTopQueries(query, numberOfQueries);
		
		SortedSet<Row> result = new TreeSet<Row>(); 
		
		for(QueryGraph queryGraph : queryGraphs){
			QueryExecution queryExecution = new QueryExecution(this, queryGraph);
		
			// Skip overly large interpretations; executing them is too expensive.
			if(queryGraph.vertexSet().size() > 5)
				continue;
			
			result.addAll(queryExecution.execute(topK));
		}
		
		// Resolve the external search id for entities created by the db
		// importer (URI pattern: http://fzi.de/dbimporter#<type>.<key>): the
		// "<...>#<type>.__search_id" property points at the original id.
		for(Row row : result){
			for(IEntity entity : row.getNodeToEntity().values()){
				if(entity.getUri().startsWith("http://fzi.de/dbimporter#")){
					String searchIdProperty = entity.getUri().substring(0, entity.getUri().indexOf(".", entity.getUri().indexOf("#"))) + ".__search_id";
					Iterator<String> searchIdIterator = getTargetEntities(entity.getUri(),searchIdProperty).iterator();
					if(searchIdIterator.hasNext())
						entity.setSearchId(searchIdIterator.next());
				}
			}
		}
		
		return result;
	}
	
	/**
	 * Computes up to {@code numberOfQueries} distinct, valid structured
	 * interpretations of the keyword query and annotates each graph node with
	 * a score upper bound used for top-k pruning during execution.
	 *
	 * <p>Side effects: initializes {@code queryRelevanceModel} and
	 * {@code entropyModel}, and mutates the entity sets of keyword elements.
	 */
	public List<QueryGraph> getTopQueries(KeywordQuery query, int numberOfQueries) throws StorageException, IOException{
		if (numberOfQueries < 0)
			numberOfQueries = MAX_INTERPRETATIONS;
		
		log.info("evaluating...");
		log.debug("Query " + query);
		Map<KeywordSegment,Collection<KeywordElement>> segment2elements = search(query.getQuery(), searcher);
		
		// Collect every keyword element that matched at least one entity.
		Collection<KeywordElement> allKeywordElements = new HashSet<KeywordElement>();
		
		for(Collection<KeywordElement> keywordElements : segment2elements.values())
			for(KeywordElement keywordElement : keywordElements)
				if(keywordElement.entities.size() != 0){
					allKeywordElements.add(keywordElement);
					IEntity entity = keywordElement.entities.iterator().next();
					log.debug(keywordElement.getUri() + " "+ entity.getUri() +" " + entity.getTerms().length + " " + keywordElement.getKeywordSegment() + " " + keywordElement.entities.size());
				}
		
		List<QueryGraph> queryGraphs = new ArrayList<QueryGraph>();
		explore(query, numberOfQueries, segment2elements, queryGraphs);
		
		// Group keyword elements by element type.
		Map<String, Set<KeywordElement>> typeToKeywordElements = new HashMap<String, Set<KeywordElement>>();
		for(KeywordElement keywordElement : allKeywordElements){
			Set<KeywordElement> keywordElementsWithType = typeToKeywordElements.get(keywordElement.getElementType());
			if(keywordElementsWithType == null){
				keywordElementsWithType = new HashSet<KeywordElement>();
				typeToKeywordElements.put(keywordElement.getElementType(), keywordElementsWithType);
			}
			keywordElementsWithType.add(keywordElement);
		}
		
		// Give every keyword element of a type the union of all entities of
		// that type. All elements are cleared BEFORE any is refilled so that
		// elements sharing an entity collection are not wiped after update.
		for(Map.Entry<String, Set<KeywordElement>> typeEntry : typeToKeywordElements.entrySet()){
			Set<KeywordElement> keywordElementsWithType = typeEntry.getValue();
			Set<IEntity> allEntities = new HashSet<IEntity>();
			for(KeywordElement keywordElement : keywordElementsWithType)
				allEntities.addAll(keywordElement.entities);
			
			for(KeywordElement keywordElement : keywordElementsWithType){
				keywordElement.entities.clear();
			}
			
			for(KeywordElement keywordElement : keywordElementsWithType){
				keywordElement.entities.addAll(allEntities);
			}
		}
		
		// Drop invalid graphs and deduplicate isomorphic interpretations,
		// keeping the first representative in matcher ranking order.
		List<QueryGraph> finalQueryGraphs = new ArrayList<QueryGraph>();
		Set<QueryGraph> isomorphQueryGraphs = new HashSet<QueryGraph>();
		
		for(QueryGraph queryGraph : queryGraphs){
			
			if(isomorphQueryGraphs.contains(queryGraph) || !queryGraph.isValid())
				continue;
			
			finalQueryGraphs.add(queryGraph);
			
			for(QueryGraph otherQueryGraph : queryGraphs){
				if(otherQueryGraph != queryGraph && queryGraph.isIsomorphTo(otherQueryGraph))
					isomorphQueryGraphs.add(otherQueryGraph);	
			}
			
		}
		
		queryGraphs.clear();
		isomorphQueryGraphs.clear();
		
		// Keyword elements actually referenced by the surviving graphs.
		Set<KeywordElement> keywordElements = new HashSet<KeywordElement>();
		
		for(QueryGraph queryGraph : finalQueryGraphs){
			keywordElements.addAll(queryGraph.getKeywordElements());
		}
		
		queryRelevanceModel = new RelevanceModel(segment2elements, 20, this);
		log.debug("Query Relevance Model " + queryRelevanceModel.getNormalizedTermFrequencies());
		entropyModel = new EntropyModel(queryRelevanceModel, dbIndexReader, dbIndexSearcher);	
		
		// Score every not-yet-scored entity of the keyword elements and sort
		// each element's entities so the best one comes first.
		for(KeywordElement keywordElement : keywordElements){
			if(keywordElement.entities != null){
				for(IEntity entity : keywordElement.entities){
					if(entity.getScore() == 0){
						entity.setScore(entropyModel.calculateEntropyToDocument(new RelevanceModel(entity, queryRelevanceModel.getKeywords(), 1000)));
					}
				}
				keywordElement.sortEntities();
				IEntity firstEntity = keywordElement.entities.iterator().next();
				log.debug("HIER " + keywordElement.getUri() + " " +  firstEntity.getUri() + " " + firstEntity.getScore() + " " + Arrays.toString(firstEntity.getTerms()));
			}
		}
		
		// For free (non-keyword) nodes, estimate a score upper bound from the
		// best matching document of the node's extension; cache per label so
		// the same label is only searched once.
		Map<String, Double> freeNodeToMaxScore = new HashMap<String, Double>();
		
		for(QueryGraph queryGraph : finalQueryGraphs){
			for (QNode node : queryGraph.vertexSet()) {
				if(!node.isKeywordNode()){
					if(freeNodeToMaxScore.containsKey(node.getLabel())){
						node.setMaxScore(freeNodeToMaxScore.get(node.getLabel()));
					}else{
						try {
							QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, Constant.DPO_FIELD, new StandardAnalyzer(Version.LUCENE_CURRENT));
							queryParser.setDefaultOperator(QueryParser.OR_OPERATOR);
							// Build the query text from all relevance-model terms
							// (StringBuilder instead of repeated concatenation).
							StringBuilder relevanceModelQuery = new StringBuilder();
							for(String keyword : queryRelevanceModel.getNormalizedTermFrequencies().keySet())
								relevanceModelQuery.append(keyword).append(' ');
							BooleanQuery booleanQuery = new BooleanQuery();
							booleanQuery.add(queryParser.parse(relevanceModelQuery.toString()), Occur.SHOULD);
							booleanQuery.add(new TermQuery(new Term(Constant.EXT_FIELD, node.getLabel())), Occur.MUST);
							TopScoreDocCollector collector = TopScoreDocCollector.create(1, true);
							dbIndexSearcher.search(booleanQuery, collector);
							ScoreDoc[] docs = collector.topDocs().scoreDocs;
							if(docs.length != 0 ){
								TermFreqVector termFreqVector = dbIndexReader.getTermFreqVector(docs[0].doc, Constant.DPO_FIELD);
								if(termFreqVector != null){
									IEntity entity = new Entity("", "", termFreqVector.getTerms(), termFreqVector.getTermFrequencies());
									node.setMaxScore(entropyModel.calculateEntropyToDocument(new RelevanceModel(entity, queryRelevanceModel.getKeywords(), 500)));
								}
							}
							freeNodeToMaxScore.put(node.getLabel(), node.getMaxScore());
							
						} catch (ParseException e) {
							// A malformed generated query only loses the bound
							// for this node; continue with the remaining nodes.
							log.warn("could not parse relevance model query for node " + node.getLabel(), e);
						}
					}
				}else{
					if(node.getTopEntity() != null)
						node.setMaxScore(node.getTopEntity().getScore());
				}
			}
		}
		return finalQueryGraphs.subList(0, Math.min(finalQueryGraphs.size(), numberOfQueries));
	}
	
	/**
	 * Normalizes the in/out property weights of all keyword elements by their
	 * global maxima and runs the exploration matcher, appending the resulting
	 * query graphs to {@code queryGraphs}.
	 *
	 * @param query the keyword query (currently unused by the matcher setup)
	 * @param k requested number of interpretations (matcher is capped at
	 *        {@code MAX_INTERPRETATIONS})
	 */
	protected void explore(KeywordQuery query, int k, Map<KeywordSegment,Collection<KeywordElement>> segment2elements, List<QueryGraph> queryGraphs) throws StorageException, IOException {
		double inMax = 0.0, outMax = 0.0;
		Map<String,Double> inprops = new HashMap<String,Double>(), outprops = new HashMap<String,Double>();
		// First pass: sum weights per property over all elements, tracking maxima.
		for (KeywordSegment ks : segment2elements.keySet()) {
			for (KeywordElement ele : segment2elements.get(ks)) {
				for (String property : ele.getInPropertyWeights().keySet()) {
					Double w = inprops.get(property) == null ? 0.0 : inprops.get(property);
					w += ele.getInPropertyWeights().get(property);
					inMax = Math.max(inMax, w);
					inprops.put(property, w);
				}
				
				for (String property : ele.getOutPropertyWeights().keySet()) {
					Double w = outprops.get(property) == null ? 0.0 : outprops.get(property);
					w += ele.getOutPropertyWeights().get(property);
					outMax = Math.max(outMax, w);
					outprops.put(property, w);
				}
			}
		}
		
		// Second pass: replace each element's weight with the aggregated
		// weight normalized to [0,1] by the global maximum.
		for (KeywordSegment ks : segment2elements.keySet()) {
			for (KeywordElement ele : segment2elements.get(ks)) {
				for (String property : ele.getInPropertyWeights().keySet())
					ele.getInPropertyWeights().put(property, inprops.get(property) / inMax);
				
				for (String property : ele.getOutPropertyWeights().keySet())
					ele.getOutPropertyWeights().put(property, outprops.get(property) / outMax);
			}
		}

		matcher.setKeywords(segment2elements);
		matcher.setK(MAX_INTERPRETATIONS);
		matcher.match();
		
		queryGraphs.addAll(matcher.getQueryGraphs());
	}
	
	/**
	 * Tokenizes the raw query string and looks up the keyword elements
	 * matching each keyword segment.
	 */
	protected Map<KeywordSegment,Collection<KeywordElement>> search(String query, KeywordSearcher searcher) throws StorageException, IOException {
		List<String> list = KeywordSearcher.getKeywordList(query);
		log.debug("keyword list: " + list);
		Map<KeywordSegment,Collection<KeywordElement>> res = searcher.searchKeywordElements(list);
		return res;
	}
	
	/**
	 * Looks up the objects of the triple (sourceEntity, property, ?) in the
	 * "sp" vertical-partition index. The stored document packs all objects
	 * newline-separated in its "o" field.
	 *
	 * @return the set of target entities; empty (never null) if nothing is
	 *         found or an I/O error occurs
	 */
	public Set<String> getTargetEntities(String sourceEntity, String property){
		Set<String> targetEntities = new HashSet<String>();
		try {
			StringBuilder sb = new StringBuilder();
			sb.append(sourceEntity).append(LuceneUtil.KEY_DELIM).append(property).append(LuceneUtil.KEY_DELIM);
			TermQuery termQuery = new TermQuery(new Term("sp", sb.toString()));
			TopDocs topDocs = spoSearcher.search(termQuery, 1);
			if(topDocs.totalHits > 0){
				String targets = spoReader.document(topDocs.scoreDocs[0].doc).get("o");
				StringTokenizer st = new StringTokenizer(targets, "\n");
				while(st.hasMoreTokens())
					targetEntities.add(st.nextToken());
			}
		} catch (IOException e) {
			// Best effort: callers iterate the result directly, so return the
			// (possibly empty) set instead of null to avoid NPEs.
			log.warn("lookup of targets failed for " + sourceEntity + " / " + property, e);
		}
		return targetEntities;
	}
	
	/**
	 * Looks up the subjects of the triple (?, property, targetEntity) in the
	 * "po" vertical-partition index. The stored document packs all subjects
	 * newline-separated in its "s" field.
	 *
	 * @return the set of source entities; empty (never null) if nothing is
	 *         found or an I/O error occurs
	 */
	public Set<String> getSourceEntities(String targetEntity, String property){
		Set<String> sourceEntities = new HashSet<String>();
		try {
			StringBuilder sb = new StringBuilder();
			sb.append(property).append(LuceneUtil.KEY_DELIM).append(targetEntity).append(LuceneUtil.KEY_DELIM);
			TermQuery termQuery = new TermQuery(new Term("po", sb.toString()));
			TopDocs topDocs = posSearcher.search(termQuery, 1);
			if(topDocs.totalHits > 0){
				String sources = posReader.document(topDocs.scoreDocs[0].doc).get("s");
				StringTokenizer st = new StringTokenizer(sources, "\n");
				while(st.hasMoreTokens())
					sourceEntities.add(st.nextToken());
			}
		} catch (IOException e) {
			// Best effort: mirror getTargetEntities and never return null.
			log.warn("lookup of sources failed for " + targetEntity + " / " + property, e);
		}
		return sourceEntities;
	}
	
	/**
	 * Loads the entity's term vector from the db index and sets its terms,
	 * term frequencies, and entropy-based score. No-op (with a warning) if
	 * the entity document or its term vector is missing.
	 */
	public void setEntityScore(IEntity entity){
		try {
			BooleanQuery booleanQuery = new BooleanQuery();
			booleanQuery.add(new TermQuery(new Term(Constant.ENT_FIELD, entity.getUri())), Occur.MUST);
			TopScoreDocCollector collector = TopScoreDocCollector.create(1, true);
			dbIndexSearcher.search(booleanQuery, collector);
			ScoreDoc[] docs = collector.topDocs().scoreDocs;
			if(docs.length != 0){
				TermFreqVector termFreqVector = dbIndexReader.getTermFreqVector(docs[0].doc, Constant.DPO_FIELD);
				if(termFreqVector != null){
					entity.setTerms(termFreqVector.getTerms());
					entity.setTermFrequencies(termFreqVector.getTermFrequencies());
					entity.setScore(entropyModel.calculateEntropyToDocument(new RelevanceModel(entity, queryRelevanceModel.getKeywords(), 500)));
				}
			}
		} catch (Exception e) {
			log.warn("could not score entity " + entity.getUri(), e);
		}
	}
	
	/**
	 * Returns the background (corpus-wide) probability of a term, reading it
	 * from the db index on first access and caching it thereafter.
	 *
	 * @return the cached or freshly looked-up probability; 0 if the term is
	 *         unknown or the lookup fails
	 */
	public double getBackgroundTermProbability(String term){
		try {
			Double backgroundTermProb = backgroundTermsProb.get(term);
			if(backgroundTermProb == null){
				TopDocs topDocs = dbIndexSearcher.search(new TermQuery(new Term(Constant.TERM_FIELD, term)), 1);
				// Guard against unknown terms: the original unchecked
				// scoreDocs[0] access threw for terms not in the index.
				if(topDocs.scoreDocs.length == 0)
					return 0;
				String prob = dbIndexReader.document(topDocs.scoreDocs[0].doc).getFieldable(Constant.PROB_FIELD).stringValue();
				backgroundTermProb = Double.parseDouble(prob);
				backgroundTermsProb.put(term, backgroundTermProb);
			}
			return backgroundTermProb;
		} catch (Exception e) {
			log.warn("background probability lookup failed for term " + term, e);
			return 0;
		}
	}
}
