package edu.kit.aifb.bowsim.index;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.StaleReaderException;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.SimpleFSDirectory;

/**
 * Read-side access to the BOW-similarity Lucene text index.
 *
 * <p>Wraps a single shared {@link IndexReader} (static: the first constructor
 * call opens it, later calls reuse it) and exposes doc-id/URI lookup, term
 * enumeration and a tf-idf cosine similarity between two documents.
 *
 * <p>NOTE(review): the shared static reader means all instances operate on the
 * same index and {@link #close()} affects every instance — kept as-is for
 * backward compatibility with existing callers.
 */
public class TextIndexReader {
	transient Logger _log = Logger.getLogger(this.getClass().getName());

	// Shared across all instances; first constructor to run opens/assigns it.
	static IndexReader _reader;

	// Field names used by the index (must match TextIndexWriter).
	public static final String URI = "uri";
	public static final String TITLE = "title";
	public static final String DESCRIPTION = "description";

	/**
	 * Opens the index in {@code dir} read-only (reuses the shared reader if
	 * one is already open).
	 *
	 * @param dir directory containing the Lucene index
	 * @throws IOException if {@code dir} does not exist or the index cannot be opened
	 */
	public TextIndexReader(File dir) throws IOException {
		this(dir, true);
	}

	/**
	 * Opens the index in {@code dir}, reusing the shared reader if one is
	 * already open (in which case {@code readonly} is ignored).
	 *
	 * @param dir directory containing the Lucene index
	 * @param readonly whether to open the reader read-only
	 * @throws IOException if {@code dir} does not exist or the index cannot be opened
	 */
	public TextIndexReader(File dir, boolean readonly) throws IOException {
		if (_reader == null) {
			if (!dir.exists()) {
				throw new IOException("index dir " + dir + " not found");
			}
			Directory ldir = new SimpleFSDirectory(dir);
			_reader = IndexReader.open(ldir, readonly);
		}
	}

	/**
	 * Adopts an already-open reader as the shared reader (replacing any
	 * previously set one).
	 *
	 * @param reader open Lucene reader to use
	 */
	public TextIndexReader(IndexReader reader) {
		_reader = reader;
	}

	/**
	 * Returns the ids of all live (non-deleted) documents in the index.
	 *
	 * @return list of live document ids, in ascending order
	 */
	public List<Integer> getDocIds() {
		List<Integer> li = new ArrayList<Integer>();

		// FIX: iterate up to maxDoc(), not numDocs(). numDocs() counts only
		// live documents, so when deletions exist the old bound stopped short
		// and silently dropped the highest doc ids.
		for (int i = 0; i < _reader.maxDoc(); i++) {
			if (!_reader.isDeleted(i)) {
				li.add(i);
			}
		}

		return li;
	}

	/**
	 * Looks up the document id indexed under the given URI.
	 *
	 * @param uri URI in N3 syntax (.toN3())
	 * @return the first matching document id, or -1 if the URI is not indexed
	 * @throws IOException on index access failure
	 */
	public int getDocId(String uri) throws IOException {
		int docid = -1;

		Term term = new Term(TextIndexWriter.URI, uri);

		TermDocs td = _reader.termDocs(term);
		try {
			// Bulk-read API: one slot is enough, we only want the first match.
			int[] docs = new int[1];
			int[] freqs = new int[1];
			if (td.read(docs, freqs) > 0) {
				docid = docs[0];
			}
		} finally {
			// FIX: TermDocs was never closed (resource leak).
			td.close();
		}

		_log.info("getting docid for " + uri + ": " + docid);

		return docid;
	}

	/**
	 * Returns the URI stored in the given document.
	 *
	 * @param docid Lucene document id
	 * @return value of the {@link #URI} field, or null if the field is absent
	 * @throws CorruptIndexException if the index is corrupt
	 * @throws IOException on index access failure
	 */
	public String getUri(int docid) throws CorruptIndexException, IOException {
		_log.info("getting uri for " + docid);

		Document d = _reader.document(docid);

		String uri = d.get(URI);

		_log.info("getting uri for " + docid + " " + uri);

		return uri;
	}

	/**
	 * Returns all terms in the index, in index order.
	 *
	 * <p>FIX: single pass — the old implementation enumerated all terms twice
	 * (once via getTermsCount() to size the array, once to fill it) and never
	 * closed the enumeration.
	 *
	 * @return every term in the index
	 * @throws IOException on index access failure
	 */
	public Term[] getTerms() throws IOException {
		List<Term> terms = new ArrayList<Term>();

		TermEnum te = _reader.terms();
		try {
			while (te.next()) {
				terms.add(te.term());
			}
		} finally {
			te.close();
		}

		return terms.toArray(new Term[terms.size()]);
	}

	/**
	 * Counts the terms in the index by full enumeration.
	 *
	 * @return total number of terms
	 * @throws IOException on index access failure
	 */
	public int getTermsCount() throws IOException {
		int i = 0;

		TermEnum te = _reader.terms();
		try {
			while (te.next()) {
				i++;
			}
		} finally {
			// FIX: TermEnum was never closed (resource leak).
			te.close();
		}

		return i;
	}

	/**
	 * Returns the stored term-frequency vectors for a document.
	 *
	 * @param docid Lucene document id
	 * @return one vector per field, or null if no term vectors were stored
	 * @throws CorruptIndexException if the index is corrupt
	 * @throws IOException on index access failure
	 */
	public TermFreqVector[] getTermFreq(int docid) throws CorruptIndexException, IOException {
		return _reader.getTermFreqVectors(docid);
	}

	/**
	 * Returns the postings for a term.
	 *
	 * <p>Caller is responsible for closing the returned {@link TermDocs}.
	 *
	 * @param t term to look up
	 * @return postings enumerator for {@code t}
	 * @throws IOException on index access failure
	 */
	public TermDocs getTermDocs(Term t) throws IOException {
		return _reader.termDocs(t);
	}

	/**
	 * Returns the number of documents containing the term.
	 *
	 * @param t term to look up
	 * @return document frequency of {@code t}
	 * @throws IOException on index access failure
	 */
	public int getDocFreq(Term t) throws IOException {
		return _reader.docFreq(t);
	}

	/**
	 * Deletes a document from the index via the shared reader.
	 *
	 * @param docId Lucene document id to delete
	 * @throws StaleReaderException if the reader is out of date
	 * @throws CorruptIndexException if the index is corrupt
	 * @throws LockObtainFailedException if the write lock cannot be obtained
	 * @throws IOException on index access failure
	 */
	public void deleteItem(int docId) throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
		_reader.deleteDocument(docId);
	}

	/**
	 * Returns the postings for a term (alias of {@link #getTermDocs(Term)},
	 * kept for backward compatibility). Caller must close the result.
	 *
	 * @param term term to look up
	 * @return postings enumerator for {@code term}
	 * @throws IOException on index access failure
	 */
	public TermDocs termDocs(Term term) throws IOException {
		return _reader.termDocs(term);
	}

	/**
	 * Computes the tf-idf cosine similarity between two documents.
	 *
	 * <p>tf = termFreq / termsInField; idf = ln(liveDocs / docFreq).
	 *
	 * @param doc1 first document id
	 * @param doc2 second document id
	 * @return cosine similarity of the two tf-idf vectors
	 * @throws IOException on index access failure
	 */
	public double similarity(int doc1, int doc2) throws IOException {
		TfIdfVector[] m = new TfIdfVector[2];

		int[] docids = new int[] { doc1, doc2 };

		// FIX: hoisted out of the per-term loop — getDocIds() scans the whole
		// index (O(maxDoc)) and its size is loop-invariant.
		double numDocs = getDocIds().size();

		for (int i = 0; i < docids.length; i++) {
			TermFreqVector[] tfv = getTermFreq(docids[i]);

			m[i] = new TfIdfVector();

			// FIX: getTermFreqVectors() returns null for documents indexed
			// without stored term vectors; the old code NPE'd here.
			if (tfv == null) {
				_log.warning("no term vectors stored for doc " + docids[i]);
				continue;
			}

			for (TermFreqVector tfreq : tfv) {
				String field = tfreq.getField();

				int[] freqs = tfreq.getTermFrequencies();
				String[] txts = tfreq.getTerms();

				_log.info("no of terms: " + txts.length);

				for (int j = 0; j < txts.length; j++) {
					String txt = txts[j];
					int freq = freqs[j];
					Term term = new Term(field, txt);

					_log.info(field + ":" + txt + " freq " + freq + " docfreq " + getDocFreq(term));
					double tf = (double) freq / (double) txts.length;
					// FIX: the old code divided two ints before Math.log, so
					// docCount/docFreq truncated (e.g. 5/3 -> 1 -> idf = 0),
					// zeroing most tf-idf weights. Divide in floating point.
					double idf = Math.log(numDocs / (double) getDocFreq(term));
					_log.info("tf-idf is " + tf + "*" + idf + "=" + tf * idf);

					m[i].add(term, tf * idf);
				}
			}
		}

		return m[0].cosSim(m[1]);
	}

	/**
	 * Closes the shared reader (idempotent; affects all instances).
	 *
	 * @throws IOException on close failure
	 */
	public void close() throws IOException {
		if (_reader != null) {
			_reader.close();
			_reader = null;
		}
	}
}

