package edu.unika.aifb.graphindex.storage.lucene;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Semaphore;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.FSDirectory;

import edu.unika.aifb.graphindex.storage.IndexDescription;
import edu.unika.aifb.graphindex.storage.IndexStorageWriter;
import edu.unika.aifb.graphindex.storage.StorageException;
import edu.unika.aifb.graphindex.util.Util;

public class LuceneIndexStorageWriter implements IndexStorageWriter {
	private File m_dir;
	private IndexDescription m_idx;
	private IndexWriter m_writer;

	/**
	 * How added data is buffered: DOCUMENT_CACHE accumulates values per index
	 * key in memory and writes one document per key on flush; NO_CACHE forces
	 * a flush on every add.
	 */
	enum CacheType {
		DOCUMENT_CACHE, NO_CACHE
	}
	
	private CacheType m_cacheType = CacheType.DOCUMENT_CACHE;
	
	// index key -> values accumulated since the last flush
	private Map<String,Set<String>> m_valueCache;
	// number of cache flushes over this writer's lifetime (logged on close)
	private int m_flushes = 0;
	// flush threshold for the estimated cache size, in bytes
	private long m_maxCacheSize = 0;
	// rough running estimate of the cache's memory footprint, in bytes
	public long m_cacheSize = 0;
	
	// at most two writers (across ALL instances) may flush concurrently
	private static Semaphore m_flushSemaphore = new Semaphore(2, true);
	
	// BUGFIX: logger was created for LuceneIndexStorage.class (copy-paste),
	// which mislabeled every log line emitted by this class
	private static final Logger log = Logger.getLogger(LuceneIndexStorageWriter.class);

	/**
	 * Creates a writer for the given index description, (re)creating the
	 * Lucene index in the given directory (create=true wipes any existing
	 * index there).
	 *
	 * @param directory directory holding the Lucene index
	 * @param index description of the index being written
	 * @throws StorageException if the Lucene index cannot be opened
	 */
	public LuceneIndexStorageWriter(File directory, IndexDescription index) throws StorageException {
		m_dir = directory;
		m_idx = index;
		
		try {
			m_writer = new IndexWriter(FSDirectory.open(m_dir), new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
			m_writer.setMergeFactor(40);
		}
		catch (IOException e) {
			throw new StorageException(e);
		}
		
		// default memory budget: 1/20th of the JVM's max heap, in MB
		setMaxMemory((int)(Runtime.getRuntime().maxMemory() / 1024 / 1024 / 20));
		
		m_valueCache = new HashMap<String,Set<String>>();
		m_cacheType = CacheType.DOCUMENT_CACHE;
	}
	
	/** Disables in-memory buffering; subsequent adds write through immediately. */
	public void disableCache() {
		m_cacheType = CacheType.NO_CACHE;
	}
	
	/**
	 * Writes all cached values to the index and commits. Throttled by a
	 * semaphore shared across all writer instances.
	 *
	 * @throws StorageException on index errors, or if interrupted while
	 *         waiting for a flush slot
	 */
	public void flush() throws StorageException {
		try {
			m_flushSemaphore.acquire();
		} catch (InterruptedException e) {
			// BUGFIX: the interrupt used to be swallowed (printStackTrace) and
			// the flush continued WITHOUT holding a permit, later releasing a
			// permit it never acquired. Restore the interrupt flag and abort.
			Thread.currentThread().interrupt();
			throw new StorageException(e);
		}
		
		try {
			if (m_cacheType == CacheType.DOCUMENT_CACHE)
				flushCache();
			else if (m_cacheType != CacheType.NO_CACHE)
				throw new RuntimeException("unknown cache type");
			
			m_writer.commit();
		} catch (CorruptIndexException e) {
			throw new StorageException(e);
		} catch (IOException e) {
			throw new StorageException(e);
		} finally {
			// BUGFIX: release in finally — a failed flush used to leak the
			// permit, eventually blocking every writer forever (only 2 permits)
			m_flushSemaphore.release();
		}
	}
	
	/**
	 * Discards all cached values and deletes every document from the index.
	 */
	public void clear() throws StorageException {
		m_valueCache = new HashMap<String,Set<String>>();
		// BUGFIX: reset the size estimate along with the cache; the stale
		// value used to keep triggering flushes of an empty cache
		m_cacheSize = 0;
		try {
			m_writer.deleteAll();
			m_writer.commit();
		}
		catch (IOException e) {
			throw new StorageException(e);
		}
	}

	/**
	 * Flushes any remaining cached data and closes the underlying Lucene
	 * writer. The writer is unusable afterwards.
	 */
	public void close() throws StorageException {
		flush();
		
		try {
			m_writer.close();
		}
		catch (IOException e) {
			throw new StorageException(e);
		}
		
		m_writer = null;
		
		log.debug(m_idx + " closed (flushed: " + m_flushes + ")");
	}

	/**
	 * Sets the total memory budget: 30% goes to Lucene's RAM buffer (capped
	 * at 1536 MB by Lucene's ~2 GB buffer limit), 70% to the value cache.
	 *
	 * @param maxMemoryMB total memory budget in megabytes
	 */
	public void setMaxMemory(int maxMemoryMB) throws StorageException {
		m_writer.setRAMBufferSizeMB(Math.min(maxMemoryMB * 0.3, 1536));
		m_maxCacheSize = (long)(maxMemoryMB * 0.7 * 1024 * 1024);
		
		log.debug("writer cache: " + (int)m_writer.getRAMBufferSizeMB() + ", max cache size: " + m_maxCacheSize / 1024 / 1024);

		// shrinking the budget may push the current cache over the new limit
		if (checkFlush())
			flush();
	}
	
	// true when the cache exceeds its budget or caching is disabled
	private boolean checkFlush() {
		return m_cacheSize >= m_maxCacheSize || m_cacheType == CacheType.NO_CACHE;
	}
	
	private void addToCache(String indexKey, Collection<String> values) throws StorageException {
		for (String val : values)
			addToCache(indexKey, val);

		if (checkFlush())
			flush();
	}
	
	private void addToCache(String indexKey, String value) throws StorageException {
		if (m_cacheType == CacheType.DOCUMENT_CACHE) {
			Set<String> cached = m_valueCache.get(indexKey);
			if (cached == null) {
				cached = new HashSet<String>();
				m_valueCache.put(indexKey, cached);
				// rough per-key overhead estimate (map entry + set object)
				m_cacheSize += 256;
			}
			cached.add(value);
			// rough per-value estimate: char data plus object/entry overhead
			m_cacheSize += value.length() * 6.2 + 128;
		}

		if (checkFlush())
			flush();
	}

	/**
	 * Writes one document per cached index key (values sorted and joined with
	 * '\n') and empties the cache. Does not commit; flush() does that.
	 */
	private void flushCache() throws StorageException {
		if (m_valueCache.isEmpty())
			return;

		if (m_cacheType != CacheType.NO_CACHE)
			log.debug(m_idx + " flushing cache (estimated cache size: " + m_cacheSize + ", mem: " + Util.memory() + ")");
		
		m_flushes++;
		
		// iterate entries directly instead of keySet() + per-key get() lookups
		for (Map.Entry<String,Set<String>> entry : m_valueCache.entrySet()) {
			List<String> cached = new ArrayList<String>(entry.getValue());
			Collections.sort(cached);
			
			Document doc = new Document();
			doc.add(LuceneUtil.getIndexedField(m_idx, entry.getKey()));

			StringBuilder sb = new StringBuilder();
			for (String s : cached)
				sb.append(s).append('\n');
			
			doc.add(LuceneUtil.getStoredField(m_idx.getValueField(), sb.toString()));
			
			try {
				m_writer.addDocument(doc);
			} catch (CorruptIndexException e) {
				throw new StorageException(e);
			} catch (IOException e) {
				throw new StorageException(e);
			}
		}
		
		if (m_cacheType != CacheType.NO_CACHE)
			log.debug(m_idx + " flush done");
		
		m_valueCache = new HashMap<String,Set<String>>();
		m_cacheSize = 0;
	}

	/** Adds the values under the index key built from the given key parts. */
	public void addData(String[] indexKeys, Collection<String> values) throws StorageException {
		String indexKey = LuceneUtil.toIndexKey(indexKeys);
		addToCache(indexKey, values);
	}

	/** Adds the values under the index key built from the given key parts. */
	public void addData(String[] indexKeys, List<String> values) throws StorageException {
		addData(indexKeys, (Collection<String>)values);
	}

	/** Adds a single value under the index key built from the given key parts. */
	public void addData(String[] indexKeys, String value) throws StorageException {
		String indexKey = LuceneUtil.toIndexKey(indexKeys);
		addToCache(indexKey, value);
	}
	
	/**
	 * Merges duplicate documents: for every term that occurs in more than one
	 * document, collects the union of all values, deletes the duplicates and
	 * writes a single replacement document with the sorted, de-duplicated
	 * values.
	 *
	 * @throws StorageException on index errors
	 */
	public void internalMerge() throws StorageException {
		try {
			log.debug(m_idx + " merging...");
			
			// make sure everything added so far is visible to the reader
			flush();
			
			LuceneIndexStorageReader reader = new LuceneIndexStorageReader(m_dir, m_idx);
			int termsMerged = 0;
			int docsMerged = 0;
			int termsProcessed = 0;
			
			TermEnum te = reader.getReader().terms();
			while (te.next()) {

				if (te.docFreq() > 1) {
					Term t = te.term();
					termsMerged++;
					
					List<Integer> docIds = reader.getDocumentIds(new TermQuery(t));
					docsMerged += docIds.size();
					
					// TreeSet de-duplicates and keeps the values sorted
					TreeSet<String> values = new TreeSet<String>();
					for (int docId : docIds) {
						values.addAll(reader.getDocumentValues(docId));
					}
					
					m_writer.deleteDocuments(t);
					
					Document doc = new Document();
					doc.add(LuceneUtil.getIndexedField(m_idx, t.text()));
					StringBuilder sb = new StringBuilder();
					for (String s : values)
						sb.append(s).append('\n');
					
					doc.add(LuceneUtil.getStoredField(m_idx.getValueField(), sb.toString()));
					m_writer.addDocument(doc);
				}
				
				termsProcessed++;

				// progress logging for very large indexes
				if (termsProcessed % 1000000 == 0) {
					log.debug(" terms: " + termsProcessed  + ", merged terms: " + termsMerged + " docs: " + docsMerged);
				}
			}
			
			m_writer.commit();
			reader.close();
			
			log.debug(m_idx + " merge complete, terms: " + termsProcessed  + ", merged terms: " + termsMerged + " docs: " + docsMerged);
		}
		catch (IOException e) {
			throw new StorageException(e);
		}
	}

	/** Merges duplicate documents; see {@link #internalMerge()}. */
	public void merge() throws StorageException {
		internalMerge();
	}

	/** Commits pending changes and optimizes the index into a single segment. */
	public void optimize() throws StorageException {
		try {
			log.debug(m_idx + " optimizing...");
			m_writer.commit();
			m_writer.optimize();
			log.debug(m_idx + " optimize complete");
		}
		catch (IOException e) {
			throw new StorageException(e);
		}
	}
}
