package org.apache.ocean.transaction;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Logger;

import org.apache.commons.lang.SerializationUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.index.IndexCommitPoint;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.MultiSegmentReader;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.ocean.CDouble;
import org.apache.ocean.CLong;
import org.apache.ocean.ClusterConstants;
import org.apache.ocean.IndexSchema;
import org.apache.ocean.server.NumberUtils;
import org.apache.ocean.util.BitDocSet;
import org.apache.ocean.util.ConcurrentTreeMap;
import org.apache.ocean.util.OpenBitSet;

// TODO: transaction record should only serialize compressed bytes of document
// TODO: replicate filter per segment using index filesystem
// TODO: figure out way to keep track of ids from batches that never committed
// TODO: keep batch commit log
// TODO: create merge of filters and field caches in segmentmerger
// TODO: perf test loading ids from payloads
// TODO: make deleting more efficient by using payloads
// TODO: on new segment in segmentmerger, need to load filter and field cache using snapshot id and ids contained in segment
// TODO: create merge log, log of which segments merged into which for each index version
/**
 * Transactional wrapper around a Lucene index. Maintains a series of
 * {@link IndexSnapshot}s (one per committed batch) keyed by snapshot id, and
 * acts as the {@link IndexDeletionPolicy} so commit points stay on disk while
 * a snapshot still references them.
 */
public class Index implements IndexDeletionPolicy {
	public static Logger log = Logger.getLogger(Index.class.getName());
	IndexWriter indexWriter;
	Directory directory;
	// Snapshots ordered by snapshot id; lastValue() is the most recent snapshot.
	ConcurrentTreeMap<CLong, IndexSnapshot> snapshotMap = new ConcurrentTreeMap<CLong, IndexSnapshot>();
	AtomicLong snapshotIDSequence = new AtomicLong(0);
	// Source of batch ids handed out by newBatch().
	AtomicLong batchIDSequence = new AtomicLong(0);
	AtomicLong transactionIDSequence = new AtomicLong(0);
	IndexSchema indexSchema;
	// Map<String, SnapshotFilters> segmentFilterVersions;
	// Serializes batch publication in add(Batch) so snapshots chain correctly.
	private ReentrantLock addBatchLock = new ReentrantLock();
	// Interned so term field names can be compared with == while enumerating.
	private static String internedTransactionIDField = ClusterConstants.TRANSACTION_ID_FIELD.intern();
	ConcurrentHashMap<SegmentReader, Map<CLong, Integer>> segmentReaderTransactionIDCacheMap = new ConcurrentHashMap<SegmentReader, Map<CLong, Integer>>();
  HashMap<CLong,IDSnapshots> transactionIDSnapshotMap = new HashMap<CLong,IDSnapshots>();
	
	/**
	 * Opens (or creates) the index in the given directory and publishes the
	 * initial snapshot.
	 *
	 * @param directory   where the Lucene index lives
	 * @param indexSchema supplies the analyzer used by the writer
	 * @throws Exception if the writer or initial reader cannot be opened
	 */
	public Index(Directory directory, IndexSchema indexSchema) throws Exception {
		this.directory = directory;
		this.indexSchema = indexSchema;
		// Create the index only when no index files exist yet.
		boolean createIndex = !IndexReader.indexExists(directory);
		// NOTE(review): second arg is presumably autoCommit=true; this Index is
		// installed as the deletion policy so onCommit controls file deletion.
		indexWriter = new IndexWriter(directory, true, indexSchema.getAnalyzer(), createIndex, this);
		LogDocMergePolicy logDocMergePolicy = new LogDocMergePolicy();
		logDocMergePolicy.setMergeFactor(10);
		logDocMergePolicy.setMinMergeDocs(100);
		logDocMergePolicy.setMaxMergeDocs(50000);
		indexWriter.setMergePolicy(logDocMergePolicy);
		// Serial scheduler keeps merges on the calling thread (deterministic).
		indexWriter.setMergeScheduler(new SerialMergeScheduler());
		IndexReader indexReader = IndexReader.open(directory);
		IndexSnapshot indexSnapshot = new IndexSnapshot(indexReader, this);
		add(indexSnapshot);
	}
  
	/**
	 * Returns true when the given transaction id has a recorded snapshot entry
	 * for the given snapshot id.
	 */
	public boolean isTransactionIDValid(CLong transactionID, CLong snapshotID) {
		IDSnapshots snapshots = transactionIDSnapshotMap.get(transactionID);
		return snapshots != null && snapshots.contains(snapshotID);
	}
	
	/**
	 * Per-transaction-id record of the snapshots in which the id appears.
	 */
	public static class IDSnapshots {
		// Keyed by snapshot id; a non-null entry means the id is visible there.
		public Map<CLong,IDSnapshot> snapshotMap = new HashMap<CLong,IDSnapshot>(10);

		/** True when a (non-null) snapshot entry exists for this snapshot id. */
		public boolean contains(CLong snapshotID) {
			// Simplified from an if/else that returned the same comparison.
			return snapshotMap.get(snapshotID) != null;
		}

		/** Drops the entry for the given snapshot id, if any. */
		public void delete(CLong snapshotID) {
			snapshotMap.remove(snapshotID);
		}

		/**
		 * Location of one transaction id inside one snapshot: which segment
		 * reader holds it and at which document number.
		 */
		public static class IDSnapshot {
			public CLong transactionID;
			public boolean exists;
			public SegmentReader segmentReader;
			public int doc;

			public IDSnapshot() {
			}
		}
	}
	
	/**
	 * Serializes the filter's bit set and writes it to {@code fileName} in the
	 * index directory.
	 *
	 * @throws IOException if the output cannot be created or written
	 */
	private void writeFile(String fileName, BitDocSet filter) throws IOException {
		byte[] bytes = SerializationUtils.serialize(filter.getBits());
		IndexOutput indexOutput = directory.createOutput(fileName);
		// try/finally added: the output was previously leaked if writeBytes threw.
		try {
			indexOutput.writeBytes(bytes, bytes.length);
		} finally {
			indexOutput.close();
		}
	}

	// TODO: delete old filter files when segment is deleted
	/**
	private BitDocSet readFilterFile(String fileName) throws IOException {
		int fileLength = (int) directory.fileLength(fileName);
		IndexInput indexInput = directory.openInput(fileName, fileLength);
		byte[] bytes = new byte[fileLength];
		indexInput.readBytes(bytes, 0, fileLength);
		OpenBitSet openBitSet = (OpenBitSet) SerializationUtils.deserialize(bytes);
		return new BitDocSet(openBitSet);
	}
  **/
	/**
	 * Closes every snapshot's reader, then closes the writer once. Failures are
	 * logged and do not stop the remaining closes (best-effort shutdown).
	 */
	public void close() {
		for (IndexSnapshot indexSnapshot : snapshotMap.values()) {
			try {
				indexSnapshot.indexReader.close();
			} catch (Throwable throwable) {
				log.severe(throwable.getMessage());
			}
		}
		// Bug fix: the writer was previously closed inside the loop, i.e. once
		// per snapshot; it must be closed exactly once, after the readers.
		try {
			indexWriter.close(true);
		} catch (Throwable throwable) {
			log.severe(throwable.getMessage());
		}
	}

	/**
	 * public static class SnapshotFilters { public TreeMap<CLong, DocSet>
	 * filterMap; // key is snapshot id public String segmentName;
	 * 
	 * public void applyDeletes(CLong snapshotID, int[] deletedDocs) { DocSet
	 * previousFilter = filterMap.get(filterMap.lastKey()); DocSet newFilter = new
	 * BitDocSet(previousFilter.getBits()); for (int x = 0; x <
	 * deletedDocs.length; x++) { newFilter.getBits().clear(deletedDocs[x]); }
	 * filterMap.put(snapshotID, newFilter); } }
	 */
	/**
	 * IndexDeletionPolicy callback at writer startup; applies the same
	 * retention rules as {@link #onCommit(List)}.
	 */
	public void onInit(List indexCommitPoints) throws IOException {
		onCommit(indexCommitPoints);
	}

	/**
	 * IndexDeletionPolicy callback after each commit. Deletes older commit
	 * points whose snapshot is unreferenced; the newest commit point (last
	 * element) is always kept, hence the {@code size() - 1} bound.
	 */
	public void onCommit(List indexCommitPoints) throws IOException {
		for (int x = 0; x < indexCommitPoints.size() - 1; x++) {
			IndexCommitPoint indexCommitPoint = (IndexCommitPoint) indexCommitPoints.get(x);
			// Set<String> segmentNamesInUse = getSegmentNamesInUse();
			String segmentsFileName = indexCommitPoint.getSegmentsFileName();
			// NOTE(review): this id is the segments-file generation, while
			// IndexSnapshot keys snapshotMap by IndexReader.getVersion(); these
			// are different counters in Lucene — confirm the lookup ever matches.
			CLong snapshotID = new CLong(SegmentInfos.generationFromSegmentsFileName(segmentsFileName));
			IndexSnapshot indexSnapshot = snapshotMap.get(snapshotID);
			if (indexSnapshot != null) {
				// Only reclaim a commit point once no reader holds the snapshot.
				if (indexSnapshot.refCount.get() == 0) {
					indexCommitPoint.delete();
					snapshotMap.remove(snapshotID);
				}
			}
			/**
			 * boolean delete = true; for (String commitPointFileName : (Collection<String>)
			 * indexCommitPoint.getFileNames()) { if
			 * (segmentNamesInUse.contains(commitPointFileName)) { delete = false;
			 * break; } } if (delete) { indexCommitPoint.delete();
			 * indexCommitPoint.getFileNames() }
			 */
		}
	}

	/**
	 * Allocates the next batch id and returns a new batch bound to this index.
	 */
	public Batch newBatch(CDouble bufferSize) throws Exception {
		CLong batchID = new CLong(batchIDSequence.incrementAndGet());
		return new Batch(bufferSize, batchID, this);
	}

	/** Returns the most recently published snapshot (highest snapshot id). */
	public IndexSnapshot getLatestIndexSnapshot() {
		return snapshotMap.lastValue();
	}

	/** Publishes a snapshot under its snapshot id. */
	private void add(IndexSnapshot indexSnapshot) {
		snapshotMap.put(indexSnapshot.snapshotID, indexSnapshot);
	}

	/**
	 * Returns the reader for the given snapshot id.
	 * NOTE(review): throws NullPointerException if the id is unknown — callers
	 * are presumably expected to pass a valid id; confirm.
	 */
	public IndexReader getIndexReader(CLong snapshotID) {
		return snapshotMap.get(snapshotID).indexReader;
	}

	/**
	 * public Set<String> getSegmentNamesInUse() { Set<String> segmentNamesInUse =
	 * new HashSet<String>(); for (IndexSnapshot indexSnapshot :
	 * snapshotMap.values()) { if (indexSnapshot.refCount.get() > 0) {
	 * segmentNamesInUse.addAll(indexSnapshot.getSegmentNames()); } } return
	 * segmentNamesInUse; }
	 */
	/**
	 * Immutable view of the index at one commit point: its reader, the segment
	 * names it contains, a per-segment visibility filter, and the transaction
	 * ids added/deleted relative to the previous snapshot.
	 */
	public static class IndexSnapshot {
		IndexSnapshot previousIndexSnapshot;
		IndexReader indexReader;
		// Snapshot id, taken from IndexReader.getVersion() at construction.
		CLong snapshotID;
		CLong batchID;
		// Number of outstanding users; commit points with refCount 0 may be deleted.
		AtomicLong refCount = new AtomicLong(0);
		Index index;
		Set<String> segmentNames;
		Map<String, SegmentSnapshot> segmentSnapshotMap;
		HashSet<CLong> addedIDs;
		HashSet<CLong> deleteIDs;

	  // TODO: scan through each segmentsnapshot
		/** True when the transaction id is valid within this snapshot. */
		public boolean idOk(CLong transactionID) {
			return index.isTransactionIDValid(transactionID, snapshotID);
		}
		
		/**
		 * Builds the initial snapshot from an already-open reader: one
		 * SegmentSnapshot per segment, plus the set of all transaction ids
		 * present in the index (read from the transaction id field's terms).
		 */
		public IndexSnapshot(IndexReader indexReader, Index index) throws Exception {
			this.snapshotID = new CLong(indexReader.getVersion());
			this.indexReader = indexReader;
			// Bug fix: index was never assigned, so idOk() dereferenced a null
			// field on snapshots built through this constructor.
			this.index = index;
			segmentNames = Index.getSegmentNames(indexReader);
			segmentSnapshotMap = new HashMap<String, SegmentSnapshot>(segmentNames.size());
			for (SegmentReader segmentReader : getSegmentReaders(indexReader)) {
				SegmentSnapshot segmentSnapshot = new SegmentSnapshot(snapshotID, segmentReader, index);
				segmentSnapshotMap.put(segmentSnapshot.name, segmentSnapshot);
			}
			addedIDs = new HashSet<CLong>(indexReader.maxDoc());
			// Walk every term of the transaction id field; the enum is positioned
			// at the field's first term and stops once the field changes
			// (interned == comparison).
			TermEnum termEnum = indexReader.terms(new Term(internedTransactionIDField, ""));
			try {
				do {
					Term term = termEnum.term();
					if (term == null || term.field() != internedTransactionIDField)
						break;
					CLong transactionID = toReadable(term.text());
					addedIDs.add(transactionID);
				} while (termEnum.next());
			} finally {
				if (termEnum != null) termEnum.close();
			}
		}

		/**
		 * Builds a snapshot after a batch commit. Segments carried over from the
		 * previous snapshot get a copy-on-write SegmentSnapshot (deletes applied);
		 * segments new in this reader get a freshly computed SegmentSnapshot.
		 *
		 * @param addedIDs   transaction ids added by the batch
		 * @param deleteIDs  transaction ids deleted by the batch
		 * @param previousIndexSnapshot snapshot to chain from; may be null
		 */
		public IndexSnapshot(CLong batchID, HashSet<CLong> addedIDs, HashSet<CLong> deleteIDs, IndexReader indexReader, IndexSnapshot previousIndexSnapshot,
				Index index) throws Exception {
			this.snapshotID = new CLong(indexReader.getVersion());
			this.batchID = batchID;
			this.indexReader = indexReader;
			this.index = index;
			Set<String> newSegmentNames;
			segmentNames = Index.getSegmentNames(indexReader);
			if (previousIndexSnapshot != null) {
				// (Removed an unused local that aliased previousIndexSnapshot.indexReader.)
				this.previousIndexSnapshot = previousIndexSnapshot;
				newSegmentNames = getNewSegmentNames(previousIndexSnapshot.indexReader);
			} else {
				// No predecessor: every segment counts as new.
				newSegmentNames = segmentNames;
			}
			segmentSnapshotMap = new HashMap<String, SegmentSnapshot>(segmentNames.size());
			Map<String, SegmentReader> segmentReaderMap = new HashMap<String, SegmentReader>(segmentNames.size());
			Set<String> oldSegmentNames = new HashSet<String>();
			for (String segmentName : segmentNames) {
				SegmentReader segmentReader = getSegmentReader(segmentName, indexReader);
				segmentReaderMap.put(segmentName, segmentReader);
				if (previousIndexSnapshot != null && previousIndexSnapshot.segmentNames.contains(segmentName)) {
					oldSegmentNames.add(segmentName);
				}
			}
			// New segments: compute a filter from scratch using added/deleted ids.
			for (String segmentName : newSegmentNames) {
				SegmentReader segmentReader = segmentReaderMap.get(segmentName);
				SegmentSnapshot newSegmentSnapshot = new SegmentSnapshot(snapshotID, addedIDs, deleteIDs, segmentReader, previousIndexSnapshot, index);
				segmentSnapshotMap.put(newSegmentSnapshot.name, newSegmentSnapshot);
			}
			// Carried-over segments: clone the previous filter and apply deletes.
			for (String segmentName : oldSegmentNames) {
				SegmentSnapshot previousSegmentSnapshot = previousIndexSnapshot.segmentSnapshotMap.get(segmentName);
				SegmentSnapshot segmentSnapshot = new SegmentSnapshot(snapshotID, deleteIDs, previousSegmentSnapshot, index);
				segmentSnapshotMap.put(segmentName, segmentSnapshot);
			}
		}

		/**
		 * Per-segment visibility state for one snapshot: a bit filter of the
		 * documents visible in that segment, persisted to a per-snapshot file.
		 */
		public static class SegmentSnapshot {
			CLong snapshotID;
			String name;
			// Bit per document: set means visible in this snapshot.
			BitDocSet filter;
			SegmentReader segmentReader;
			Index index;

			/** Persists the filter so the snapshot's visibility survives restarts. */
			public void writeSegmentSnapshotFilter() throws IOException {
				index.writeFile(getFilterFileName(), filter);
			}

			/** Filter file name: {@code <segment>.<snapshotID>.filter}. */
			public String getFilterFileName() {
				return name + "." + snapshotID + ".filter";
			}

			// for client side replication
			public SegmentSnapshot(CLong snapshotID, SegmentReader segmentReader, Index index) throws IOException {
				this.snapshotID = snapshotID;
				this.index = index;
				this.name = segmentReader.segment;
				// Bug fix: the reader was never stored, leaving the field null.
				this.segmentReader = segmentReader;
				// NOTE(review): filter loading is disabled, so filter remains null
				// on this path until readFilterFile is restored — confirm callers.
				//filter = index.readFilterFile(getFilterFileName());
			}

			// for existing segments: clone the previous filter, apply deletes.
			public SegmentSnapshot(CLong snapshotID, Set<CLong> deleted, SegmentSnapshot previousSegmentSnapshot, Index index) throws IOException {
				this.snapshotID = snapshotID;
				// Bug fix: name was never carried over, so getFilterFileName()
				// produced "null.<id>.filter" for carried-forward segments.
				this.name = previousSegmentSnapshot.name;
				// Copy-on-write: the previous snapshot's filter stays untouched.
				filter = new BitDocSet((OpenBitSet) previousSegmentSnapshot.filter.getBits().clone());
				segmentReader = previousSegmentSnapshot.segmentReader;
				this.index = index;
				if (deleted != null) {
					for (CLong id : deleted) {
						int doc = getDoc(id, segmentReader);
						// Guard added: getDoc returns -1 when the id is absent.
						if (doc >= 0) {
							filter.getBits().clear(doc);
						}
					}
				}
				writeSegmentSnapshotFilter();
			}

			// for new segments: build the filter from the segment's transaction ids.
			public SegmentSnapshot(CLong snapshotID, HashSet<CLong> added, HashSet<CLong> deleted, SegmentReader segmentReader, IndexSnapshot previousIndexSnapshot,
					Index index) throws Exception {
				this.snapshotID = snapshotID;
				// Bug fix: index and segmentReader were never stored; the
				// writeSegmentSnapshotFilter() call below NPEd on the null index.
				this.index = index;
				this.segmentReader = segmentReader;
				name = segmentReader.segment;
				int maxDoc = segmentReader.maxDoc();
				filter = new BitDocSet(maxDoc);
				TermDocs termDocs = segmentReader.termDocs();
				TermEnum termEnum = segmentReader.terms(new Term(internedTransactionIDField, ""));
				try {
					do {
						Term term = termEnum.term();
						// Stop once the enum leaves the transaction id field
						// (interned == comparison).
						if (term == null || term.field() != internedTransactionIDField)
							break;
						CLong transactionID = toReadable(term.text());
						// Bug fix: termDocs was never positioned on the current
						// term, so next() did not iterate this id's postings.
						termDocs.seek(termEnum);
						if (termDocs.next()) {
							int doc = termDocs.doc();
							if (added.contains(transactionID)) {
								filter.add(doc);
							} else if (deleted.contains(transactionID)) {
								// deleted ids are simply left out of the filter
							} else if (previousIndexSnapshot != null) {
								// TODO: carry visibility forward from the previous snapshot
								//if (previousIndexSnapshot.idExists(transactionID)) {
								//	filter.add(doc);
								//}
							}
						}
					} while (termEnum.next());
				} finally {
					if (termEnum != null)
						termEnum.close();
					if (termDocs != null)
						termDocs.close();
				}
				writeSegmentSnapshotFilter();
			}
		}

		/** Segment names present in this snapshot's reader. */
		public Set<String> getSegmentNames() {
			return segmentNames;
		}

		/** Registers a user of this snapshot; keeps its commit point alive. */
		public void incrementRef() {
			refCount.incrementAndGet();
		}

		/** Releases a user; at zero the commit point becomes reclaimable. */
		public void decrementRef() {
			refCount.decrementAndGet();
		}

		/**
		 * Returns the segment names present in this snapshot's reader but absent
		 * from the previous reader; with no previous reader, all names are new.
		 */
		public Set<String> getNewSegmentNames(IndexReader previousIndexReader) {
			Set<String> currentNames = Index.getSegmentNames(indexReader);
			if (previousIndexReader == null) {
				return currentNames;
			}
			Set<String> newNames = new HashSet<String>(currentNames);
			newNames.removeAll(Index.getSegmentNames(previousIndexReader));
			return newNames;
		}
	}

	/**
	 * Applies a committed batch to the index: deletes the batch's deleted
	 * transaction ids, merges the batch's directory in, flushes, then (under
	 * addBatchLock, so snapshots chain in order) reopens the previous
	 * snapshot's reader and publishes a new snapshot derived from it.
	 */
	void add(Batch batch) throws Exception {
		Term[] deletedTerms = toTerms(batch.deletedTransactionIDs);
		indexWriter.deleteDocuments(deletedTerms);
		indexWriter.addIndexesNoOptimize(new Directory[] { batch.directory });
		indexWriter.flush(true, true);
		addBatchLock.lock();
		try {
			IndexSnapshot previousIndexSnapshot = snapshotMap.lastValue();
			// reopen() shares unchanged segments with the previous reader.
			IndexReader newIndexReader = previousIndexSnapshot.indexReader.reopen();
			IndexSnapshot newIndexSnapshot = new IndexSnapshot(batch.id, batch.addedTransactionIDs, batch.deletedTransactionIDs, newIndexReader,
					previousIndexSnapshot, this);
			add(newIndexSnapshot);
		} finally {
			addBatchLock.unlock();
		}
	}

	/**
	 * Resolves the segment reader with the given segment name from either a
	 * single-segment or a multi-segment reader; null when the name is absent.
	 *
	 * @throws RuntimeException for any other reader implementation
	 */
	public static SegmentReader getSegmentReader(String name, IndexReader indexReader) {
		if (indexReader instanceof SegmentReader) {
			SegmentReader single = (SegmentReader) indexReader;
			return StringUtils.equals(name, single.segment) ? single : null;
		}
		if (indexReader instanceof MultiSegmentReader) {
			SegmentReader[] subReaders = ((MultiSegmentReader) indexReader).subReaders;
			for (SegmentReader subReader : subReaders) {
				if (StringUtils.equals(name, subReader.segment)) {
					return subReader;
				}
			}
			return null;
		}
		throw new RuntimeException("unknown indexreader");
	}

	/**
	 * Flattens a reader into the list of its underlying segment readers.
	 *
	 * @throws RuntimeException for reader types other than SegmentReader and
	 *         MultiSegmentReader
	 */
	public static List<SegmentReader> getSegmentReaders(IndexReader indexReader) {
		if (indexReader instanceof SegmentReader) {
			List<SegmentReader> readers = new ArrayList<SegmentReader>(1);
			readers.add((SegmentReader) indexReader);
			return readers;
		}
		if (indexReader instanceof MultiSegmentReader) {
			SegmentReader[] subReaders = ((MultiSegmentReader) indexReader).subReaders;
			List<SegmentReader> readers = new ArrayList<SegmentReader>(subReaders.length);
			for (int i = 0; i < subReaders.length; i++) {
				readers.add(subReaders[i]);
			}
			return readers;
		}
		throw new RuntimeException("unknown indexreader");
	}

	/**
	 * Collects the segment names underlying a reader.
	 *
	 * @throws RuntimeException for reader types other than SegmentReader and
	 *         MultiSegmentReader
	 */
	public static Set<String> getSegmentNames(IndexReader indexReader) {
		if (indexReader instanceof SegmentReader) {
			Set<String> names = new HashSet<String>(1);
			names.add(((SegmentReader) indexReader).segment);
			return names;
		}
		if (indexReader instanceof MultiSegmentReader) {
			SegmentReader[] subReaders = ((MultiSegmentReader) indexReader).subReaders;
			Set<String> names = new HashSet<String>(subReaders.length);
			for (int i = 0; i < subReaders.length; i++) {
				names.add(subReaders[i].segment);
			}
			return names;
		}
		throw new RuntimeException("unknown indexreader");
	}

	/**
	 * Converts each transaction id in the collection into its indexed Term.
	 */
	public static Term[] toTerms(Collection<CLong> collection) {
		Term[] terms = new Term[collection.size()];
		int count = 0;
		for (CLong value : collection) {
			// Bug fix: count was never incremented, so every term landed in
			// slot 0 and the remaining slots stayed null (NPE downstream in
			// deleteDocuments). Also removed an unused toIndexed() local.
			terms[count++] = toTerm(value);
		}
		return terms;
	}

	/**
	 * Builds the term indexing this transaction id (sortable string encoding).
	 */
	public static Term toTerm(CLong transactionID) {
		return new Term(internedTransactionIDField, NumberUtils.long2sortableStr(transactionID.toString()));
	}

	/** Encodes a transaction id as its sortable indexed string form. */
	public static String toIndexed(CLong transactionID) {
		return NumberUtils.long2sortableStr(transactionID.toString());
	}

	/** Decodes a sortable indexed string back into a transaction id. */
	public static CLong toReadable(String indexed) {
		return new CLong(NumberUtils.sortableStr2long(indexed));
	}

	/**
	 * Returns the document number indexed under the given transaction id in
	 * this segment, or -1 when the id is not present.
	 *
	 * Bug fix: the previous version never seeked to the transaction id's term —
	 * it enumerated from the field's first term and iterated an unpositioned
	 * TermDocs, so the id argument was effectively ignored. Seeking the
	 * postings directly for the id's term gives the intended lookup.
	 */
	public static int getDoc(CLong transactionID, SegmentReader segmentReader) throws IOException {
		TermDocs termDocs = segmentReader.termDocs(toTerm(transactionID));
		try {
			if (termDocs.next()) {
				return termDocs.doc();
			}
			return -1;
		} finally {
			termDocs.close();
		}
	}
}
