package org.apache.ocean.main;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.store.FSDirectory;
import org.apache.ocean.main.Index.IndexException;
import org.apache.ocean.main.replication.Replication.ReplicationException;

/**
 * A single large indexing batch backed by its own Lucene index directory.
 * Documents are appended via {@link #addDocuments(List)}, pending deletes are
 * accumulated on disk as a serialized {@link Deletes} object, and
 * {@link #finished()} optimizes and closes the index before handing the batch
 * back to its owning {@link TransactionSystem.Category}.
 */
public class LargeBatch {
	private Long id;
	private File directoryFile;
	private File deletesFile;
	private FSDirectory fsDirectory;
	private IndexWriter indexWriter;
	private ConcurrentMergeScheduler concurrentMergeScheduler;
	private Analyzer analyzer;
	// Serializes all access to deletesFile so a reader never observes a
	// partially written file and read-merge-write cycles do not interleave.
	private ReentrantLock deletesLock = new ReentrantLock();
	private TransactionSystem.Category category;
  
	// TODO: use indexcreator in large batch
	/**
	 * Creates the batch directory and opens an {@link IndexWriter} on it.
	 * A "largebatch.started" marker file is created up front and removed by
	 * {@link #finished()} — presumably so an interrupted batch can be detected
	 * on restart (TODO confirm against the recovery code).
	 *
	 * @param id batch identifier
	 * @param directoryFile filesystem directory that will hold the index
	 * @param analyzer analyzer applied to added documents
	 * @param ramBufferSize IndexWriter RAM buffer size, in MB
	 * @param category owning category, notified when the batch finishes
	 * @throws IOException if the directory cannot be created or the index
	 *         writer cannot be opened
	 */
	public LargeBatch(Long id, File directoryFile, Analyzer analyzer, double ramBufferSize, TransactionSystem.Category category) throws IOException {
		this.id = id;
		this.directoryFile = directoryFile;
		this.analyzer = analyzer;
		this.category = category;
		// mkdirs() returns false both on failure and when the directory
		// already exists, so only fail when no directory is present after.
		if (!directoryFile.mkdirs() && !directoryFile.isDirectory()) {
			throw new IOException("could not create directory: " + directoryFile);
		}
		FileUtils.touch(new File(directoryFile, "largebatch.started"));
		deletesFile = new File(directoryFile, "largebatch.deletes");
		concurrentMergeScheduler = new ConcurrentMergeScheduler();
		fsDirectory = FSDirectory.getDirectory(directoryFile);
		indexWriter = new IndexWriter(fsDirectory, false, analyzer, true, new KeepOnlyLastCommitDeletionPolicy());
		indexWriter.setRAMBufferSizeMB(ramBufferSize);
		// Cap merged segment size so background merges stay bounded.
		LogByteSizeMergePolicy mergePolicy = new LogByteSizeMergePolicy();
		mergePolicy.setMaxMergeMB(100.0);
		indexWriter.setMergePolicy(mergePolicy);
		indexWriter.setMergeScheduler(concurrentMergeScheduler);
	}

	/** @return the batch identifier */
	public Long getID() {
		return id;
	}

	/** @return the directory holding this batch's index */
	public File getDirectoryFile() {
		return directoryFile;
	}

	/**
	 * Removes the on-disk deletes file, if any (best-effort: a failed delete
	 * is ignored, matching the previous behavior). Taken under deletesLock so
	 * it cannot race a concurrent {@link #addDeletes(Deletes)} rewrite.
	 */
	public void deleteDeletesFile() {
		deletesLock.lock();
		try {
			deletesFile.delete();
		} finally {
			deletesLock.unlock();
		}
	}

	/**
	 * Optimizes and closes the index (waiting for merges to complete), removes
	 * the "largebatch.started" marker, and hands the completed batch to the
	 * owning category.
	 */
	public void finished() throws Exception, IOException, CorruptIndexException, ReplicationException, IndexException {
		indexWriter.optimize(true);
		indexWriter.close(true);
		File largeBatchStartedFile = new File(directoryFile, "largebatch.started");
		largeBatchStartedFile.delete();
		category.saveLargeBatchMaster(this);
	}

	/**
	 * Reads the accumulated deletes from disk.
	 *
	 * @return the deserialized {@link Deletes}, or null when no deletes file
	 *         exists yet
	 */
	public Deletes loadDeletes() throws IOException, ClassNotFoundException {
		deletesLock.lock();
		try {
			if (!deletesFile.exists())
				return null;
			// Close the underlying stream even when deserialization throws;
			// the previous code leaked the file handle on exception.
			FileInputStream fileInput = new FileInputStream(deletesFile);
			try {
				ObjectInputStream objectInput = new ObjectInputStream(fileInput);
				// NOTE(review): native Java deserialization — safe only while
				// this file is written exclusively by addDeletes(); confirm no
				// untrusted party can place largebatch.deletes here.
				return (Deletes) objectInput.readObject();
			} finally {
				fileInput.close();
			}
		} finally {
			deletesLock.unlock();
		}
	}

	/**
	 * Merges the given deletes into the on-disk deletes file, creating it when
	 * none exists yet. The read-merge-rewrite cycle runs entirely under
	 * deletesLock (reentrant, so the nested loadDeletes() is fine).
	 */
	public void addDeletes(Deletes moreDeletes) throws IOException, ClassNotFoundException {
		deletesLock.lock();
		try {
			Deletes deletes = loadDeletes();
			if (deletes == null) {
				deletes = moreDeletes;
			} else {
				deletes.merge(moreDeletes);
			}
			// Close the underlying stream even when serialization throws;
			// the previous code leaked the file handle on exception.
			FileOutputStream fileOutput = new FileOutputStream(deletesFile);
			try {
				ObjectOutputStream objectOutput = new ObjectOutputStream(fileOutput);
				objectOutput.writeObject(deletes);
				// Flush ObjectOutputStream's internal block buffer before the
				// finally block closes the underlying file stream.
				objectOutput.flush();
			} finally {
				fileOutput.close();
			}
		} finally {
			deletesLock.unlock();
		}
	}

	/**
	 * Adds the given documents to the batch index using this batch's analyzer.
	 */
	public void addDocuments(List<Document> documents) throws Exception {
		for (Document document : documents) {
			indexWriter.addDocument(document, analyzer);
		}
	}
}
