package joelwilson.lucene.dao;

import static joelwilson.lucene.SystemConstants.keyMapping;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

import joelwilson.lucene.SystemConstants;
import joelwilson.lucene.dto.GenericDTO;
import joelwilson.lucene.dto.Mapping;

import org.apache.lucene.IndexProvider;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.GAELuceneStaticMethods;
import org.apache.lucene.store.LockObtainFailedException;

import com.google.appengine.api.blobstore.BlobKey;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Transaction;

public class GenericDAOImpl implements GenericDAO {

	/** Index provider used whenever a caller does not supply an alternative one. */
	private final IndexProvider primaryIndexProvider;

	protected final DatastoreService datastoreService = DatastoreServiceFactory.getDatastoreService();

	// may be replaced via the two-arg constructor; getLogger() tolerates null
	private Logger logger = Logger.getLogger(getClass().getSimpleName());

	/**
	 * Creates a DAO that writes Lucene documents through the given index provider.
	 * 
	 * @param indexProvider source of Lucene index writers; must not be null
	 */
	public GenericDAOImpl(IndexProvider indexProvider) {
		super();
		assert indexProvider != null;
		this.primaryIndexProvider = indexProvider;
	}
	
	/**
	 * Creates a DAO with an explicit logger instead of the default per-class one.
	 * 
	 * @param indexProvider source of Lucene index writers; must not be null
	 * @param logger logger for this DAO instance
	 */
	public GenericDAOImpl(IndexProvider indexProvider, Logger logger) {
		this(indexProvider);
		this.logger = logger;
	}
	
	// when true, put() attempts to write documents straight into the Lucene index
	private boolean tryImmediateIndexMode = true;
	
	// when true, documents that cannot be indexed immediately (index lock held
	// elsewhere) are queued in the datastore for later indexing instead of failing
	private boolean saveUnindexableDocumentsForLaterMode = false;
	
	public boolean isTryImmediateIndexMode() {
		return tryImmediateIndexMode;
	}
	
	/*@Inject(optional=true)
	public void setTryImmediateIndexMode(boolean tryImmediateIndexMode) {
		this.tryImmediateIndexMode = tryImmediateIndexMode;
	}*/
	
	public boolean isSaveUnindexableDocumentsForLaterMode() {
		return saveUnindexableDocumentsForLaterMode;
	}

	/*private void setSaveUnindexableDocumentsForLaterMode(
			boolean saveUnindexableDocumentsForLaterMode) {
		this.saveUnindexableDocumentsForLaterMode = saveUnindexableDocumentsForLaterMode;
	}*/

	/**
	 * Returns either the primary index provider or the one passed in if it's not null.
	 * 
	 * @param alternativeIndexProvider provider to prefer, or null to use the primary
	 * @return a non-null index provider (asserted)
	 */
	protected IndexProvider getIndexProvider(IndexProvider alternativeIndexProvider) {
		assert primaryIndexProvider != null || alternativeIndexProvider != null;
		return alternativeIndexProvider != null ? alternativeIndexProvider : primaryIndexProvider;
	}

	/** Returns the configured logger, falling back to a per-class default if null. */
	protected Logger getLogger() {
		return logger != null ? logger : Logger.getLogger(getClass().getSimpleName()); 
	}
	
	// PUT methods
	
	/**
	 * Stores a single DTO using the primary index provider.
	 * 
	 * @see #put(GenericDTO, IndexProvider)
	 */
	public String put(GenericDTO dto) throws IOException {
		return put(dto, null);
	}
	
	/**
	 * Stores a single DTO.
	 * 
	 * @param dto the DTO to store; must not be null
	 * @param alternativeIndexProvider optional provider overriding the primary one
	 * @return the datastore key string of the stored entity
	 * @throws IllegalArgumentException if dto is null
	 * @throws IOException if the entity or document could not be written
	 */
	public String put(GenericDTO dto, IndexProvider alternativeIndexProvider) throws IOException {
		if (dto == null)
			throw new IllegalArgumentException("no DTO provided");
		
		List<GenericDTO> singleDTO = new ArrayList<GenericDTO>(1);
		singleDTO.add(dto);
		return put(singleDTO, getIndexProvider(alternativeIndexProvider)).get(0);
	}

	/**
	 * Stores several DTOs using the primary index provider.
	 * 
	 * @see #put(Iterable, IndexProvider)
	 */
	public List<String> put(Iterable<? extends GenericDTO> dtos) throws IOException {
		return put(dtos, null);
	}
	
	/**
	 * Creates Datastore Entities and Lucene Documents from the provided DTO(s),
	 * using the mapping information from the DTO class itself (allowing this
	 * generic method to process specific data correctly).
	 * 
	 * If tryImmediateIndexMode and saveUnindexableDocumentsForLaterMode are both
	 * false, an IllegalStateException is thrown.
	 * 
	 * The entities are written to the datastore under a transaction that is held
	 * open while indexing is attempted.
	 * 
	 * If tryImmediateIndexMode is true, a Lucene index writer is opened.
	 * If successful:
	 * 	1) old documents are cleared out based on the key
	 * 	2) the new documents are written to the index
	 * 	3) the index writer is closed
	 * 
	 * If the index lock cannot be obtained:
	 * 	If saveUnindexableDocumentsForLaterMode is false: the entity save
	 * 	transaction is rolled back and the lock acquisition exception (an
	 * 	IOException subclass) is rethrown.
	 * 	If true: entities holding the documents are written to the datastore
	 * 	under the same transaction as the initial write, for later indexing.
	 * 
	 * Finally the transaction is committed (or rolled back on failure).
	 * 
	 * @param dtos the DTOs to store; must not be null
	 * @param alternativeIndexProvider optional provider overriding the primary one
	 * @return the datastore key strings of the stored entities, in input order;
	 *         empty if the write failed (an exception is thrown in that case)
	 * @throws IllegalArgumentException if dtos is null
	 * @throws IllegalStateException if both indexing modes are disabled
	 * @throws IOException if the entities or documents could not be written
	 */
	public List<String> put(Iterable<? extends GenericDTO> dtos, IndexProvider alternativeIndexProvider) throws IOException {
		// safety checks
		if (dtos == null)
			throw new IllegalArgumentException("no DTO iterator provided");
		
		if (!tryImmediateIndexMode && !saveUnindexableDocumentsForLaterMode)
			throw new IllegalStateException("tryImmediateIndexMode && saveUnindexableDocumentsForLaterMode are both false, one must be true to put a DTO using this method.");
		
		// get the entities and documents ready for storing
		// lists of each object type we need; dtoCount is a capacity hint only
		int dtoCount = (dtos instanceof List<?>) ? ((List<?>)dtos).size() : 1;
		List<Entity> dtoEntities = new ArrayList<Entity>(dtoCount);
 		List<String> dtoKeyStrings = new ArrayList<String>(dtoCount);
		List<Term> keyStringTerms = new ArrayList<Term>(dtoCount);
		List<Document> dtoDocuments = new ArrayList<Document>(dtoCount);
		
		// loop over the dtos
		for (GenericDTO dto : dtos) {
			// get the keys
			String keyString = dto.getKey();
			Key key = keyString != null ? KeyFactory.stringToKey(keyString) : null;
			
			// create the entity object and put into the list
			Entity dtoEntity = key != null ? new Entity(key) : new Entity(dto.getKind());
			dtoEntities.add(dtoEntity);
			
			// create the delete-term search if we have a pre-existing key
			if (keyString != null) {
				Term term = new Term(keyMapping.getMapping(), keyString);
				keyStringTerms.add(term);
			}
			
			// create the document
			Document dtoDocument = new Document();
			boolean hasContent = false;
			
			// loop over the mapped fields
			for (Mapping mapping : dto.getMappings()) {
				if (dto.getDataMap().containsKey(mapping.getMapping())) {
				
					// put the property into the datastore entity if we are supposed to
					if (mapping.isDatastoreStored()) {
						// get the value
						Object storedValue = dto.getDataMap().get(mapping.getMapping());
						
						// deal with special case types we know of
						if (storedValue instanceof String)
						switch (mapping.getType()) {
							case Key:
								storedValue = KeyFactory.stringToKey((String)storedValue);
								break;
							case BlobKey:
								storedValue = new BlobKey((String)storedValue);
						}
						
						// put the property into the entity using the appropriate method
						if (mapping.isDatastoreIndexed())
							dtoEntity.setProperty(mapping.getMapping(), storedValue);
						else
							dtoEntity.setUnindexedProperty(mapping.getMapping(), storedValue);
					}
					
				
					// put the property into the lucene document if we are supposed to
					// (the key field is added later, once the datastore key is known)
					if (mapping != keyMapping && (mapping.isLuceneStored() || mapping.isLuceneIndexed())) {
						Object storedValue = dto.getDataMap().get(mapping.getMapping());
						if (storedValue != null) {
							// get the string value of this object
							String storedStringValue = null;
							storedStringValue = storedValue.toString();
							// if we need some special handling for certain types, it should go here between these two lines
							dtoDocument.add(new Field(mapping.getMapping(), storedStringValue, mapping.getLuceneStoreMode(), mapping.getLuceneIndexMode()));
							hasContent = true;
						}
					}
				}
			}
			
			// put the document into the list
			if (hasContent)
				dtoDocuments.add(dtoDocument);
		}
		
		Transaction entityTrx = null;
		try {
			// create a transaction for storing the entities
			entityTrx = datastoreService.beginTransaction();
			
			// write the entities
			List<Key> entityKeys = GAELuceneStaticMethods.writeEntitiesToPersistentStore(dtoEntities, true, entityTrx);
	
			// populate the dto key strings
			for (Key key : entityKeys)
				dtoKeyStrings.add(KeyFactory.keyToString(key));
			
			// put the entity keys into the lucene documents; this only works when
			// every DTO produced a document (1:1 with the entities)
			if (dtoDocuments.size() == entityKeys.size()) {
				for (int i = 0; i < entityKeys.size(); i++)
					dtoDocuments.get(i).add(new Field(keyMapping.getMapping(), dtoKeyStrings.get(i), keyMapping.isLuceneStored() ? Store.YES : Store.NO, keyMapping.isLuceneIndexed() ? Index.ANALYZED : Index.NO));
				
				// attempt to get the index writer and the directory lock
				boolean saveDocumentsForLaterIndexing = !tryImmediateIndexMode;
				if (tryImmediateIndexMode) {
					IndexWriter indexWriter = null;
					try {						
						// get the index writer
						indexWriter = getIndexProvider(alternativeIndexProvider).getStandardIndexWriter();
						
						// delete the pre-existing documents from the index
						indexWriter.deleteDocuments(keyStringTerms.toArray(new Term[keyStringTerms.size()]));
						
						// index the new documents
						for (Document dtoDoc : dtoDocuments)
							indexWriter.addDocument(dtoDoc);
					} catch (LockObtainFailedException lockObtainFailedException) {
						if (!saveUnindexableDocumentsForLaterMode) {
							entityTrx.rollback();
							// LockObtainFailedException extends IOException, so it can be rethrown directly
							throw lockObtainFailedException;
						} else				
							saveDocumentsForLaterIndexing = true;
					} finally {
						// close the index writer
						if (indexWriter != null)
							try {
								indexWriter.close();
								// want to double check that the writer is removed, can't restart server in the cloud
								getIndexProvider(alternativeIndexProvider).resetWriter();
							} catch (IOException ioException) {
								// want to triple check that the writer is removed, can't restart server in the cloud
								getIndexProvider(alternativeIndexProvider).resetWriter();
							}
					}
				}
				
				// write the documents and terms to the datastore for later indexing
				if (saveDocumentsForLaterIndexing) {
					List<Entity> luceneDocEntities = new ArrayList<Entity>(dtoDocuments.size());
					// keyStringTerms only has entries for DTOs that arrived with a key,
					// so it is walked with its own cursor rather than indexed by i.
					// FIX: the cursor previously started at 1, which skipped the first
					// term and threw IndexOutOfBoundsException for 0- or 1-term lists.
					int currentTermIndex = 0;
					
					for (int i = 0; i < dtoEntities.size(); i++) {
						// create the entity, child of the dto entity
						Entity docEntity = new Entity(SystemConstants.indexQueueKind, entityKeys.get(i));
						// load the document into it
						docEntity.setUnindexedProperty("document", dtoDocuments.get(i));
						
						// attach the delete term if this DTO had a pre-existing key.
						// FIX: the term text holds the KeyFactory string form, so compare
						// against dtoKeyStrings (Key.toString() is a different, debug-style
						// format and never matched), and bounds-check explicitly instead of
						// catching NullPointerException (List.get() out of range throws
						// IndexOutOfBoundsException, which the old catch never intercepted)
						if (currentTermIndex < keyStringTerms.size()
								&& keyStringTerms.get(currentTermIndex).text().equals(dtoKeyStrings.get(i))) {
							docEntity.setUnindexedProperty("delete_term", keyStringTerms.get(currentTermIndex));
							currentTermIndex++;
						}
						
						// put the entity into the list
						luceneDocEntities.add(docEntity);
					}
					
					// store the doc entities, they are children of the main entities, so can go in the same transaction
					GAELuceneStaticMethods.writeEntitiesToPersistentStore(luceneDocEntities, false, entityTrx);
				}
			} else {
				if (!dtoDocuments.isEmpty())
					throw new RuntimeException("There were Lucene Documents that should have been written but could not because they did not have a 1:1 ratio with the Datastore Entities to be written.");
			}
			
			// try to commit the direct entities (lucene occurs in separate transactions)
			GAELuceneStaticMethods.attemptCommit(entityTrx);
		} catch (IOException exception) {
			// the transaction will be rolled back, so clear the output list before rethrowing
			dtoKeyStrings.clear();
			throw exception;
		} catch (RuntimeException exception) {
			// the transaction will be rolled back, so clear the output list
			dtoKeyStrings.clear();
			// wrap the runtime exception in an IOException
			GAELuceneStaticMethods.throwIOWrappedException(exception, "writing DTO entities");
		} finally {
			// verify the transaction is committed, otherwise roll it back
			GAELuceneStaticMethods.checkTransaction(entityTrx);
		}
			
		// return the key strings
		return dtoKeyStrings;
	}
	
	// LOAD methods
	
	// DELETE methods
	
	/**
	 * Deletes the entity identified by the key string, and its Lucene document,
	 * using the primary index provider.
	 * 
	 * @see #delete(Key, IndexProvider)
	 */
	public void delete(String keyString) throws IOException {
		delete(keyString, null);
	}
	
	/**
	 * Deletes the entity identified by the key string, and its Lucene document.
	 * 
	 * @see #delete(Key, IndexProvider)
	 */
	public void delete(String keyString, IndexProvider alternativeIndexProvider) throws IOException {
		delete(KeyFactory.stringToKey(keyString), alternativeIndexProvider);
	}
	
	/**
	 * Deletes the entity identified by the key, and its Lucene document,
	 * using the primary index provider.
	 * 
	 * @see #delete(Key, IndexProvider)
	 */
	public void delete(Key key) throws IOException {
		delete(key, null);
	}
	
	/**
	 * Deletes the entity identified by the key and its matching Lucene document.
	 * 
	 * The index writer (and its directory lock) is acquired before the datastore
	 * transaction begins; if the Lucene delete fails, the datastore transaction
	 * is rolled back and the writer is force-closed so the lock is not leaked.
	 * 
	 * @param key datastore key of the entity to delete
	 * @param alternativeIndexProvider optional provider overriding the primary one
	 * @throws IOException if the entity or document delete failed
	 */
	public void delete(Key key, IndexProvider alternativeIndexProvider) throws IOException {
		Transaction dsTransaction = null;
		IOException luceneIOException = null;
		IOException recipeIOException = null;
		
		IndexProvider provider = getIndexProvider(alternativeIndexProvider);
		
		try {
			// open a lucene index writer (acquires the index lock)
			IndexWriter writer = provider.getStandardIndexWriter();
			
			// start a transaction and delete the key inside the transaction
			dsTransaction = datastoreService.beginTransaction();
			datastoreService.delete(dsTransaction, key);
			
			try {
				// delete the document whose key field matches this entity's key string
				Term deleteTerm = new Term(SystemConstants.keyMapping.getMapping(), KeyFactory.keyToString(key));
				writer.deleteDocuments(deleteTerm);
				writer.close();
				
			} catch (IOException ioException) {
				luceneIOException = ioException;
				dsTransaction.rollback();
				
				// FIX: previously the writer was left open (and the index lock held)
				// when deleteDocuments() threw; close it and reset the provider so
				// the lock is released - can't restart the server in the cloud
				try {
					writer.close();
				} catch (IOException closeException) {
					// ignore: resetWriter below discards the broken writer
				} finally {
					provider.resetWriter();
				}
			}
			
			// commit the recipe delete transaction (no-op if it was rolled back;
			// NOTE(review): assumes attemptCommit tolerates inactive transactions - confirm)
			GAELuceneStaticMethods.attemptCommit(dsTransaction);
			
		} catch (RuntimeException runtimeException) {
			recipeIOException = GAELuceneStaticMethods.wrapInIOException(runtimeException, "deleting entity");
			// wrapInIOException may decline to wrap; in that case rethrow as-is
			if (recipeIOException == null )
				throw runtimeException;
		} finally {
			GAELuceneStaticMethods.checkTransaction(dsTransaction);
			GAELuceneStaticMethods.combineMultipleIOExceptions(logger, recipeIOException, luceneIOException);
		}
	}
	
}
