package org.apache.lucene.ocean;

import java.io.IOException;
import java.util.Date;

import org.apache.commons.lang.SerializationUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.ocean.log.RecordData;
import org.apache.lucene.ocean.log.TransactionLog;
import org.apache.lucene.ocean.util.Bytes;
import org.apache.lucene.ocean.util.Constants;
import org.apache.lucene.store.RAMDirectory;

/**
 * Abstract Batch class that contains what is needed to execute a transaction.
 * There will be either documents or a ramDirectory but never both.  A ramDirectory
 * represents documents encoded into a Lucene segment.
 *
 * <p>After {@link #close()} is called the batch accepts no further additions;
 * mutators that honor the closed flag throw {@link IllegalStateException}
 * (a subclass of the previously thrown {@code RuntimeException}, so existing
 * callers that catch {@code RuntimeException} are unaffected).</p>
 */
// TODO: add externalizable methods to ensure a stable serialized form
public abstract class Batch {
  /** Raw documents to index; null when the batch carries a ramDirectory instead. */
  protected Documents documents;
  /** Documents pre-encoded as a Lucene segment; null when raw documents are used. */
  protected RAMDirectory ramDirectory;
  /** Analyzer to use when indexing the documents. */
  protected Analyzer analyzer;
  /** Deletes carried by this transaction; may be null. */
  protected Deletes deletes;
  /** Creation time of the batch, set at construction. */
  protected Date timestamp;
  /** Once true, no further additions are allowed. */
  protected boolean isClosed = false;

  public Batch() {
    timestamp = new Date();
  }

  /**
   * If the batch contains a RAMDirectory.
   * @return true when a RAMDirectory has been set
   */
  public boolean hasRAMDirectory() {
    return ramDirectory != null;
  }

  /**
   * Get the RAMDirectory.
   * @return the RAMDirectory, or null when the batch carries raw documents
   */
  public RAMDirectory getRamDirectory() {
    return ramDirectory;
  }

  /**
   * The batch object created on a master before it is saved into the
   * transaction log.  Serialization of documents and deletes is performed
   * lazily via {@link #getRecordData()}.
   */
  public static class MasterBatch extends Batch {
    private final TransactionSystem transactionSystem;
    private Bytes docBytes;   // lazily-created serialized documents or ramDirectory
    private int docType;      // Constants.RAM_DIRECTORY_TYPE or Constants.DOCUMENTS_TYPE
    private Bytes otherBytes; // lazily-created serialized deletes
    private int otherType;    // Constants.DELETES_SERIALIZE_TYPE when otherBytes is set

    /**
     * Constructor for the MasterBatch.  Requires the transactionSystem for the
     * commit method.
     * @param transactionSystem system used by {@link #commit()}
     */
    public MasterBatch(TransactionSystem transactionSystem) {
      this.transactionSystem = transactionSystem;
    }

    /**
     * Get the record data, serializing the documents and deletes on first use.
     * @return record data holding the serialized doc and delete payloads
     * @throws IOException if serialization fails
     */
    public RecordData getRecordData() throws IOException {
      if (hasDocuments() && !hasDocData()) {
        createDocData();
      }
      if (hasDeletes() && !hasOtherData()) {
        createDeleteData();
      }
      return new RecordData(docType, docBytes, otherType, otherBytes);
    }

    /**
     * If it has the other (deletes) data.
     * @return true when the deletes have already been serialized
     */
    public boolean hasOtherData() {
      return otherBytes != null;
    }

    /**
     * Get the other Bytes.
     * @return the serialized deletes, or null if not yet created
     */
    public Bytes getOtherBytes() {
      return otherBytes;
    }

    /**
     * Get the other type.
     * @return the type constant for the other bytes; meaningful only after
     *         {@link #createDeleteData()} has run
     */
    public int getOtherType() {
      return otherType;
    }

    /**
     * Get the doc bytes.
     * @return the serialized documents payload, or null if not yet created
     */
    public Bytes getDocBytes() {
      return docBytes;
    }

    /**
     * Get the doc type.
     * @return the type constant for the doc bytes; meaningful only after
     *         {@link #createDocData()} has run
     */
    public int getDocType() {
      return docType;
    }

    /**
     * If there is docBytes.
     * @return true when the documents have already been serialized
     */
    public boolean hasDocData() {
      return docBytes != null;
    }

    /**
     * Serializes the deletes to the otherBytes and sets the otherType to
     * Constants.DELETES_SERIALIZE_TYPE.  No-op when the batch has no deletes.
     * @throws IOException if serialization fails
     */
    public void createDeleteData() throws IOException {
      if (hasDeletes()) {
        Deletes deletes = getDeletes();
        otherBytes = new Bytes(1024);
        SerializationUtils.serialize(deletes, otherBytes.getOutputStream());
        otherType = Constants.DELETES_SERIALIZE_TYPE;
      }
    }

    /**
     * Serializes the documents data depending on if the batch contains a
     * RAMDirectory or documents.  The respective docType options are
     * Constants.RAM_DIRECTORY_TYPE and Constants.DOCUMENTS_TYPE.  The
     * RAMDirectory wins when both could apply (they are mutually exclusive
     * by the class contract).
     * @throws IOException if serialization fails
     */
    public void createDocData() throws IOException {
      if (hasRAMDirectory()) {
        docBytes = TransactionLog.serialize(getRamDirectory());
        docType = Constants.RAM_DIRECTORY_TYPE;
      } else if (hasDocuments()) {
        Documents documents = getDocuments();
        docBytes = TransactionLog.serialize(documents);
        docType = Constants.DOCUMENTS_TYPE;
      }
    }

    /**
     * Set the RAMDirectory, discarding any raw documents (the two are
     * mutually exclusive).
     * NOTE(review): intentionally not guarded by ensureOpen() — this looks
     * like it may legitimately be called after close() when documents are
     * converted into a segment; confirm against callers before tightening.
     * @param ramDirectory segment-encoded documents
     */
    void setRAMDirectory(RAMDirectory ramDirectory) {
      documents = null;
      this.ramDirectory = ramDirectory;
    }

    /**
     * Set the analyzer.
     * @param analyzer analyzer to use when indexing
     */
    void setAnalyzer(Analyzer analyzer) {
      this.analyzer = analyzer;
    }

    /**
     * Set the deletes.
     * @param deletes deletes for this transaction
     * @throws IllegalStateException if the batch has been closed
     */
    void setDeletes(Deletes deletes) {
      ensureOpen();
      this.deletes = deletes;
    }

    /**
     * Add a document to the documents.
     * @param document document to add
     * @throws IllegalStateException if the batch has been closed
     */
    void addDocument(Document document) {
      // Fix: previously this path skipped the closed check that
      // addDocuments(Documents) performed, allowing additions after close().
      ensureOpen();
      if (this.documents == null)
        this.documents = new Documents();
      this.documents.add(document);
    }

    /**
     * Add the given documents to the documents.
     * @param documents documents to add
     * @throws IllegalStateException if the batch has been closed
     */
    void addDocuments(Documents documents) {
      ensureOpen();
      if (this.documents == null)
        this.documents = new Documents();
      this.documents.addAll(documents);
    }

    /** Throws when the batch has been closed; guards the mutators above. */
    private void ensureOpen() {
      if (isClosed)
        throw new IllegalStateException("batch closed");
    }

    /**
     * Calls transactionSystem.commitBatch with this MasterBatch as the parameter.
     * @throws Exception propagated from the transaction system
     */
    void commit() throws Exception {
      transactionSystem.commitBatch(this);
    }
  }

  /**
   * When a batch is loaded from the transaction log it is a SlaveBatch because
   * all of the variables are immutable.
   */
  public static class SlaveBatch extends Batch {
    /** Transaction-log identifier of this batch. */
    private final Long id;

    /**
     * Create a slave batch carrying raw documents.
     * @param id transaction-log id
     * @param documents documents loaded from the log
     * @param deletes deletes loaded from the log; may be null
     */
    public SlaveBatch(Long id, Documents documents, Deletes deletes) {
      this.id = id;
      this.documents = documents;
      this.deletes = deletes;
    }

    /**
     * Create a slave batch carrying a segment-encoded RAMDirectory.
     * @param id transaction-log id
     * @param ramDirectory segment loaded from the log
     * @param deletes deletes loaded from the log; may be null
     */
    public SlaveBatch(Long id, RAMDirectory ramDirectory, Deletes deletes) {
      this.id = id;
      this.ramDirectory = ramDirectory;
      this.deletes = deletes;
    }

    /**
     * Get the id.
     * @return transaction-log id of this batch
     */
    public Long getId() {
      return id;
    }
  }

  /**
   * Get the analyzer for the batch.
   * @return the analyzer, or null if none was set
   */
  public Analyzer getAnalyzer() {
    return analyzer;
  }

  /**
   * Does the batch contain documents.
   * @return true when at least one document is present
   */
  public boolean hasDocuments() {
    return documents != null && documents.size() > 0;
  }

  /**
   * Does the batch contain deletes.
   * @return true when a non-empty Deletes is present
   */
  public boolean hasDeletes() {
    return deletes != null && deletes.hasDeletes();
  }

  /**
   * Disallow any more additions.
   */
  public void close() {
    isClosed = true;
  }

  /**
   * Get the documents.
   * @return the documents, or null when the batch carries a RAMDirectory
   */
  public Documents getDocuments() {
    return documents;
  }

  /**
   * Get the deletes.
   * @return the deletes; may be null
   */
  public Deletes getDeletes() {
    return deletes;
  }

  /**
   * Get the timestamp.
   * NOTE(review): returns the internal mutable Date; callers could mutate it.
   * Left as-is to preserve identity semantics — consider a defensive copy.
   * @return creation time of the batch
   */
  public Date getTimestamp() {
    return timestamp;
  }
}
