package org.apache.lucene.ocean.util;

import java.io.IOException;
import java.util.List;

import org.apache.commons.lang.SerializationUtils;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.SmallFloat;

/**
 * A {@code DocumentSerializer} that serializes a {@link Document} field by
 * field instead of relying on Java object serialization (the {@link #write}
 * and {@link #read} methods remain serialization-based for comparison).
 * Currently unused.
 */
public class DocumentSerializer {
  // Bit flags packed into a single VInt describing a field's configuration.
  // Several flags may be set at once (e.g. STORED | INDEXED | TOKENIZED | STRING).
  final static int INDEXED = 0x00000001;
  final static int TOKENIZED = 0x00000002;
  final static int STORED = 0x00000004;
  final static int BINARY = 0x00000008;
  final static int COMPRESSED = 0x00000010;
  final static int OMIT_NORMS = 0x00000020;
  final static int STORE_TERMVECTORS = 0x00000040;
  final static int STORE_TERMPOSITIONS = 0x00000080;
  final static int STORE_TERMOFFSETS = 0x00000100;
  final static int STRING = 0x00000200;
  final static int TOKENSTREAM = 0x00000400;
  final static int TOKENS = 0x00000800;

  // final static int REQUIRED = 0x00001000;

  /**
   * Reads a {@link Document} previously written by
   * {@link #writeDocument(Document, IndexOutput, boolean)}.
   *
   * @param input stream positioned at the start of a serialized document
   * @return the reconstructed document; fields with neither a STRING nor a
   *         BINARY value flag are skipped
   * @throws IOException if the underlying stream fails
   */
  public static Document toDocument(IndexInput input) throws IOException {
    int numFields = input.readVInt();
    Document document = new Document();
    for (int x = 0; x < numFields; x++) {
      Field field = null;
      Field.Store store = Field.Store.NO;
      Field.Index index = Field.Index.NO;
      String name = input.readString();
      int type = input.readVInt();
      // A compressed field is also stored, so test COMPRESSED first.
      if ((type & COMPRESSED) != 0) {
        store = Field.Store.COMPRESS;
      } else if ((type & STORED) != 0) {
        store = Field.Store.YES;
      }
      if ((type & TOKENIZED) != 0) {
        index = Field.Index.TOKENIZED;
      } else if ((type & OMIT_NORMS) != 0) {
        // Indexed without norms. The original code tested INDEXED twice here,
        // which made the NO_NORMS branch unreachable.
        index = Field.Index.NO_NORMS;
      } else if ((type & INDEXED) != 0) {
        index = Field.Index.UN_TOKENIZED;
      }
      float boost = SmallFloat.byte315ToFloat(input.readByte());
      if ((type & STRING) != 0) {
        String string = input.readString();
        field = new Field(name, string, store, index);
      } else if ((type & BINARY) != 0) {
        int length = input.readVInt();
        byte[] binary = new byte[length];
        input.readBytes(binary, 0, length);
        field = new Field(name, binary, store);
      }
      if (field != null) {
        // Apply the boost that was read above; previously it was discarded.
        field.setBoost(boost);
        document.add(field);
      }
    }
    return document;
  }

  // TODO: implement serializing tokens
  /**
   * Writes {@code document} to {@code output} in the field-by-field format
   * read back by {@link #toDocument(IndexInput)}.
   * <p>
   * Per field: name (String), type flags (VInt), boost (single byte via
   * {@link SmallFloat#floatToByte315(float)}), then the string or binary
   * value when present.
   *
   * @param document      the document to serialize
   * @param includeTokens whether the TOKENS flag should be recorded
   *                      (token payloads themselves are not yet written)
   * @throws IOException if the underlying stream fails
   */
  public static void writeDocument(Document document, IndexOutput output, boolean includeTokens) throws IOException {
    List<Fieldable> fields = document.getFields();
    output.writeVInt(fields.size());
    for (Fieldable field : fields) {
      output.writeString(field.name());
      int type = 0;
      // The flags are independent properties, so each is tested on its own.
      // The original else-if chain recorded at most one flag per field, which
      // lost information and could desynchronize the reader (see below).
      if (field.isIndexed()) {
        type |= INDEXED;
      }
      if (field.isTokenized()) {
        type |= TOKENIZED;
      }
      if (field.isStored()) {
        type |= STORED;
      }
      if (field.isCompressed()) {
        type |= COMPRESSED;
      }
      if (field.getOmitNorms()) {
        type |= OMIT_NORMS;
      }
      if (field.isTermVectorStored()) {
        type |= STORE_TERMVECTORS;
      }
      if (field.isStorePositionWithTermVector()) {
        type |= STORE_TERMPOSITIONS;
      }
      if (field.isStoreOffsetWithTermVector()) {
        type |= STORE_TERMOFFSETS;
      }
      if (field.tokenStreamValue() != null) {
        type |= TOKENSTREAM;
      }
      if (includeTokens) {
        type |= TOKENS;
      }
      // The STRING/BINARY flags must mirror exactly which value is written
      // below, otherwise toDocument() reads the stream out of step.
      if (field.stringValue() != null) {
        type |= STRING;
      } else if (field.binaryValue() != null) {
        type |= BINARY;
      }
      output.writeVInt(type);
      output.writeByte(SmallFloat.floatToByte315(field.getBoost())); // boost
      if (field.stringValue() != null) {
        String string = field.stringValue();
        output.writeString(string);
      } else if (field.binaryValue() != null) {
        byte[] bytes = field.binaryValue();
        output.writeVInt(bytes.length);
        output.writeBytes(bytes, bytes.length);
      }
      // if (includeTokens && field.isTokenized()) {
      // TokenStream tokenStream = analyzer.tokenStream(field.name(), reader);
      // }
    }
  }

  /**
   * Placeholder for token serialization: drains {@code tokenStream} but does
   * not yet write anything. TODO: emit each token's term, offsets, position
   * increment, and payload.
   *
   * @throws IOException if the token stream fails
   */
  public static void writeTokens(TokenStream tokenStream) throws IOException {
    Token token;
    while ((token = tokenStream.next()) != null) {
      // intentionally empty: token encoding not implemented yet
    }
  }

  /**
   * Writes {@code document} using Java object serialization
   * (length-prefixed byte array). Counterpart of {@link #read(IndexInput)}.
   *
   * @throws IOException if the underlying stream fails
   */
  public static void write(Document document, IndexOutput output) throws IOException {
    byte[] bytes = SerializationUtils.serialize(document);
    output.writeVInt(bytes.length);
    output.writeBytes(bytes, bytes.length);
  }

  /**
   * Reads a document written by {@link #write(Document, IndexOutput)}.
   * NOTE(review): Java-native deserialization is unsafe on untrusted input;
   * only use on data this process itself wrote.
   *
   * @throws IOException if the underlying stream fails
   */
  public static Document read(IndexInput input) throws IOException {
    int length = input.readVInt();
    byte[] bytes = new byte[length];
    input.readBytes(bytes, 0, length);
    return (Document) SerializationUtils.deserialize(bytes);
  }
}
