package org.apache.lucene.ocean.util;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Payload;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.UnicodeUtil;

// INCOMPLETE: serializes analyzed token streams of documents to an IndexOutput;
// writeFieldIndexes is still a stub and the field-index map is never populated.
public class TokenStreamOutput {
  /** Analyzer used to tokenize field values when the field supplies no pre-built TokenStream. */
  private Analyzer analyzer;
  /** Token instance reused across calls to avoid per-token allocation. */
  private Token localToken = new Token();
  /** Current token position within the document (shared across fields via position-increment gaps). */
  int position;
  /** Number of tokens emitted so far for the current field; capped at {@link #maxFieldLength}. */
  int length;
  /** Running character offset, carried across fields of a document. */
  int offset;
  /** Highest end offset seen while tokenizing the current field. */
  int offsetEnd;
  /** Maximum number of tokens to index per field; further tokens are dropped. */
  final int maxFieldLength;
  /** Destination for the serialized token data. */
  private IndexOutput output;
  /** Scratch buffer reused for UTF-16 to UTF-8 conversion of term text. */
  private UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
  private int fieldIndexPosition;
  private Map<String,FieldIndex> fieldIndexes;

  /**
   * Serializes the analyzed token streams of {@code documents} to {@code output}.
   * The stream begins with a VInt document count followed by each document's
   * indexed fields (see {@link #writeDocument}).
   *
   * @param documents      documents whose indexed fields are tokenized and written
   * @param analyzer       analyzer used for tokenized fields without their own TokenStream
   * @param output         destination index output
   * @param maxFieldLength per-field token cap; tokens past it are ignored
   * @throws IOException if writing to {@code output} fails
   */
  public TokenStreamOutput(List<Document> documents, Analyzer analyzer, IndexOutput output, int maxFieldLength) throws IOException {
    // BUGFIX: analyzer and output were accepted but never stored, so every later
    // use of this.output / this.analyzer threw NullPointerException.
    this.analyzer = analyzer;
    this.output = output;
    this.maxFieldLength = maxFieldLength;
    output.writeVInt(documents.size());
    for (Document document : documents) {
      writeDocument(document);
    }
  }

  /**
   * Writes the field-index table to {@code output}.
   *
   * <p>TODO: unimplemented stub — {@link #fieldIndexes} is never populated and
   * the intended on-disk layout (delta-coded doc ids?) has not been decided.
   */
  public void writeFieldIndexes(IndexOutput output) throws IOException {
    // int doc = -1;
    // output.writeVInt(i);
    // for (FieldIndex fieldIndex : fieldIndexes) {
    //   if (doc != fieldIndex.doc) output.writeVInt(fieldIndex.doc);
    // }
  }

  /** Location of a field's token data: owning document id and starting position. */
  public static class FieldIndex {
    public int doc;
    public int position;
  }

  /**
   * Writes all indexed fields of {@code document}: a VInt count of indexed
   * fields followed by each field's token data via {@link #writeField}.
   *
   * @throws IOException if writing to the output fails
   */
  public void writeDocument(Document document) throws IOException {
    List<Fieldable> fields = document.getFields();
    List<Fieldable> indexFields = new ArrayList<Fieldable>(fields.size());
    for (Fieldable field : fields) {
      if (field.isIndexed()) {
        indexFields.add(field);
      }
    }
    output.writeVInt(indexFields.size());
    for (Fieldable field : indexFields) {
      writeField(field);
    }
  }

  /**
   * Tokenizes one field and writes its tokens. Un-tokenized fields are written
   * as a single token covering the whole string value; tokenized fields use the
   * field's own TokenStream, its Reader, or the analyzer over its string value.
   *
   * @throws IOException              if analysis or writing fails
   * @throws IllegalArgumentException if a tokenized field has neither a
   *                                  TokenStream, Reader, nor String value
   */
  public void writeField(Fieldable field) throws IOException {
    long filePointer = output.getFilePointer();

    // Separate consecutive field instances by the analyzer's position gap.
    if (length > 0)
      position += analyzer.getPositionIncrementGap(field.name());
    //output.writeString(field.name());
    if (!field.isTokenized()) { // un-tokenized field: emit the raw value as one token
      String stringValue = field.stringValue();
      final int valueLength = stringValue.length();
      Token token = localToken;
      token.clear();
      char[] termBuffer = token.termBuffer();
      if (termBuffer.length < valueLength)
        termBuffer = token.resizeTermBuffer(valueLength);
      stringValue.getChars(0, valueLength, termBuffer, 0);
      token.setTermLength(valueLength);
      token.setStartOffset(offset);
      token.setEndOffset(offset + valueLength);
      addPosition(token);
      offset += valueLength;
      length++;
    } else {
      final TokenStream stream;
      final TokenStream streamValue = field.tokenStreamValue();
      if (streamValue != null)
        stream = streamValue;
      else {
        final Reader reader; // find or make Reader
        final Reader readerValue = field.readerValue();
        if (readerValue != null)
          reader = readerValue;
        else {
          String stringValue = field.stringValue();
          if (stringValue == null)
            throw new IllegalArgumentException("field must have either TokenStream, String or Reader value");
          reader = new StringReader(stringValue);
        }
        stream = analyzer.reusableTokenStream(field.name(), reader);
      }
      stream.reset();
      try {
        offsetEnd = offset - 1;
        for (;;) {
          Token token = stream.next(localToken);
          if (token == null)
            break;
          // NOTE(review): position only advances by (increment - 1) here and
          // addPosition never increments it, so tokens with increment 1 all
          // share one position — confirm against the intended prox format.
          position += (token.getPositionIncrement() - 1);
          addPosition(token);
          // BUGFIX: track the furthest end offset so that the post-loop
          // offset update actually advances past this field's text.
          if (offset + token.endOffset() > offsetEnd)
            offsetEnd = offset + token.endOffset();
          if (++length >= maxFieldLength) {
            // maxFieldLength reached for this field; ignore following tokens
            break;
          }
        }
        offset = offsetEnd + 1;
      } finally {
        stream.close();
      }
    }
  }

  /**
   * Writes a single token: UTF-8 term bytes (VInt byte length + bytes), start
   * and end offsets, then the position shifted left one bit with the low bit
   * flagging an inline payload (VInt length + bytes) when present.
   */
  private void addPosition(Token token) throws IOException {
    final char[] tokenText = token.termBuffer();
    final int tokenTextLen = token.termLength();
    UnicodeUtil.UTF16toUTF8(tokenText, 0, tokenTextLen, utf8Result);
    // BUGFIX: the length prefix must be the UTF-8 byte count actually written,
    // not the UTF-16 char count, or a reader cannot consume the term bytes.
    output.writeVInt(utf8Result.length);
    output.writeBytes(utf8Result.result, 0, utf8Result.length);
    output.writeVInt(token.startOffset());
    output.writeVInt(token.endOffset());
    Payload payload = token.getPayload();
    if (payload == null) {
      output.writeVInt(position << 1); // low bit 0: no payload follows
    } else {
      output.writeVInt((position << 1) | 1); // low bit 1: payload follows
      // BUGFIX: use payload.length(), not getData().length — the payload may be
      // a window into a larger array, and data.length from a nonzero offset
      // overruns the buffer.
      int payloadLength = payload.length();
      output.writeVInt(payloadLength);
      output.writeBytes(payload.getData(), payload.getOffset(), payloadLength);
    }
  }
}
