/*
 * Copyright 2011-2012 by The Regents of the University of California Licensed
 * under the Apache License, Version 2.0 (the "License"); you may not use this
 * file except in compliance with the License. you may obtain a copy of the
 * License from
 * 
 * http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package edu.ucsd.db.bassarisk.indexer.hyracks;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Logger;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.ucsd.db.bassarisk.data.api.IValueType.IValueReference;
import edu.ucsd.db.bassarisk.indexer.api.IArtifactParser;
import edu.ucsd.db.bassarisk.indexer.api.ICorpus;
import edu.ucsd.db.bassarisk.indexer.api.IPosting.ISequencedPosting;
import edu.ucsd.db.bassarisk.indexer.api.IPostingHandler;
import edu.ucsd.db.bassarisk.indexer.api.IStageOneAnalyzerFactory;
import edu.ucsd.db.bassarisk.indexer.api.IStageOneFactory;
import edu.ucsd.db.bassarisk.indexer.api.ITextArtifact;
import edu.ucsd.db.bassarisk.indexer.hyracks.HyracksPostingSerialization.IPostingTupleWriter;
import edu.ucsd.db.bassarisk.indexer.impl.PostingFactory;

/**
 * A Hyracks source operator (0 inputs, 1 output) that performs stage-one
 * indexing for a file split: it parses each {@link ITextArtifact} of the
 * corpus and pushes the resulting sequenced postings through a chain of
 * analyzers into a frame-tuple writer.
 */
public class IndexerStageOneOperatorDescriptor extends
    AbstractSingleActivityOperatorDescriptor {
  private static final long serialVersionUID = -4339489350588586453L;

  private static final Logger LOG = Logger
      .getLogger(IndexerStageOneOperatorDescriptor.class.getName());

  private final IStageOneFactory stageOneComponantFactory;
  private final IStageOneAnalyzerFactory[] analyzers;
  private final IFileSplitProvider fileSplitProvider;

  /**
   * @param spec the job this operator belongs to
   * @param fileSplitProvider supplies one corpus file per partition
   * @param analyzers analyzer factories, in pipeline order: analyzers[0] is
   *          the first stage postings flow through
   * @param stageOneComponantFactory creates the corpus reader and artifact
   *          parser used by each partition's runtime
   */
  public IndexerStageOneOperatorDescriptor(final JobSpecification spec,
                                           final IFileSplitProvider fileSplitProvider,
                                           final IStageOneAnalyzerFactory[] analyzers,
                                           final IStageOneFactory stageOneComponantFactory) {
    super(spec, 0, 1); // source operator: no inputs, exactly one output
    this.fileSplitProvider = fileSplitProvider;
    this.analyzers = analyzers;
    this.stageOneComponantFactory = stageOneComponantFactory;
  }

  @Override
  public IOperatorNodePushable
      createPushRuntime(final IHyracksTaskContext ctx,
                        final IRecordDescriptorProvider recordDescProvider,
                        final int partition, final int nPartitions)
          throws HyracksDataException {
    // Each partition processes exactly one file split.
    final FileSplit split = this.fileSplitProvider.getFileSplits()[partition];

    final ICorpus corpus = this.stageOneComponantFactory.createCorpus();
    FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
    final IPostingTupleWriter<ISequencedPosting> postingWriter =
        HyracksPostingSerialization.createSequencedPostingTupleWriter(ctx
            .allocateFrame(), appender);
    IPostingHandler<ISequencedPosting> postingSink = postingWriter;
    // Iterate backwards over the analyzers so that analyzers[0] ends up as
    // the outermost handler of the analysis pipeline and the tuple writer as
    // the innermost sink.
    // FIX: the loop previously incremented i (++i), which immediately read
    // past the end of the array instead of walking it back to front.
    for (int i = this.analyzers.length - 1; i >= 0; --i) {
      IStageOneAnalyzerFactory factory = this.analyzers[i];
      postingSink = factory.createAnalyzer(postingSink, PostingFactory.INSTANCE);
    }
    final IArtifactParser parser =
        this.stageOneComponantFactory.createArtifactParser(postingSink,
                                                           PostingFactory.INSTANCE);

    return new AbstractUnaryOutputSourceOperatorNodePushable() {
      @Override
      public void initialize() throws HyracksDataException {
        File f = split.getLocalFile().getFile();
        LOG.info(String.format("Initializing Corpus split %s", f.getName()));
        this.writer.open();
        postingWriter.setFrameWriter(this.writer);
        InputStream in = null;
        try {
          in = new FileInputStream(f);
          // Iterate over the parsed artifacts and send their postings into
          // the analyzer pipeline; the tuple writer at the end of the chain
          // emits frames to this.writer.
          corpus.initialize(in);
          for (ITextArtifact artifact : corpus) {
            parser.collectPostings(artifact);
          }
          // Flush any partially-filled frame held by the tuple writer.
          postingWriter.close();
        } catch (Exception e) {
          // Single failure path (the previous code signalled fail() twice
          // for a missing file: once in an inner catch and again here).
          this.writer.fail();
          LOG.severe(e.toString());
          throw new RuntimeException(e);
        } finally {
          // FIX: the input stream was previously leaked on every run.
          if (in != null) {
            try {
              in.close();
            } catch (IOException ignored) {
              // Best effort: the data has already been consumed (or the
              // operator already failed); nothing useful to do here.
            }
          }
          this.writer.close();
        }
      }
    };
  }
}
