/*
 * Copyright 2011-2012 by The Regents of the University of California Licensed
 * under the Apache License, Version 2.0 (the "License"); you may not use this
 * file except in compliance with the License. You may obtain a copy of the
 * License from
 * 
 * http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package edu.ucsd.db.bassarisk.indexer.app;

import java.io.File;
import java.util.logging.Logger;

import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;

import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.ucsd.db.bassarisk.indexer.api.IStageOneAnalyzerFactory;
import edu.ucsd.db.bassarisk.indexer.api.IStageOneFactory;
import edu.ucsd.db.bassarisk.indexer.api.IStageTwoAnalyzerFactory;
import edu.ucsd.db.bassarisk.indexer.hyracks.CabinetLoadOperatorDescriptor;
import edu.ucsd.db.bassarisk.indexer.hyracks.HyracksPostingSerialization;
import edu.ucsd.db.bassarisk.indexer.hyracks.IndexerStageOneOperatorDescriptor;
import edu.ucsd.db.bassarisk.indexer.hyracks.IndexerStageTwoOperatorDescriptor;

/**
 * Command-line driver that builds a Bassarisk inverted index as a Hyracks job.
 *
 * The assembled pipeline is a four-stage dataflow:
 * stage-one operator (reads the input file splits) -&gt; stage-two operator -&gt;
 * external sort on (groupKey, tokenKey, position) -&gt; cabinet-load operator
 * that writes the index file.  One partition is created per input file split,
 * placed on the split's node.
 */
public class Indexer {
  /** Command-line options, populated by args4j in {@link #main(String[])}. */
  private static class Options {
    @Option(name = "-app", usage = "Hyracks Application name", required = false) public String app =
        "bassarisk-indexer";

    @Option(name = "-har", usage = "Application Assembly File", required = true) public String har;

    @Option(name = "-host",
            usage = "Hyracks Cluster Controller Host name",
            required = true) public String host;

    @Option(name = "-hashtable-size",
            usage = "Hash table size (default: 8191)",
            required = false) public int htSize = 8191;

    @Option(name = "-index-file",
            usage = "Absolute path to index file that will be written.",
            required = true) public String indexFile;

    @Option(name = "-infile-splits",
            usage = "Comma separated list of file-splits for the input. A file-"
                + "split is <node-name>:<path>",
            required = true) public String inFileSplits;

    @Option(name = "-port",
            usage = "Hyracks Cluster Controller Port (default: 1098)") public int port =
        1098;

    @Option(name = "-sortbuffer-size",
            usage = "Sort buffer size in frames (default: 32768)",
            required = false) public int sbSize = 32768;

    @Option(name = "-stage1-analyzers",
            usage = "Comma separated list of analyzers to apply in indexer stage "
                + "one. Stage one analyzers are specified using a package-qualified "
                + "name for a factory class implementing edu.ucsd.db.bassarisk."
                + "indexer.api.IStageOneAnalyzerFactory.",
            required = false) public String stage1Analyzers = "";

    @Option(name = "-stage-one",
            usage = "Package-qualified name for a stage-one factory class extending "
                + "edu.ucsd.db.bassarisk.indexer.hyracks.AbstractStageOneFactory.",
            required = true) public String stage1Factory;

    @Option(name = "-stage2-analyzers",
            usage = "Comma separated list of analyzers to apply in indexer stage "
                + "two. Stage two analyzers are specified using a package-qualified "
                + "name for a factory class implementing edu.ucsd.db.bassarisk."
                + "indexer.api.IStageTwoAnalyzerFactory.",
            required = false) public String stage2Analyzers = "";
  }

  private static final Logger LOG = Logger.getLogger(Indexer.class.getName());

  /**
   * Entry point: parses the command line, then builds and runs the index job.
   *
   * @param args command-line flags; see {@link Options} for the full list.
   * @throws Exception if argument parsing, job creation, or job execution fails.
   */
  public static void main(final String[] args) throws Exception {
    LOG.info("Starting Bassarisk indexer process");
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    parser.parseArgument(args);

    Indexer indexer = new Indexer(options);

    indexer.Run();
  }

  /** One input file split per partition; populated in {@link #Run()}. */
  protected FileSplit[] inSplits;

  private final Options options;

  protected Indexer(final Options options) {
    this.options = options;
  }

  /**
   * Assembles the Hyracks job specification for the indexing pipeline.
   *
   * @param htSize hash table size from the command line. TODO: currently
   *          unused by any operator in this job; wire it in or remove it.
   * @param sbSize sort buffer size in frames for the external sort.
   * @return the fully wired job specification.
   */
  private JobSpecification createJob(final int htSize, final int sbSize) {
    JobSpecification spec = new JobSpecification();
    IFileSplitProvider splitsProvider = new ConstantFileSplitProvider(this.inSplits);

    // BASSARISK INDEXER STAGE 1
    // Instantiate the stage 1 analyzers named on the command line (may be none).
    String[] analyzerOneClassNames =
        this.options.stage1Analyzers.length() > 0 ? this.options.stage1Analyzers
            .split(",") : new String[0];
    IStageOneAnalyzerFactory[] stage1Analyzers =
        new IStageOneAnalyzerFactory[analyzerOneClassNames.length];
    LOG.info("Initializing stage 1 analyzers.");
    LOG.info(String.format("Found %d stage 1 analyzers.",
                           analyzerOneClassNames.length));
    for (int i = 0; i < analyzerOneClassNames.length; ++i) {
      stage1Analyzers[i] =
          (IStageOneAnalyzerFactory) this
              .instantiateClass(analyzerOneClassNames[i],
                                "Stage 1 analyzer factory",
                                IStageOneAnalyzerFactory.class);
    }
    // Instantiate the (required) stage 1 factory.
    IStageOneFactory stage1Factory =
        (IStageOneFactory) this.instantiateClass(this.options.stage1Factory,
                                                 "Stage 1 factory",
                                                 IStageOneFactory.class);
    LOG.info("Initializing stage 1 operator.");
    IndexerStageOneOperatorDescriptor stage1Operator =
        new IndexerStageOneOperatorDescriptor(spec, splitsProvider, stage1Analyzers,
                                              stage1Factory);
    this.createPartitionConstraint(spec, stage1Operator, this.inSplits);

    // BASSARISK INDEXER STAGE 2
    // Instantiate the stage 2 analyzers named on the command line (may be none).
    String[] analyzerTwoClassNames =
        this.options.stage2Analyzers.length() > 0 ? this.options.stage2Analyzers
            .split(",") : new String[0];
    IStageTwoAnalyzerFactory[] stage2Analyzers =
        new IStageTwoAnalyzerFactory[analyzerTwoClassNames.length];
    LOG.info("Initializing stage 2 analyzers.");
    for (int i = 0; i < analyzerTwoClassNames.length; ++i) {
      stage2Analyzers[i] =
          (IStageTwoAnalyzerFactory) this
              .instantiateClass(analyzerTwoClassNames[i],
                                "Stage 2 analyzer factory",
                                IStageTwoAnalyzerFactory.class);
    }
    LOG.info("Initializing stage 2 operator.");
    IOperatorDescriptor stage2Operator =
        new IndexerStageTwoOperatorDescriptor(spec, stage2Analyzers);
    this.createPartitionConstraint(spec, stage2Operator, this.inSplits);

    // BASSARISK INDEXER SORT STAGE
    // Sort posting tuples by groupKey (string), tokenKey (string), position (int).
    // TODO: It would be ideal to sort finally by value
    LOG.info("Initializing sort operator.");
    int[] sortKeys = new int[] { 0, 1, 3 };
    IBinaryComparatorFactory[] cfs =
        new IBinaryComparatorFactory[] {
                                        PointableBinaryComparatorFactory
                                            .of(UTF8StringPointable.FACTORY),
                                        PointableBinaryComparatorFactory
                                            .of(UTF8StringPointable.FACTORY),
                                        PointableBinaryComparatorFactory
                                            .of(IntegerPointable.FACTORY) };
    IOperatorDescriptor sortOperator =
        new ExternalSortOperatorDescriptor(spec, sbSize, sortKeys, null, cfs,
                                           HyracksPostingSerialization
                                               .positionedPostingsDescriptor());
    this.createPartitionConstraint(spec, sortOperator, this.inSplits);

    // BASSARISK INDEXER WRITE STAGE
    LOG.info("Initializing output operator.");
    IOperatorDescriptor writeOperator =
        new CabinetLoadOperatorDescriptor(spec, this.options.indexFile);
    this.createPartitionConstraint(spec, writeOperator, this.inSplits);

    // SETUP STAGE LINKS
    LOG.info("Initializing operator connections.");
    OneToOneConnectorDescriptor stage1ToStage2 =
        new OneToOneConnectorDescriptor(spec);

    // Hash-partition stage 2 output on the two string key fields
    // (groupKey, tokenKey) so all postings for a group/token pair are sorted
    // on the same node.
    // NOTE(review): the original code passed sortKeys (3 fields) with only 2
    // hash function factories, mismatching the partition computer's parallel
    // arrays (and pairing an integer hasher with a string field). Partitioning
    // on the two string key fields instead — confirm these are the intended
    // partition keys.
    int[] partitionKeys = new int[] { 0, 1 };
    IBinaryHashFunctionFactory[] hff =
        new IBinaryHashFunctionFactory[] {
                                          PointableBinaryHashFunctionFactory
                                              .of(UTF8StringPointable.FACTORY),
                                          PointableBinaryHashFunctionFactory
                                              .of(UTF8StringPointable.FACTORY) };
    ITuplePartitionComputerFactory partitionComputer =
        new FieldHashPartitionComputerFactory(partitionKeys, hff);
    IConnectorDescriptor stage2ToSort =
        new MToNPartitioningConnectorDescriptor(spec, partitionComputer);

    OneToOneConnectorDescriptor sortToWrite = new OneToOneConnectorDescriptor(spec);

    // ASSEMBLE PLAN
    // Each connector is wired exactly once (the original code connected
    // stage2ToSort twice, once here and once while declaring it above).
    spec.connect(stage1ToStage2, stage1Operator, 0, stage2Operator, 0);
    spec.connect(stage2ToSort, stage2Operator, 0, sortOperator, 0);
    spec.connect(sortToWrite, sortOperator, 0, writeOperator, 0);
    spec.addRoot(writeOperator);
    LOG.info("Job specification complete.");
    return spec;
  }

  /**
   * Constrains {@code op} to run with one partition per file split, each
   * placed on that split's node.
   *
   * @param spec the job specification the operator belongs to.
   * @param op the operator to constrain.
   * @param splits input file splits; one partition is created per split.
   */
  private void createPartitionConstraint(final JobSpecification spec,
                                         final IOperatorDescriptor op,
                                         final FileSplit[] splits) {
    String[] parts = new String[splits.length];
    for (int i = 0; i < splits.length; ++i) {
      parts[i] = splits[i].getNodeName();
    }
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, parts);
  }

  /**
   * Reflectively instantiates {@code className} via its no-arg constructor and
   * checks that the instance is assignable to {@code returnType}.
   *
   * @param className package-qualified name of the class to instantiate.
   * @param errorHint human-readable role of the class, used in error messages.
   * @param returnType the interface/base class the instance must conform to.
   * @return the new instance, guaranteed assignable to {@code returnType}.
   * @throws RuntimeException wrapping any reflection failure (class missing,
   *           abstract, inaccessible constructor, or wrong type).
   */
  private Object
      instantiateClass(final String className, final String errorHint,
                       @SuppressWarnings("rawtypes") final Class returnType) {
    try {
      @SuppressWarnings("rawtypes") Class theClass = Class.forName(className);
      Object theInstance = theClass.newInstance();
      // this .cast() lets us handle the ClassCastException here
      return returnType.cast(theInstance);
    } catch (ClassNotFoundException ex) {
      String message = "%s %s class must be included in the application assembly.";
      LOG.severe(String.format(message, ex, errorHint));
      throw new RuntimeException(ex);
    } catch (InstantiationException ex) {
      String message = "%s %s class must be concrete.";
      LOG.severe(String.format(message, ex, errorHint));
      throw new RuntimeException(ex);
    } catch (IllegalAccessException ex) {
      String message = "%s %s class must have a no-arg constructor.";
      LOG.severe(String.format(message, ex, errorHint));
      throw new RuntimeException(ex);
    } catch (ClassCastException ex) {
      String message = "%s %s class must extend %s";
      LOG.severe(String.format(message, ex, errorHint, returnType.getName()));
      throw new RuntimeException(ex);
    }
  }

  /**
   * Parses a comma-separated list of {@code <node-name>:<path>} file splits.
   *
   * @param fileSplits e.g. {@code "nc1:/data/a.txt,nc2:/data/b.txt"}.
   * @return one {@code FileSplit} per entry, in input order.
   * @throws IllegalArgumentException if an entry has no {@code ':'} separator.
   */
  private FileSplit[] parseFileSplits(final String fileSplits) {
    String[] splits = fileSplits.split(",");
    FileSplit[] fSplits = new FileSplit[splits.length];
    for (int i = 0; i < splits.length; ++i) {
      String s = splits[i].trim();
      int idx = s.indexOf(':');
      if (idx < 0) {
        // Include the offending entry so the user can find the bad split.
        String message = "File split not well formed: " + s;
        throw new IllegalArgumentException(message);
      }
      fSplits[i] =
          new FileSplit(s.substring(0, idx),
                        new FileReference(new File(s.substring(idx + 1))));
    }
    return fSplits;
  }

  /**
   * Deploys the application assembly, runs the index job to completion, and
   * always destroys the application afterwards.  Prints
   * {@code "<start> <end> <elapsed-ms>"} to stderr on success.
   *
   * @throws Exception if connecting to the cluster controller, deploying the
   *           assembly, or running the job fails.
   */
  protected void Run() throws Exception {
    String harFilePath = this.options.har;
    File harFile = new File(harFilePath);
    IHyracksClientConnection hcc =
        new HyracksConnection(this.options.host, this.options.port);
    hcc.createApplication(this.options.app, harFile);

    try {
      this.inSplits = this.parseFileSplits(this.options.inFileSplits);
      JobSpecification job =
          this.createJob(this.options.htSize, this.options.sbSize);

      long start = System.currentTimeMillis();
      JobId jobId = hcc.createJob(this.options.app, job);
      hcc.start(jobId);
      LOG.info("Indexer job started.  Sleeping....");
      hcc.waitForCompletion(jobId);
      long end = System.currentTimeMillis();
      System.err.println(start + " " + end + " " + (end - start));
    } finally {
      // Clean up the deployed application even if the job failed.
      hcc.destroyApplication(this.options.app);
    }
  }

}