/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hadoop

import java.io.File
import org.apache.hadoop.io.BytesWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.util.GenericOptionsParser
import parser._
import parser.analyzer._
import parser.Nodes._
import parser.DataTypes._
import aggr.Aggregators.aggrMap
import generator.Main.parse
import generator.Main.makeJarfile
import generator.TableValue
import generator.Helpers
import generator.Generator
import util.Tools
import util.PBUtils
import util.FileUtils
import util.OptionParser._

/**
 * Entry point of local generator.
 */
/**
 * Entry point of the Hadoop-backed generator: compiles a Sawzall-clone
 * script into a jar, wires up one or more MapReduce jobs over the given
 * input/output directories, runs them, and exits with a status that
 * reflects overall success.
 */
object GenMain {

  /**
   * Command-line options for the Hadoop generator. Inherits the common
   * generator options, removes NoInput, and adds two Hadoop-specific
   * flags.
   */
  object Options extends generator.Options {
    // --no-combiner: never install a combiner, even when the table's
    // aggregator reports one is available.
    object NoCombiner extends CmdOpt
    // --local: run with Hadoop's local (in-process) job tracker and the
    // local filesystem.
    // NOTE(review): identifier is misspelled ("Sceduler"); left unchanged
    // because it is part of this object's public surface.
    object LocalSceduler extends CmdOpt

    override def opts = super.opts.filterNot { x => x.id == NoInput } ++ List(
      OptDesc(NoCombiner,    List(), List("no-combiner"), NoArg, "Does not use combiner."),
      OptDesc(LocalSceduler, List(), List("local"),       NoArg, "Uses local scheduler."))
  }

  // Hadoop configuration key listing extra jars to ship with the job.
  val TMPJARS = "tmpjars"

  /**
   * Program entry point.
   *
   * Expected arguments (after Hadoop's generic options are stripped):
   *   <script-file> <indir> <outdir>
   *
   * Exit codes: 0 on success (or --help), 1 on option-parse error or job
   * failure, 2 on wrong argument count.
   */
  def main(args: Array[String]) {
    val conf = new Configuration()
    // Let Hadoop consume its generic options (-D, -fs, -jt, ...) first;
    // only the remainder is parsed by our own option parser.
    val otherArgs: Array[String] = (new GenericOptionsParser(conf, args)).getRemainingArgs()

    def usage() = Options.usage("szlcln-hadoop", "<script-file>", "<indir>", "<outdir>")
    val options = try {
      Options.parse(otherArgs)
    } catch {
      case e: OptException =>
      System.err.println(e.getMessage)
      usage()
      exit(1)
    }
    
    if (options.hasOpt(Options.Help)) {
      usage()
      exit(0)
    }

    if (options.args.length != 3) { 
      usage()
      exit(2)
    }

    // Positional arguments: the Sawzall-clone script, the input path, and
    // the output path (both interpreted by the configured filesystem).
    val src    = options.args(0)
    val indir  = new Path(options.args(1))
    val outdir = new Path(options.args(2))

    val start = System.currentTimeMillis;

    // Compile the script and package the generated code into a jar.
    // `tables` describes the script's output tables; `sVars` its static
    // variables; `fds` the protobuf file descriptors it references.
    val state = parse(src, options.hasOpt(Options.DumpTree))
    val (jarfile, tables, sVars) = makeJarfile(state, options.hasOpt(Options.LeaveFiles))
    val fds = state.fds

    val stop = System.currentTimeMillis;

    System.err.println("The compiling time: %d ms".format(stop - start));
    System.err.println("The javac time: %d ms".format(Tools.getTotalJavacTimeMillis))

    // --local: force the in-process job tracker and the local filesystem.
    if (options.hasOpt(Options.LocalSceduler)) {
      conf.set("mapred.job.tracker", "local")
      conf.set("fs.default.name", "file:///"); 
    }

    // Append the generated jar to any tmpjars already supplied (e.g. via
    // -libjars / generic options) so it is distributed with the job.
    val tmpjars = conf.getStrings(TMPJARS) match {
      case null => Nil
      case x    => x.toList
    }
    conf.setStrings(TMPJARS, (tmpjars ++ List("file://" + jarfile.getAbsolutePath)): _*)

    // --text: emit human-readable text output instead of protobuf tables.
    val isText = options.hasOpt(Options.Text)

    // Serialize the compiled tables, static variables, and protobuf
    // descriptors into the job configuration for the mapper/reducer side.
    val c = SCConfiguration(
      tables, sVars,
      PBUtils.FileDescriptorList2FileDescriptorSet(fds))
    c.setTo(conf)

    val job = new Job(conf, "%s(Sawzall Clone)".format(src))

    job.setJarByClass(GenMain.getClass)

    job.setInputFormatClass(classOf[PBTableInputFormat])

    job.setMapperClass(classOf[GenMapper])
    job.setOutputKeyClass(classOf[BytesWritable])
    job.setOutputValueClass(classOf[BytesWritable])

    if (tables.size > 1) {
        // Multi-table scripts run in two phases:
        //   1. a single map-only-style job that demultiplexes records into
        //      one subdirectory of <outdir>/tmp per table id, then
        //   2. one reduce job per table, all submitted concurrently.
        job.setReducerClass(classOf[SCReducer])
        job.setOutputFormatClass(classOf[PBMultiTableOutputFormat])

        val tmp_dir = new Path(outdir, "tmp")

        FileInputFormat.addInputPath(job, indir)
        FileOutputFormat.setOutputPath(job, tmp_dir)

        if (!job.waitForCompletion(true)) {
          System.err.println("Mapper job failed.")
          exit(1)
        }

        val fs = FileSystem.get(conf)
        // Build one follow-up job per table that produced output in phase 1.
        // NOTE(review): the same `conf` instance is mutated ("sc.tableId")
        // on each iteration; this relies on `new Job(conf, ...)` taking a
        // snapshot of the configuration — confirm against the Hadoop
        // version in use.
        val jobs = tables.flatMap { table =>
          // This must equal key of GenMapper.MultiEmitter class.
          val tableInputPath = new Path(tmp_dir, table.id.toString)

          if (fs.exists(tableInputPath)) {
            conf.set("sc.tableId", table.id.toString)

            val job = new Job(conf, "%s - %s(Sawzall Clone)".format(table.id, src))

            job.setJarByClass(GenMain.getClass)

            job.setInputFormatClass(classOf[PBTableInputFormat])

            // Identity-style mapper: phase 1 already routed records, so
            // phase 2 only has to pass them through to the reducer.
            job.setMapperClass(classOf[SCThroughMapper])
            job.setOutputKeyClass(classOf[BytesWritable])
            job.setOutputValueClass(classOf[BytesWritable])

            if (isText) {
              job.setReducerClass(classOf[SCTextReducer])
              job.setOutputFormatClass(classOf[TextOutputFormat[Text, Text]])
            } else {
              job.setReducerClass(classOf[SCReducer])
              job.setOutputFormatClass(classOf[PBTableOutputFormat])
            }
            FileInputFormat.addInputPath(job, tableInputPath)
            FileOutputFormat.setOutputPath(job, new Path(outdir, "table-%03d-%s".format(table.id, table.name)))

            Some(job)
          } else {
            // The table produced no output in phase 1; nothing to reduce.
            None
          }
        }

        // Submit all per-table jobs first so they run concurrently, then
        // wait for each one to finish.
        for (job <- jobs) {
          job.submit()
        }

        val results = for (job <- jobs) yield {
          job.waitForCompletion(true)
        }

        // Succeed only if every per-table job succeeded.
        exit(if (results.foldLeft(true) {_ && _} ) 0 else 1)

    } else {
      // Single-table (or zero-table) script: one job does everything.
      // Install a combiner only when exactly one table exists, its
      // aggregator supports combining, and --no-combiner was not given.
      tables match {
        case List(table) =>
          if (!options.hasOpt(Options.NoCombiner) && aggrMap(table.aggr).hasCombiner) {
            job.setCombinerClass(classOf[SCCombiner])
          }
        case _ =>
      }

      if (isText) {
        job.setReducerClass(classOf[SCTextReducer])

        job.setOutputFormatClass(classOf[TextOutputFormat[Text, Text]])
      } else {
        job.setReducerClass(classOf[SCReducer])

        job.setOutputFormatClass(classOf[PBTableOutputFormat])
      }
      FileInputFormat.addInputPath(job, indir)
      FileOutputFormat.setOutputPath(job, outdir)

      exit(if (job.waitForCompletion(true)) 0 else 1)
    }
  }
}
