package main

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.IntWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat
import org.apache.hadoop.util.GenericOptionsParser
import kotlin.system.exitProcess

// Marker class: exists only so setJarByClass() below can locate the jar
// that contains this driver when the job is submitted to the cluster.
class CourseScoreDriver

/**
 * Driver for the CourseScoreAverage MapReduce job.
 *
 * Usage: hadoop jar CourseScore.jar <in> [<in>...] <out>
 * All arguments but the last are input paths; the last is the output path.
 * Exits with 2 on bad arguments, 0 on job success, 1 on job failure.
 */
fun main(args: Array<String>) {
  // Cluster endpoint and submitting user.
  // NOTE(review): hard-coded NameNode address and root user — consider
  // supplying these via core-site.xml or -D options instead.
  val conf = Configuration()
  conf.set("fs.defaultFS", "hdfs://192.168.244.143:9000")
  System.setProperty("HADOOP_USER_NAME", "root")

  // Parse generic Hadoop options (-D, -files, -libjars, ...) BEFORE creating
  // the Job: Job.getInstance copies the Configuration, so options parsed
  // after job creation would silently never reach the job.
  val otherArgs = GenericOptionsParser(conf, args).remainingArgs
  if (otherArgs.size < 2) {
    System.err.println("Usage: hadoop jar CourseScore.jar <in> [<in>...] <out>")
    exitProcess(2)
  }

  val job = Job.getInstance(conf, "CourseScoreAverage")

  // Every argument except the last is an input path.
  for (i in 0 until otherArgs.size - 1) {
    FileInputFormat.addInputPath(job, Path(otherArgs[i]))
  }
  val outputPath = Path(otherArgs[otherArgs.size - 1])
  FileOutputFormat.setOutputPath(job, outputPath)

  // Remove a stale output directory so re-runs do not fail with
  // FileAlreadyExistsException (delete of a missing path is a no-op).
  val hdfs = FileSystem.get(conf)
  hdfs.delete(outputPath, true)

  job.setJarByClass(CourseScoreDriver::class.java)
  job.mapperClass = CourseScoreAverageMapper::class.java
  // NOTE(review): using the reducer as a combiner is only correct if its
  // operation is associative and commutative (e.g. sum/count pairs). If it
  // emits a plain average, the combiner produces an "average of averages",
  // which is wrong — verify the reducer before keeping this line.
  job.combinerClass = CourseScoreAverageReducer::class.java
  job.reducerClass = CourseScoreAverageReducer::class.java
  job.outputKeyClass = Text::class.java
  job.outputValueClass = IntWritable::class.java
  job.inputFormatClass = TextInputFormat::class.java
  job.outputFormatClass = TextOutputFormat::class.java

  // Run synchronously with progress logging; propagate success/failure.
  exitProcess(if (job.waitForCompletion(true)) 0 else 1)
}