package process
import org.apache.hadoop.mapreduce.Mapper
import org.apache.hadoop.mapreduce.Reducer
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat
import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text


/** Shared plumbing for configuring and launching Hadoop MapReduce jobs
  * against the cluster at SITE. Each analysis object (Degree, Exp, Skill,
  * Salary) delegates here so the boilerplate lives in one place.
  */
object JobHelper:
    val conf = new Configuration
    // NOTE(review): cluster address and input file are hard-coded — consider
    // externalizing to configuration if environments vary.
    val SITE = "hdfs://192.168.10.101:9000/data"
    val DATA = "/positions.csv"
    val hdfs = new Path(SITE)

    /** Configures and synchronously runs one MapReduce job.
      *
      * @param jobName      display name of the job
      * @param jobClass     class used to locate the job jar (setJarByClass)
      * @param mapperClass  mapper implementation
      * @param reducerClass reducer implementation (also used as combiner —
      *                     assumes the reduce function is associative/commutative)
      * @param inputPaths   HDFS paths relative to SITE, joined as a comma list
      * @param outputPath   HDFS output path relative to SITE; deleted first if present
      * @return true if the job completed successfully
      */
    def run(
        jobName :String,
        jobClass :Class[?],
        mapperClass :Class[? <: Mapper[?, ?, ?, ?]], 
        reducerClass :Class[? <: Reducer[?, ?, ?, ?]], 
        inputPaths :Array[String], 
        outputPath :String,
        mapKeyClass :Class[?]=classOf[Text],
        mapValClass :Class[?]=classOf[Text],
        outKeyClass :Class[?]=classOf[Text], 
        outValClass :Class[?]=classOf[Text]): Boolean = 

      val input = inputPaths.map(s=>SITE+s).mkString(",")
      val output = new Path(SITE+outputPath)
      // Job.getInstance replaces the deprecated `new Job(conf, name)` constructor.
      val job = Job.getInstance(conf, jobName)

      // Remove previous output recursively; Hadoop fails the job if the
      // output directory already exists. (delete(Path) alone is deprecated.)
      output.getFileSystem(conf).delete(output, true)

      // Was hard-coded to Degree.getClass, ignoring the jobClass parameter —
      // use the caller-supplied class so the correct jar is shipped.
      job.setJarByClass(jobClass)

      job.setMapperClass(mapperClass)
      job.setReducerClass(reducerClass)
      job.setCombinerClass(reducerClass)

      job.setMapOutputKeyClass(mapKeyClass)
      job.setMapOutputValueClass(mapValClass)

      job.setOutputKeyClass(outKeyClass)
      job.setOutputValueClass(outValClass)
      FileInputFormat.addInputPaths(job, input)
      FileOutputFormat.setOutputPath(job, output)

      // Block until the job finishes; true enables progress logging.
      job.waitForCompletion(true)

/** Entry point: exports the processed result tables from HDFS to MySQL.
  * The MapReduce stages (Degree/Exp/Skill/Salary) are currently disabled
  * and assumed to have been run already.
  */
def main(args: Array[String]): Unit =
  // Degree.run
  // Exp.run
  // Skill.run
  // Salary.run
  // Each result set is transferred to a MySQL table of the same name.
  for table <- Seq("degree", "skill", "exp", "salary") do
    Hdfs.trans2mysql(table, table)