package spark.youtube

import org.apache.spark.{SparkConf,SparkContext}

/** Spark ETL job for the raw YouTube video dataset.
  *
  * Reads tab-separated record files, normalizes each line (see [[etl1.transform]])
  * and writes the cleaned union of the inputs back to HDFS as text.
  */
object etl1 {

  /** Entry point: cleans input files 0 and 1 and saves their union.
    *
    * Paths are currently hard-coded; remaining input files (2.txt-4.txt)
    * can be added to the `inputFiles` list when needed.
    */
  def main(args: Array[String]): Unit = {
    val dirPath = "/input/mr/youtube/video/"
    val inputFiles = List(dirPath + "0.txt", dirPath + "1.txt")
    val output = "/output/spark/test"

    val conf = new SparkConf().setAppName("etl example")
    val sc = new SparkContext(conf)
    try {
      // Transform each file independently, then union them into one output.
      inputFiles
        .map(transform(sc, _))
        .reduce(_ union _)
        .saveAsTextFile(output)
    } finally {
      sc.stop() // release cluster resources even if the job fails
    }
  }

  /** Normalizes one raw record file into the cleaned line format.
    *
    * For each tab-separated line:
    *   - strips all spaces from column 3 (the category field), when present;
    *   - re-joins the first 10 columns with tabs;
    *   - joins any remaining columns (related-video ids) onto the end with '&'.
    *
    * Equivalent separator rule to the original loop: the separator following
    * field i is '\t' for i <= 8 and '&' for i >= 9.
    *
    * @param sc       active Spark context
    * @param filePath HDFS path of the raw input file
    * @return RDD of cleaned, single-string records
    */
  def transform(sc: SparkContext, filePath: String): org.apache.spark.rdd.RDD[String] = {
    sc.textFile(filePath).map { line =>
      val fields = line.split("\t")
      if (fields.length >= 4) fields(3) = fields(3).replace(" ", "")
      val head = fields.take(10).mkString("\t")
      val rest = fields.drop(10)
      if (rest.isEmpty) head else head + "&" + rest.mkString("&")
    }
  }
}