package com.huang.week9.spark

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer


object DistCp {

  /**
   * Spark-based distributed copy (DistCp-like) driver.
   *
   * Expected arguments:
   *   args(0) = Spark master URL
   *   args(1) = source directory path
   *   args(2) = destination directory path
   *   args(3), args(4) = flags, each either "ignore=<true|false>" or "max=<int>"
   *     - ignore: when true (default), per-file/per-dir failures are logged and skipped;
   *       when false, the first failure aborts the job.
   *     - max: number of RDD partitions (i.e. max concurrent copy tasks), default 5.
   */
  def main(args: Array[String]): Unit = {
    val appName = "DistCP-yyf"

    require(args.length >= 5,
      "usage: DistCp <master> <sourcePath> <destPath> <ignore=...|max=...> <ignore=...|max=...>")

    val master = args(0)
    val sourcePath: String = args(1)
    val destPath: String = args(2)
    var ignore: Boolean = true
    var max: Int = 5

    // Parse one "ignore=..." or "max=..." flag, updating the defaults above.
    // Malformed values fall back to the defaults instead of crashing
    // (the original code threw MatchError / NumberFormatException here).
    def parseFlag(arg: String): Unit = {
      if (arg.startsWith("ignore")) {
        arg.replace("ignore", "").replace("=", "").replace(" ", "") match {
          case "true"  => ignore = true
          case "false" => ignore = false
          case other   => println(s"unrecognized ignore value '$other', keeping default $ignore")
        }
      } else if (arg.startsWith("max")) {
        val mParam = arg.replace("max", "").replace("=", "").replace(" ", "")
        try {
          max = Integer.valueOf(mParam)
        } catch {
          case _: NumberFormatException =>
            println(s"unrecognized max value '$mParam', keeping default $max")
        }
      }
    }

    parseFlag(args(3))
    parseFlag(args(4))

    /**
     * Recursively mirrors the directory tree under `sourcePath` into `targetPath`
     * on the driver, and accumulates (sourceFile, targetDir) pairs into `fileList`
     * for the distributed copy phase.
     */
    def buildDir(sc: SparkContext, sourcePath: Path, targetPath: Path,
                 fileList: ArrayBuffer[(Path, Path)], ignore: Boolean): Unit = {
      val fs = FileSystem.get(sc.hadoopConfiguration)
      fs.listStatus(sourcePath).foreach { status =>
        if (status.isDirectory) {
          // stripPrefix instead of String.split: split() interprets its argument
          // as a regex, which breaks on paths containing regex metacharacters.
          val subPath = status.getPath.toString.stripPrefix(sourcePath.toString)
          val nextTargetPath = new Path(targetPath + subPath)
          try {
            fs.mkdirs(nextTargetPath)
          } catch {
            // ignore=true means "ignore failures": log and keep going.
            // (The original had this condition inverted and threw when ignoring.)
            case ex: Exception => if (ignore) println(ex.getMessage) else throw ex
          }
          buildDir(sc, status.getPath, nextTargetPath, fileList, ignore)
        } else {
          fileList.append((status.getPath, targetPath))
        }
      }
    }

    /**
     * Copies every (sourceFile, targetDir) pair across `concurrentCount`
     * partitions. Side-effect-only job, so foreachPartition is used rather
     * than mapPartitions+collect.
     */
    def copy(sc: SparkContext, fileList: ArrayBuffer[(Path, Path)],
             ignore: Boolean, concurrentCount: Int): Unit = {
      // Guard against a non-positive partition count.
      val numSlices = math.max(1, concurrentCount)
      // Ship plain strings to executors: Hadoop Path is not Serializable
      // on all Hadoop versions.
      val pairs = fileList.map { case (src, dst) => (src.toString, dst.toString) }.toSeq
      sc.makeRDD(pairs, numSlices).foreachPartition { ite =>
        val config = new Configuration()
        ite.foreach { case (srcStr, dstStr) =>
          val src = new Path(srcStr)
          val dst = new Path(dstStr)
          try {
            // deleteSource = false: copy, don't move.
            FileUtil.copy(src.getFileSystem(config), src, dst.getFileSystem(config), dst, false, config)
          } catch {
            case ex: Exception => if (ignore) println(ex.getMessage) else throw ex
          }
        }
      }
    }

    val sparkConfig = new SparkConf().setAppName(appName).setMaster(master)
    val sc = new SparkContext(sparkConfig)
    try {
      val fileList: ArrayBuffer[(Path, Path)] = new ArrayBuffer[(Path, Path)]()
      buildDir(sc, new Path(sourcePath), new Path(destPath), fileList, ignore)
      copy(sc, fileList, ignore, max)
    } finally {
      // Always release cluster resources, even if the copy fails.
      sc.stop()
    }
  }
}
