package com.two

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

object FileCpTest {

  /**
   * Recursively walks `sourcePath` on the driver, mirroring its directory tree
   * under `targetPath` and collecting every (sourceFile, targetDir) pair into
   * `fileList` for the executors to copy later.
   *
   * @param sparkSession   active session; only its hadoopConfiguration is used
   * @param sourcePath     root directory to walk
   * @param targetPath     root directory to mirror into (directories are created eagerly)
   * @param fileList       accumulator mutated in place and also returned for chaining
   * @param ignoreFailures when true, mkdir failures are logged and skipped instead of thrown
   * @return the same `fileList` instance, extended with all files found
   */
  def copyPathDir(sparkSession: SparkSession, sourcePath: Path, targetPath: Path, fileList: ArrayBuffer[(Path, Path)], ignoreFailures: Boolean): ArrayBuffer[(Path, Path)] = {
    val fs = sourcePath.getFileSystem(sparkSession.sparkContext.hadoopConfiguration)
    fs.listStatus(sourcePath).foreach { status =>
      if (status.isDirectory) {
        // Compute the path relative to sourcePath with a literal prefix strip.
        // The old split(sourcePath.toString) treated the path as a REGEX and
        // broke on paths containing metacharacters such as '+' or '('.
        val relative = status.getPath.toString.stripPrefix(sourcePath.toString)
        val nextPath = new Path(targetPath + relative)
        try {
          fs.mkdirs(nextPath)
        } catch {
          // NonFatal: let OOM/interrupts propagate even when ignoring failures
          case NonFatal(ex) => if (!ignoreFailures) throw ex else println(ex.getMessage)
        }
        copyPathDir(sparkSession, status.getPath, nextPath, fileList, ignoreFailures)
      } else {
        // Files are not copied here; they are queued for the distributed copy
        fileList.append((status.getPath, targetPath))
      }
    }
    fileList
  }


  /**
   * Distributes the queued (sourceFile, targetDir) pairs across executors and
   * copies each file with Hadoop's FileUtil.copy (no delete of the source,
   * no overwrite flag — existing-file behavior is whatever FileUtil.copy does).
   *
   * @param sparkSession   session whose SparkContext hosts the job
   * @param fileList       pairs produced by copyPathDir
   * @param ignoreFailures when true, per-file copy failures are logged and skipped
   * @param maxTaskNum     desired partition count; non-positive values fall back to 5
   */
  def copy(sparkSession: SparkSession, fileList: ArrayBuffer[(Path, Path)], ignoreFailures: Boolean, maxTaskNum: Int): Unit = {
    val sc = sparkSession.sparkContext
    // The old Some(maxTaskNum).getOrElse(5) could NEVER fall back to 5
    // (Some(x) is always defined); guard against invalid partition counts here.
    val partitions = if (maxTaskNum > 0) maxTaskNum else 5
    // foreachPartition instead of mapPartitions(...).collect(): this is a pure
    // side-effecting action, and the old code returned an already-exhausted
    // iterator just to force evaluation.
    sc.makeRDD(fileList, partitions).foreachPartition { pairs =>
      // Built per partition on the executor; note it does NOT carry any custom
      // settings from the driver's hadoopConfiguration — TODO confirm that is intended.
      val hadoopConf = new Configuration()
      pairs.foreach { case (src, dstDir) =>
        try {
          FileUtil.copy(src.getFileSystem(hadoopConf), src, dstDir.getFileSystem(hadoopConf), dstDir, false, hadoopConf)
        } catch {
          // NonFatal so executor-killing errors (OOM etc.) still propagate
          case NonFatal(ex) => if (!ignoreFailures) throw ex else println(ex.getMessage)
        }
      }
    }
  }

  //提交命令：spark-submit --master spark://127.0.0.1:7077 --class com.two.FileCpTest  bigdata_rdd-1.0-SNAPSHOT.jar sourcePath targetPath -i true -m 1
  // 提交命令 (submit command):
  // spark-submit --master spark://127.0.0.1:7077 --class com.two.FileCpTest bigdata_rdd-1.0-SNAPSHOT.jar sourcePath targetPath -i true -m 1
  /**
   * Entry point. Expects two positional arguments (source and target path)
   * followed by optional flags: -i <boolean> to ignore failures, -m <int>
   * for the maximum task/partition count.
   */
  def main(args: Array[String]): Unit = {
    var ignoreFailures = false
    var maxTask = 5
    try {
      val sourcePath = args(0) // throws if missing -> caught below as a usage error
      val targetPath = args(1)
      // Scan flags starting AFTER the two positional args (the old loop started
      // at 0, so a path literally named "-i" would have been misparsed), and
      // bound-check i + 1 so a trailing flag with no value cannot throw AIOOBE.
      for (i <- 2 until args.length) {
        if (args(i) == "-i" && i + 1 < args.length) {
          ignoreFailures = args(i + 1).toBoolean
        }
        if (args(i) == "-m" && i + 1 < args.length) {
          maxTask = args(i + 1).toInt
        }
      }
      // NOTE(review): master("local") overrides the --master given to
      // spark-submit (see command above) — confirm this is intentional.
      val sparkSession = SparkSession.builder.appName("FileCpTest").master("local").getOrCreate()
      try {
        val fileList = new ArrayBuffer[(Path, Path)]
        copyPathDir(sparkSession, new Path(sourcePath), new Path(targetPath), fileList, ignoreFailures)
        copy(sparkSession, fileList, ignoreFailures, maxTask)
      } catch {
        case NonFatal(ex) => if (!ignoreFailures) throw ex else println(ex.getMessage)
      } finally {
        // The old code leaked the session; always release Spark resources.
        sparkSession.stop()
      }
    } catch {
      case NonFatal(ex) => println("参数错误" + ex.getMessage)
    }
  }

}
