package com.spark

import org.apache.spark.{SerializableWritable, SparkConf, SparkContext}

import java.io.StringWriter
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SerializableWritable, SparkConf}
import org.apache.spark.SerializableWritable
import org.apache.spark.broadcast.Broadcast

import scala.collection.mutable
import scala.tools.cmd.CommandLineParser
import scala.tools.reflect.StdRuntimeTags.{m, u}

object SparkCopy {
  /**
   * Recursively mirrors, under `outPatch`, the directory structure found
   * under `sPath`.
   *
   * @param sPath     directory currently being scanned
   * @param fs        Hadoop file-system handle
   * @param inputPath root of the source tree (used to compute relative paths)
   * @param outPatch  root of the target tree
   */
  def checkDirector(sPath: String, fs : FileSystem,inputPath: String, outPatch: String): Unit = {
    val fileStatus = fs.listStatus(new Path(sPath))
    for (i<-0 until fileStatus.length){
      val file = fileStatus(i)
      if(file.isDirectory){
        val dirName = file.getPath.toString.split(inputPath)(1)
        val newToPath = new Path(outPatch + dirName)
        // BUG FIX: the original condition was inverted — it called mkdirs only
        // when the directory already existed, so nothing was ever created.
        if(!fs.exists(newToPath)){
          fs.mkdirs(newToPath)
        }
        // Recurse into subdirectories so nested directories are mirrored too
        // (fs.listFiles in main() walks the source tree recursively, so the
        // full target tree must exist before the copy starts).
        checkDirector(inputPath + dirName, fs, inputPath, outPatch)
      }
    }
  }

  /**
   * Entry point: copies everything under `s_path` into `t_path` in parallel
   * using Spark workers (a minimal distcp). Exits with status 1 when the
   * target already contains files or the source does not exist.
   */
  def main(args: Array[String]): Unit = {
    // Source and target directories.
    val s_path = "/Users/zhaoguanglai/hadoop学习资料/0815work/resource/"
    val t_path = "/Users/zhaoguanglai/hadoop学习资料/0815work/target_sc/"
    val conf = new SparkConf().setMaster("local[*]").setAppName("local")
    val sc = new SparkContext(conf)
    val configuration = sc.hadoopConfiguration
    val fs = FileSystem.get(configuration)
    val m = 10      // number of partitions (distcp-style "m" parameter)
    val hasI = true // "i" parameter (ignore failures) — currently unused
    val srcPath = new Path(s_path)
    val toPath = new Path(t_path)
    // Refuse to overwrite a non-empty target.
    if(fs.exists(toPath) && fs.listStatus(toPath).length > 0){
      System.exit(1)
    }
    // (source path, target path) pairs to be copied by the workers.
    val files = mutable.MutableList[(String, String)]()
    // BUG FIX: the original tested the same `fs.exists(srcPath)` in both the
    // `if` and the `else if`, making the single-file branch unreachable.
    // Distinguish "existing directory" from "existing file" instead.
    if(fs.exists(srcPath) && fs.getFileStatus(srcPath).isDirectory){
      // Mirror the directory tree first so every copy target has a parent.
      checkDirector(s_path, fs, s_path, t_path)
      // Build the source->target file mapping (recursive listing).
      val listFiles = fs.listFiles(srcPath, true)
      while (listFiles.hasNext){
        val next = listFiles.next()
        val fileSPath = next.getPath.toUri.getPath
        val fileTPath = t_path + fileSPath.split(s_path)(1)
        files += ((fileSPath, fileTPath))
      }
    }else if(fs.exists(srcPath)){
      // Source is a single file: copy it directly to the target path.
      files += ((s_path, t_path))
    }else{
      // Source missing: nothing to do.
      System.exit(1)
    }

    if (files.nonEmpty) {
      // Ship the Hadoop configuration to the executors; Configuration itself
      // is not Serializable, hence the SerializableWritable wrapper.
      val broadcast = sc.broadcast(new SerializableWritable(configuration))
      distSparkCopy(sc, files, m, broadcast)
    }
  }

  /**
   * Copies each (source, target) pair in parallel across `m` partitions and
   * prints one status line per copied file on the driver.
   *
   * @param sc        active Spark context
   * @param files     (source path, target path) pairs to copy
   * @param m         number of partitions to spread the work over
   * @param broadcast broadcast Hadoop configuration for the executors
   */
  def distSparkCopy(sc: SparkContext, files: mutable.MutableList[(String, String)], m: Int, broadcast: Broadcast[SerializableWritable[Configuration]]) = {
    val copied = sc.makeRDD(files, m).mapPartitions(x => {
      // BUG FIX: the original declared an untyped `MutableList()` (inferred
      // MutableList[Nothing]) and called `.map` — which returns a new list
      // that was discarded — so no status message was ever accumulated.
      val result = mutable.MutableList[String]()
      val configuration = broadcast.value.value
      val fs = FileSystem.get(configuration)

      while (x.hasNext){
        val next = x.next()
        val src = new Path(next._1)
        val dst = new Path(next._2)
        // deleteSource = false: plain copy, source is left in place.
        FileUtil.copy(fs, src, fs, dst, false, configuration)
        result += ("copy " + next._1 + " to " + next._2 + " success by worker #")
      }
      result.iterator
    }).collect()
    // BUG FIX: the collected result was previously dropped into an unused
    // local; surface the per-file status lines on the driver.
    copied.foreach(println)
  }
}
