package cn.kaiux.bigdata.homework.week05_06

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.io.IOUtils
import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD

import java.io.FileNotFoundException
import java.net.URI
import java.util.UUID
import java.util.concurrent.{ConcurrentHashMap, Executors, TimeUnit}
import scala.collection.JavaConverters._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success, Try}

/**
 * @author kaiux@foxmail.com
 * @date 2022/6/11 11:30
 */
object FileUtils extends Logging with Serializable {


  /**
   * Adapts Hadoop's [[RemoteIterator]] to a Scala [[Iterator]] so listing
   * results can be consumed with standard collection combinators
   * (e.g. `foreach` in `listFiles`).
   */
  private implicit class ScalaRemoteIterator[T](it: RemoteIterator[T]) extends Iterator[T] {
    override def hasNext: Boolean = it.hasNext
    override def next(): T = it.next()
  }

  /**
   * Recursively lists everything under `path` using `threads` worker threads.
   *
   * Each result element pairs a discovered entry with the sequence of its
   * ancestor directories (root first, including the root `path` itself).
   *
   * @param fs      filesystem to list; each worker builds its own handle from
   *                the same URI and configuration
   * @param path    root path to traverse
   * @param threads number of concurrent listing threads
   * @throws RuntimeException if a symlink is encountered or the work queue was
   *                          not fully drained
   * @throws Exception        the first exception recorded by any worker
   */
  def listFiles(fs: FileSystem, path: Path, threads: Int): Seq[(FileStatusWithType, Seq[FileStatusWithType])] = {

    val pathRoot = Some(FileStatusWithType(fs.getFileStatus(path)))

    // Results from all workers; seeded with the root entry itself.
    val processed = new java.util.concurrent.LinkedBlockingQueue[(FileStatusWithType, Seq[FileStatusWithType])](pathRoot.map((_, Seq.empty)).toSeq.asJava)
    // Pending (directory, ancestor-chain) pairs; used as a stack
    // (addFirst/pollFirst) giving a roughly depth-first traversal.
    val toProcess = new java.util.concurrent.LinkedBlockingDeque[(Path, Seq[FileStatusWithType])](List((path, pathRoot.toSeq)).asJava)
    val exceptions = new java.util.concurrent.ConcurrentLinkedQueue[Exception]()
    // Per-worker busy flags: a worker keeps polling while ANY worker is busy,
    // because a busy worker may still enqueue more directories.
    val threadsWorking = new ConcurrentHashMap[UUID, Boolean]()

    class FileLister extends Runnable {

      // Private FileSystem handle per worker, built from the caller's URI/conf.
      private val localFS = FileSystem.get(fs.getUri, fs.getConf)

      private val uuid = UUID.randomUUID()
      threadsWorking.put(uuid, true)

      override def run(): Unit = {
        while (threadsWorking.containsValue(true)) {
          Try(
            Option(toProcess.pollFirst(50, TimeUnit.MILLISECONDS))
          ).toOption.flatten match {
            case None => threadsWorking.put(uuid, false)
            case Some(p) =>
              logDebug(s"Thread [$uuid] searching [${p._1}], waiting to process depth [${toProcess.size()}]")
              threadsWorking.put(uuid, true)
              try {
                localFS
                  .listLocatedStatus(p._1)
                  .foreach {
                    case l if l.isSymlink =>
                      throw new RuntimeException(s"Link [$l] is not supported")
                    case d if d.isDirectory =>
                      // Record the directory, then queue it for its own listing
                      // with an extended ancestor chain.
                      val s = FileStatusWithType(d)
                      toProcess.addFirst((d.getPath, p._2 :+ s))
                      processed.add((s, p._2))
                    case f =>
                      processed.add((FileStatusWithType(f), p._2))
                  }
              } catch {
                // Record and continue; failures are surfaced after all workers stop.
                case e: Exception => exceptions.add(e)
              }
          }
        }
      }
    }

    val pool = Executors.newFixedThreadPool(threads)

    logInfo(s"Beginning recursive list of [$path]")
    try {
      // Block on the Java futures directly. The previous implementation wrapped
      // each blocking get() in a scala.concurrent.Future on the global
      // ExecutionContext and Await'ed the lot, needlessly occupying
      // global-pool threads just to wait.
      List
        .fill(threads)(new FileLister)
        .map(pool.submit)
        .foreach(_.get())
    } finally {
      // Always release the pool, even if a worker's future completed abnormally.
      pool.shutdown()
    }

    if (!toProcess.isEmpty)
      throw new RuntimeException(
        "Exception listing files, toProcess queue was not empty"
      )

    if (!exceptions.isEmpty) {
      val collectedExceptions = exceptions.iterator().asScala.toList
      collectedExceptions
        .foreach { e =>
          logError("Exception during file listing", e)
        }
      throw collectedExceptions.head
    }

    logInfo(s"Finished recursive list of [$path]")

    processed.iterator().asScala.toSeq

  }

  /**
   * Builds an RDD of copy definitions for every entry found under each source
   * URI, keyed by destination URI.
   *
   * Each entry's ancestor directories are mapped to their destination
   * equivalents so directory creation can precede the copy itself.
   */
  def getSourceFiles(sparkContext: SparkContext, sourceURIs: Seq[URI], destinationURI: URI, threads: Int): RDD[(URI, CopyDefinitionWithDependencies)] = {
    sourceURIs
      .map { sourceURI =>
        val sourcePath = new Path(sourceURI)
        val sourceFS = sourcePath.getFileSystem(sparkContext.hadoopConfiguration)
        val listing = listFiles(sourceFS, new Path(sourceURI), threads)
        sparkContext
          .parallelize(listing, threads)
          .map { case (status, ancestors) =>
            val folderCopies = ancestors.map { ancestor =>
              val ancestorDest = sourceURIToDestinationURI(ancestor.uri, sourceURI, destinationURI)
              SingleCopyDefinition(ancestor, ancestorDest)
            }
            val fileDest = sourceURIToDestinationURI(status.uri, sourceURI, destinationURI)
            CopyDefinitionWithDependencies(status, fileDest, folderCopies)
          }
      }
      .reduce(_ union _)
      .map(_.toKeyedDefinition)
  }

  /**
   * Lists every entry currently present under `destinationPath` and returns it
   * as an RDD keyed by the entry's URI; ancestor chains are discarded.
   */
  def getDestinationFiles(sparkContext: SparkContext, destinationPath: Path, threads: Int): RDD[(URI, FileStatusWithType)] = {
    val fs = destinationPath.getFileSystem(sparkContext.hadoopConfiguration)
    val listing = listFiles(fs, destinationPath, threads)
    sparkContext
      .parallelize(listing, threads)
      .map { case (status, _) => (status.getPath.toUri, status) }
  }

  /**
   * Maps a file URI located under `sourceURI` to its corresponding location
   * under `destinationURI`.
   *
   * The file is relativized against the *parent* of the source root, so the
   * source root's own name is preserved beneath the destination.
   */
  def sourceURIToDestinationURI(file: URI, sourceURI: URI, destinationURI: URI): URI = {
    // Parent of the source root; falls back to the source itself when at a
    // filesystem root (getParent is null there).
    val sourceParent = Option(new Path(sourceURI).getParent).map(_.toUri).getOrElse(sourceURI)
    val relativePath = sourceParent.relativize(file).getPath
    new Path(new Path(destinationURI), relativePath).toUri
  }

  /**
   * Qualifies a possibly-relative path against the default filesystem's URI
   * and current working directory.
   */
  def pathToQualifiedPath(hadoopConfiguration: Configuration, path: Path): Path = {
    val defaultFS = FileSystem.get(hadoopConfiguration)
    path.makeQualified(defaultFS.getUri, defaultFS.getWorkingDirectory)
  }

  /**
   * Copies a single source entry (directory or regular file) to its
   * destination and logs the outcome.
   *
   * @throws UnsupportedOperationException if the source is neither a regular
   *                                       file nor a directory
   */
  def doCopy(sourceFS: FileSystem, destFS: FileSystem, definition: SingleCopyDefinition, taskAttemptID: Long, ignoreErrors: Boolean): DistCPResult = {
    val source = definition.source
    val result: DistCPResult =
      if (source.isDirectory) createDirectory(destFS, definition, ignoreErrors)
      else if (source.isFile) copyFile(sourceFS, destFS, definition, taskAttemptID, ignoreErrors)
      else throw new UnsupportedOperationException(s"Given file is neither file nor directory. Copy unsupported: ${definition.source.getPath}")
    logInfo(result.getMessage)
    result
  }

  /**
   * Creates the destination directory for a directory copy definition.
   *
   * Creation is skipped if the directory already exists, and requires the
   * parent directory to exist (directories are expected to be created in
   * ancestor-first order by the caller). A concurrent creation surfacing as
   * [[FileAlreadyExistsException]] is treated as a skip, not a failure.
   *
   * @throws Exception rethrows the underlying failure when `ignoreErrors` is
   *                   false; otherwise failures are logged and recorded in the
   *                   returned [[DirectoryCopyResult]]
   */
  private def createDirectory(destFS: FileSystem, definition: SingleCopyDefinition, ignoreErrors: Boolean): DirectoryCopyResult = {
    val destPath = new Path(definition.destination)
    if (destFS.exists(destPath))
      DirectoryCopyResult(definition.source.getPath.toUri, definition.destination, CopyActionResult.SkippedAlreadyExists)
    else {
      val result = Try {
        if (destFS.exists(destPath.getParent)) {
          // NOTE(review): mkdirs' boolean return is ignored here; a false
          // return would be reported as Created — confirm this is acceptable.
          destFS.mkdirs(destPath)
          DirectoryCopyResult(definition.source.getPath.toUri, definition.destination, CopyActionResult.Created)
        } else
          throw new FileNotFoundException(s"Parent folder [${destPath.getParent}] does not exist.")
      }
        // Lost the exists/mkdirs race to another writer: the directory is
        // there now, which is the desired end state.
        .recover {
          case _: FileAlreadyExistsException =>
            DirectoryCopyResult(definition.source.getPath.toUri, definition.destination, CopyActionResult.SkippedAlreadyExists)
        }
      result match {
        case Success(v) => v
        case Failure(e) if ignoreErrors =>
          logError(s"Exception whilst creating directory [${definition.destination}]", e)
          DirectoryCopyResult(definition.source.getPath.toUri, definition.destination, CopyActionResult.Failed(e))
        case Failure(e) => throw e
      }
    }
  }

  /**
   * Copies one file to its destination, probing the destination first to
   * decide between a fresh copy and an overwrite.
   *
   * @throws Exception rethrows destination-stat failures (other than
   *                   file-not-found) when `ignoreErrors` is false
   */
  def copyFile(sourceFS: FileSystem, destFS: FileSystem, definition: SingleCopyDefinition, taskAttemptID: Long, ignoreErrors: Boolean): FileCopyResult = {
    val target = new Path(definition.destination)
    Try(destFS.getFileStatus(target)) match {
      case Success(_) =>
        // Destination already exists: copy over it, removing the old file.
        performCopy(sourceFS, definition.source, destFS, definition.destination, removeExisting = true, ignoreErrors, taskAttemptID)
      case Failure(_: FileNotFoundException) =>
        // Destination absent: plain copy.
        performCopy(sourceFS, definition.source, destFS, definition.destination, removeExisting = false, ignoreErrors, taskAttemptID)
      case Failure(e) if ignoreErrors =>
        logError(s"Exception whilst getting destination file information [${definition.destination}]", e)
        FileCopyResult(definition.source.getPath.toUri, definition.destination, definition.source.len, CopyActionResult.Failed(e))
      case Failure(e) => throw e
    }
  }

  /**
   * Copies one file to `dest` via a temporary file in the destination folder,
   * verifying the copied length before renaming the temp file into place.
   *
   * Steps: open source, stream bytes into `.sparkdistcp.<taskAttemptID>.<name>`,
   * compare lengths, optionally delete the existing destination, then rename.
   *
   * @param removeExisting delete an existing destination file before the
   *                       rename (reported as OverwrittenOrUpdated)
   * @param ignoreErrors   when true, failures are logged and reported in the
   *                       result instead of being thrown
   */
  def performCopy(sourceFS: FileSystem, sourceFile: FileStatusWithType, destFS: FileSystem, dest: URI, removeExisting: Boolean, ignoreErrors: Boolean, taskAttemptID: Long): FileCopyResult = {
    val destPath = new Path(dest)
    // Task-attempt-scoped temp name so concurrent attempts cannot collide.
    val tempPath = new Path(destPath.getParent, s".sparkdistcp.$taskAttemptID.${destPath.getName}")
    Try {
      var in: Option[FSDataInputStream] = None
      var out: Option[FSDataOutputStream] = None
      try {
        in = Some(sourceFS.open(sourceFile.getPath))
        if (!destFS.exists(tempPath.getParent)) throw new RuntimeException(s"Destination folder [${tempPath.getParent}] does not exist")
        out = Some(destFS.create(tempPath, false))
        IOUtils.copyBytes(in.get, out.get, sourceFS.getConf.getInt("io.file.buffer.size", 4096))
      } finally {
        // Close both streams even if closing the first one throws; the
        // previous sequential `in.foreach(close); out.foreach(close)` leaked
        // the output stream whenever `in.close()` failed. (A no-op
        // `catch { case e: Throwable => throw e }` was also removed.)
        try in.foreach(_.close())
        finally out.foreach(_.close())
      }
    }.map { _ =>
      // Guard against short writes before touching the real destination.
      val tempFile = destFS.getFileStatus(tempPath)
      if (sourceFile.getLen != tempFile.getLen) throw new RuntimeException(s"Written file [${tempFile.getPath}] length [${tempFile.getLen}] did not match source file [${sourceFile.getPath}] length [${sourceFile.getLen}]")
      if (removeExisting) {
        val res = destFS.delete(destPath, false)
        if (!res) throw new RuntimeException(s"Failed to clean up existing file [$destPath]")
      }
      if (destFS.exists(destPath)) throw new RuntimeException(s"Cannot create file [$destPath] as it already exists")
      val res = destFS.rename(tempPath, destPath)
      if (!res) throw new RuntimeException(s"Failed to rename temporary file [$tempPath] to [$destPath]")
    } match {
      case Success(_) if removeExisting => FileCopyResult(sourceFile.getPath.toUri, dest, sourceFile.len, CopyActionResult.OverwrittenOrUpdated)
      case Success(_) => FileCopyResult(sourceFile.getPath.toUri, dest, sourceFile.len, CopyActionResult.Copied)
      case Failure(e) if ignoreErrors =>
        logError(s"Failed to copy file [${sourceFile.getPath}] to [$destPath]", e)
        FileCopyResult(sourceFile.getPath.toUri, dest, sourceFile.len, CopyActionResult.Failed(e))
      case Failure(e) => throw e
    }
  }

  /**
   * Returns true when `child` lies at or beneath `parent` in the URI
   * hierarchy, i.e. `parent.relativize(child)` actually produces a relative
   * URI (a URI equal to `parent` itself also counts as a child).
   *
   * @throws RuntimeException if either URI lacks a scheme or an absolute path
   */
  def uriIsChild(parent: URI, child: URI): Boolean = {
    if (!parent.isAbsolute || !child.isAbsolute)
      throw new RuntimeException(s"URIs [$parent] and [$child] must have a scheme component.")
    if (!parent.getPath.startsWith("/") || !child.getPath.startsWith("/"))
      throw new RuntimeException(s"URIs [$parent] and [$child] must have an absolute path component.")
    // URI.relativize returns `child` unchanged when it cannot be relativized
    // against `parent` (different scheme/authority or non-prefix path).
    parent.relativize(child) != child
  }
}