package com.github.dtkavin.bgq.path

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataOutputStream, FileSystem, Path, PathFilter}
import org.slf4j.LoggerFactory

import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}

/**
  * Function: full-path checker for the data warehouse — inventories every
  * warehouse path with its size and last-update time.
  * #Author: john
  * #Create: 2018-03-15 13:44
  */

/**
  * Parsed components of one warehouse HDFS path.
  *
  * `flag` encodes the parse outcome (per the commented-out construction sites
  * in this file): 1 = parsed warehouse path, 0 = "warehouse" did not split the
  * path into exactly two parts, -1 (default) = not a warehouse path.
  * All other fields default to the empty string when absent.
  */
case class PathInfo(
    flag: Int = -1,
    base: String = "",
    db: String = "",
    table: String = "",
    dt: String = "",
    hour: String = "",
    ptl: String = "",
    et: String = "",
    tp: String = "",
    others: String = "")

// Records, for every path in the data warehouse, its size and update time.
object WarehousePathChecker {
  // Shared SLF4J logger for this object.
  val logger = LoggerFactory.getLogger(WarehousePathChecker.getClass)
  // Field separator for the CSV-style output lines.
  // NOTE(review): misspelling of "SEPARATOR" kept — the name is public API.
  val SEPERATOR = ","

  /**
    * Writes one CRLF-terminated record for a single leaf node:
    * size, modification time, path — joined by SEPERATOR.
    */
  def writeNodeInfo(node: Path, writer: FSDataOutputStream)(implicit hdfsFs: FileSystem): Unit = {
    val status = hdfsFs.getFileStatus(node)
    val record = Seq(status.getLen, status.getModificationTime, status.getPath).mkString(SEPERATOR)
    writer.write(s"$record\r\n".getBytes)
  }

  /**
    * Parses a warehouse HDFS path into its component map.
    *
    * The "hdfs://apus-cloud" authority prefix is stripped first. If the
    * remainder contains "warehouse" exactly once (splitting yields two parts),
    * the returned map holds flag = "1", base = everything up to and including
    * "warehouse", plus any of db / table / dt / hour / pn / ptl / et / pkg /
    * type recognized in the trailing segments; unrecognized segments are
    * accumulated under "others" as ";<idx>_<value>". If "warehouse" occurs but
    * does not split into two parts, flag = "0" with the whole path in
    * "others". Non-warehouse paths return only "others" (flag defaults to
    * "-1" downstream, see PathInfo2mkString).
    *
    * Fixed vs. the previous version: removed the unused local `flag`, the
    * redundant `mp.clear()` on a freshly built map, and dead commented-out
    * code; replaced boolean `match` with if/else.
    */
  def packagePathInfo(originPath: String): Map[String, String] = {
    val path = originPath.replace("""hdfs://apus-cloud""", "")
    if (!path.contains("warehouse")) {
      // Not under a warehouse directory at all.
      Map("others" -> path)
    } else {
      val baseArr = path.split("""warehouse""")
      if (baseArr.length != 2) {
        // "warehouse" appears more than once (or terminates the path):
        // the split is ambiguous, so only the raw path is kept.
        Map("flag" -> "0", "others" -> path)
      } else {
        val mp = scala.collection.mutable.HashMap[String, String](
          "base" -> s"${baseArr(0)}warehouse",
          "flag" -> "1")
        // Classify each "/"-separated segment after the warehouse root.
        // The length guards prevent matching empty values such as "dt=".
        // Note the key is taken from the segment's own left-hand side, so
        // e.g. "update_dt=x" is stored under "update_dt".
        baseArr(1).split("/").zipWithIndex.foreach { case (segment, idx) =>
          segment match {
            case v if v.endsWith(".db") => mp.put("db", v)
            case v if v.contains("dt=") && v.length > 3 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("hour=") && v.length > 5 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("pn=") && v.length > 3 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("ptl=") && v.length > 4 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("et=") && v.length > 3 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("pkg=") && v.length > 4 => val a = v.split("="); mp.put(a(0), a(1))
            case v if v.contains("type=") && v.length > 5 => val a = v.split("="); mp.put(a(0), a(1))
            // By convention segment index 2 ("/<db>.db/<table>") is the table.
            case v if idx == 2 => mp.put("table", v)
            case v => mp.put("others", s"${mp.getOrElse("others", "")};${idx}_${v}")
          }
        }
        mp.toMap
      }
    }
  }

  /**
    * Serializes a parsed-path map into a comma-joined row with a fixed column
    * order: flag, base, db, table, dt, hour, pn, pkg, ptl, et, type, others.
    * A missing "flag" defaults to "-1"; any other missing column is empty.
    * (Name kept as-is — it is public API.)
    */
  def PathInfo2mkString(mp: Map[String, String]): String = {
    val columns = Seq("flag", "base", "db", "table", "dt", "hour", "pn", "pkg", "ptl", "et", "type", "others")
    columns
      .map(col => mp.getOrElse(col, if (col == "flag") "-1" else ""))
      .mkString(",")
  }

  /**
    * Writes one CRLF-terminated record for a leaf-parent directory `node`:
    * total size of its direct children, child count, modification time,
    * parsed path columns (PathInfo2mkString), and the raw path.
    *
    * Fixed vs. the previous version: `listStatus` was called twice (two
    * NameNode round-trips for the same listing) and failures were logged at
    * debug level with the message only, hiding the stack trace.
    */
  def writeLeafParentInfo(node: Path, writer: FSDataOutputStream)(implicit hdfsFs: FileSystem): Unit = {
    val fileStatus = hdfsFs.getFileStatus(node)
    // List the directory once and derive both the size sum and the count.
    val children = hdfsFs.listStatus(node)
    val len = children.map(_.getLen).sum
    val leafSize = children.length
    Try {
      // size, child count, modification time, parsed columns, raw path
      Array(len, leafSize, fileStatus.getModificationTime, PathInfo2mkString(packagePathInfo(fileStatus.getPath.toString)), fileStatus.getPath.toString)
    } match {
      case Success(fileInfo) =>
        // Write failures still propagate to the caller, as before.
        writer.write(s"${fileInfo.mkString(SEPERATOR)}\r\n".getBytes)
      case Failure(ex) =>
        logger.warn(s"failed to build record for ${node}", ex)
    }
  }

  /**
    * Recursively visits every leaf (plain file) under `node` and writes one
    * record per file via writeNodeInfo.
    */
  def foldWithInDeal(node: Path, writer: FSDataOutputStream)(implicit hdfsFs: FileSystem): Unit = {
    if (hdfsFs.isFile(node)) {
      writeNodeInfo(node, writer)
    } else {
      hdfsFs.listStatus(node).foreach(child => foldWithInDeal(child.getPath, writer))
    }
  }

  /**
    * Recursively finds "leaf parent" directories — directories whose children
    * are all plain files or "_*LOCK" entries — and writes one record per such
    * directory via writeLeafParentInfo. Non-directories are recorded only when
    * they do not look like data or marker files (log / parquet / csv /
    * part-* / _SUCCESS / _*LOCK are all skipped).
    *
    * Fixed vs. the previous version: the all-children check used a
    * non-short-circuiting foldLeft (issuing an isFile RPC for every child even
    * after the answer was known) and the discarded result of a `map`; the
    * Boolean `&` on L145-equivalent is now `&&`. An empty directory is still
    * treated as a leaf parent (forall on empty is vacuously true, matching the
    * old foldLeft seed of `true`).
    */
  def getAllLeafParents(node: Path, writer: FSDataOutputStream)(implicit hdfsFs: FileSystem): Unit = {
    if (hdfsFs.isDirectory(node)) {
      val isLeafParent = hdfsFs.listStatus(node).forall { fileStatus =>
        val path = fileStatus.getPath
        val name = path.getName
        // Parentheses make the original &&-over-|| precedence explicit.
        (name.endsWith("LOCK") && name.startsWith("_")) || hdfsFs.isFile(path)
      }
      if (isLeafParent) {
        writeLeafParentInfo(node, writer)
      } else {
        hdfsFs.listStatus(node).foreach(fileStatus => getAllLeafParents(fileStatus.getPath, writer))
      }
    } else {
      node.getName match {
        case name if name.endsWith("log") =>
        case name if name.endsWith("parquet") =>
        case name if name.endsWith("csv") =>
        case name if name.startsWith("part-") =>
        case name if name.startsWith("_SUCCESS") =>
        case name if name.endsWith("LOCK") && name.startsWith("_") =>
        case _ => writeLeafParentInfo(node, writer)
      }
    }
  }

  //  def pathFilter(dt:String,pn:String)

  /**
    * PathFilter that restricts directory listings by attribute constraints.
    *
    * @param kv supported keys: "pn" — the child's name must contain
    *           "pn=&lt;value&gt;"; "update_dt" is accepted but not yet
    *           implemented (TODO: needs FileStatus, not just Path);
    *           unknown keys do not filter.
    *
    * Bug fixed: the previous version computed the per-key checks, discarded
    * the result, and unconditionally returned false ("todo 临时编译"),
    * rejecting every path.
    */
  class HdfsPathFilter(kv: Map[String, String]) extends PathFilter {
    override def accept(path: Path): Boolean = {
      kv.forall {
        case ("pn", pn) => path.getName.contains(s"pn=${pn}")
        case ("update_dt", _) => true // TODO: implement modification-date filtering
        case _ => true // unknown constraints are ignored rather than rejecting
      }
    }
  }

  // Shared default filter. Idiom fix: `Map { ... }` (block application) is
  // replaced by the conventional `Map(...)`; the value is identical.
  // NOTE(review): "dt" is not a key the filter understands — TODO confirm
  // whether "update_dt" or "pn" was intended here.
  val pathFilter = new HdfsPathFilter(Map("dt" -> ""))

  /**
    * Like getAllLeafParents, but applies `pathFilter` when listing children,
    * intended to restrict the walk to a given update date.
    *
    * NOTE(review): the `dt` parameter is currently unused — the shared
    * `pathFilter` is built from a fixed Map — TODO wire `dt` into the filter.
    *
    * Fixed vs. the previous version: the recursion called the unfiltered
    * getAllLeafParents, silently dropping the filter for every subtree — it
    * now recurses into itself; the `var isLeaf` updated via a side-effecting
    * `map` with Boolean `&` is replaced by a short-circuiting forall; and the
    * non-directory skip list is aligned with getAllLeafParents (so _SUCCESS
    * and _*LOCK marker files are no longer recorded).
    */
  def getAllLeafParentsByDt(node: Path, dt: String, writer: FSDataOutputStream)(implicit hdfsFs: FileSystem): Unit = {
    if (hdfsFs.isDirectory(node)) {
      // Leaf parent here: every (filtered) child is a plain file.
      val isLeaf = hdfsFs.listStatus(node, pathFilter).forall(fileStatus => hdfsFs.isFile(fileStatus.getPath))
      if (isLeaf) {
        writeLeafParentInfo(node, writer)
      } else {
        hdfsFs.listStatus(node, pathFilter).foreach {
          fileStatus => getAllLeafParentsByDt(fileStatus.getPath, dt, writer)
        }
      }
    } else {
      node.getName match {
        case name if name.endsWith("log") =>
        case name if name.endsWith("parquet") =>
        case name if name.endsWith("csv") =>
        case name if name.startsWith("part-") =>
        case name if name.startsWith("_SUCCESS") =>
        case name if name.endsWith("LOCK") && name.startsWith("_") =>
        case _ => writeLeafParentInfo(node, writer)
      }
    }
  }

  /**
    * Entry point. args(0) = root path to scan, args(1) = HDFS output file.
    * Walks the tree with getAllLeafParents and writes one record per
    * leaf-parent directory.
    *
    * Fixed vs. the previous version: the bare `case _ =>` in the catch block
    * silently swallowed every Throwable (including fatal errors like OOM);
    * only NonFatal exceptions are now caught, and they are logged. Magic
    * create() arguments are named, and the doubled parentheses around
    * outPath removed.
    */
  def main(args: Array[String]): Unit = {
    if (args.length < 2) throw new IllegalArgumentException("Usage: genPath outPath")
    val genPath = args(0)
    val outPath = args(1)

    val conf = new Configuration()
    implicit val hdfsFs = FileSystem.get(conf)
    val bufferSize = 4096             // I/O buffer, bytes
    val replication: Short = 3        // HDFS replication factor
    val blockSize = 128L * 1024 * 1024 // 134217728: 128 MB block size
    val writer = hdfsFs.create(new Path(outPath), true, bufferSize, replication, blockSize)

    try {
      val gen = new Path(genPath)
      if (hdfsFs.exists(gen)) {
        getAllLeafParents(gen, writer)
      } else {
        logger.info(s"${gen} is not exists in file system!")
      }
    } catch {
      case NonFatal(ex) => logger.error(s"failed to scan ${genPath}", ex)
    } finally {
      writer.close()
      hdfsFs.close()
    }
  }
}
