package com.example.bigdata.hadoop

import java.io.FileWriter

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
import org.apache.hadoop.security.UserGroupInformation
import org.apache.hadoop.util.StringUtils

import scala.io.Source

object hdpDemo {

    /**
     * Demo entry point: connects to the `jdhdp310001` name service with
     * Kerberos enabled and recursively deletes /tmp/test_map.
     */
    def main(args: Array[String]): Unit = {
        val dfs = dfsInit("jdhdp310001", isKerberos = true)
        delete(dfs, "/tmp/test_map")
        // Example usages kept for reference:
        // getDirLength(dfs, "/tmp")
        // val ecInputFile = "input/ecFile/du.size"
        // Source.fromFile(ecInputFile).getLines().foreach(line => eqDir(dfs, line))
    }

    /**
     * Builds a [[FileSystem]] handle for the given HDFS name service.
     *
     * Expects `conf/<hdfsNameServices>/core-site.xml` and `hdfs-site.xml` on the
     * local filesystem. When `isKerberos` is true it also expects `krb5.conf`
     * and `hadoop.keytab` in the same directory and performs a keytab login
     * before the FileSystem is created.
     *
     * @param hdfsNameServices logical HA name service id (also used as the conf sub-directory)
     * @param isKerberos       whether to perform a Kerberos keytab login
     * @return a FileSystem bound to the configured `fs.defaultFS`
     */
    def dfsInit(hdfsNameServices: String, isKerberos: Boolean): FileSystem = {
        val confDir = "conf/" + hdfsNameServices + "/"
        val conf = new Configuration
        conf.addResource(new Path(confDir + "core-site.xml"))
        conf.addResource(new Path(confDir + "hdfs-site.xml"))
        println("fs.defaultFS : " + conf.get("fs.defaultFS"))
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem")
        // HA failover proxy for the logical name service.
        conf.set("dfs.client.failover.proxy.provider." + hdfsNameServices,
            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider")
        if (isKerberos) {
            // krb5.conf must be set before the login; UGI reads it via JAAS.
            System.setProperty("java.security.krb5.conf", confDir + "krb5.conf")
            UserGroupInformation.setConfiguration(conf)
            val kerberosUser = "hadoop/bj-jd-dc-namenode-prod-0009.tendcloud.com@HADOOP.COM"
            UserGroupInformation.loginUserFromKeytab(kerberosUser, confDir + "hadoop.keytab")
        }
        FileSystem.get(conf)
    }

    /** Recursively deletes `deletefile` and prints whether the delete succeeded. */
    def delete(hdfs: FileSystem, deletefile: String): Unit = {
        println(hdfs.delete(new Path(deletefile), true))
    }

    /**
     * Recursively walks `namedir` (expected to end with "/") and prints every
     * erasure-coded file found, once while scanning and once from the collected set.
     *
     * NOTE(review): the previous version issued a stray destructive
     * `hdfs.delete(new Path("xxx"), true)` debug call on every invocation
     * (removed) and collected `namedir + dir.isDirectory` — i.e. the string
     * "<dir>false" — instead of the EC file path (fixed to the file path).
     */
    def getErasureCodedDir(hdfs: FileSystem, namedir: String): Unit = {
        val ecFiles = scala.collection.mutable.Set[String]()
        for (status <- hdfs.listStatus(new Path(namedir))) {
            val childPath = namedir + status.getPath.getName
            if (status.isDirectory) {
                getErasureCodedDir(hdfs, childPath + "/")
            }
            else if (status.isFile && status.isErasureCoded) {
                println(childPath + " -> " + status.isErasureCoded)
                ecFiles.add(childPath)
            }
        }
        ecFiles.foreach(println)
    }

    /**
     * Compares the total byte length of `namedir` against its "_noec" sibling
     * (the first path segment suffixed with "_noec") and prints
     * "<dir>\t<len>\t<noEcLen>" when the two differ. Errors (e.g. missing
     * sibling) are reported to stdout instead of propagating.
     *
     * Assumes `namedir` is an absolute path starting with "/" — TODO confirm
     * against the caller that feeds paths from du.size.
     */
    def eqDir(hdfs: FileSystem, namedir: String): Unit = {
        import scala.util.control.NonFatal
        val head = namedir.split("/")(1)
        // Suffix only the FIRST path segment. The previous regex-based
        // `head.r.replaceAllIn(...)` rewrote every occurrence of the segment
        // anywhere in the path and broke on regex metacharacters in `head`.
        val noEcDir: String = "/" + head + "_noec" + namedir.stripPrefix("/" + head)
        try {
            val length = hdfs.getContentSummary(new Path(namedir)).getLength
            val noEcDirLength = hdfs.getContentSummary(new Path(noEcDir)).getLength
            if (length != noEcDirLength) {
                println(s"${namedir}\t${length}\t${noEcDirLength}")
            }
        } catch {
            // NonFatal so OOM/interrupts still propagate (previously caught Exception).
            case NonFatal(ex) => println(ex.getMessage)
        }
    }

    /**
     * Recursively walks `namedir`, descending into child directories larger
     * than the threshold; every other child is appended to
     * output/ecOut/ecfile1.log (and stdout) as "<path>\t<path>_noec\t<size>".
     */
    def getDirLength(hdfs: FileSystem, namedir: String): Unit = {
        val ecFileLog = "output/ecOut/ecfile1.log"
        val out = new FileWriter(ecFileLog, true)
        // Directory-size threshold: 400 GB (100 GB variant kept for reference).
        // val minsize = 107374182400L
        val minsize = 429496729600L
        try {
            for (dir <- hdfs.listStatus(new Path(namedir))) {
                val dirpath = namedir + "/" + dir.getPath.getName
                val length = hdfs.getContentSummary(new Path(dirpath)).getLength
                if ((length > minsize) && dir.isDirectory) {
                    getDirLength(hdfs, dirpath)
                }
                else {
                    val info = s"${dirpath}\t${dirpath + "_noec"}\t${StringUtils.byteDesc(length)}\n"
                    println(info)
                    out.write(info)
                    out.flush()
                }
            }
        } finally {
            // Previously leaked when listStatus/getContentSummary threw.
            out.close()
        }
    }

    /**
     * Depth-first search under `namedir` (expected to end with "/"): on the
     * first erasure-coded file found, logs the containing directory's size and
     * the file path to output/ecfile.log, then stops scanning that level.
     */
    def getECDir(hdfs: FileSystem, namedir: String): Unit = {
        val ecFileLog = "output/ecfile.log"
        val out = new FileWriter(ecFileLog, true)
        import scala.util.control._
        val loop = new Breaks
        try {
            loop.breakable {
                for (dir <- hdfs.listStatus(new Path(namedir))) {
                    if (dir.isDirectory) {
                        getECDir(hdfs, namedir + dir.getPath.getName + "/")
                    }
                    else if (dir.isFile && dir.isErasureCoded) {
                        // The original `getName.split("/").dropRight(1).mkString(":")`
                        // was always "" (a component name never contains '/'), so the
                        // summarized path was effectively `namedir` — made explicit.
                        val length = hdfs.getContentSummary(new Path(namedir)).getLength
                        val strConsumed = StringUtils.byteDesc(length)
                        val dirinfo = namedir + "\t" + strConsumed + "\t" + namedir + dir.getPath.getName + "\n"
                        out.write(dirinfo)
                        println(dirinfo + "--> ecFileLog")
                        loop.break()
                    }
                }
            }
        } finally {
            // Previously close() ran inside the loop only; the writer leaked
            // whenever no EC file was found or an exception was thrown.
            out.close()
        }
    }

    /** Lists child paths of `in`, skipping _SUCCESS, _temporary and metadata entries. */
    def files(fs: FileSystem, in: Path): Array[Path] =
        for (s <- fs.listStatus(in)
             if s.getPath.getName != "_SUCCESS"
               && s.getPath.getName != "_temporary"
               && !s.getPath.getName.contains("metadata"))
        yield s.getPath()

    /** String-path convenience overload of [[files]]. */
    def files(fs: FileSystem, in: String): Array[Path] = files(fs, new Path(in))

    /** Opens `path` for reading on the given FileSystem. */
    def open(fs: FileSystem, path: Path): FSDataInputStream = fs.open(path)


}
