package com.dataworker.spark.sql.util

import java.io.{BufferedReader, Closeable, InputStream, InputStreamReader}
import java.net._
import java.util
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConversions._
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory
import sun.net.www.protocol.file.FileURLConnection

/**
  * Created by taofu on 2018/5/9.
  */
/**
  * A [[URLClassLoader]] that can pull jars from HDFS (any scheme with a
  * registered URL stream handler), track the underlying resources it opens so
  * they can be released in [[close]], and resolve classes advertised through
  * "key=className" property files found on the classpath.
  *
  * Construction is restricted to the companion object, which also installs the
  * Hadoop stream handler factory needed for hdfs:// URLs.
  */
class HDFSClassLoader private(urls: Array[URL], parent: ClassLoader) extends URLClassLoader(urls, parent) {

  // JarFile / InputStream handles opened by getResourceAsStream, held weakly so
  // abandoned ones can be GC'd; close() drains whatever is still reachable.
  // All access is guarded by the map's own monitor.
  private val closeables = new util.WeakHashMap[Closeable, Void]

  // Lazily-populated cache of command key -> loaded class (see getClassByName).
  private val cachedClass = new ConcurrentHashMap[String, Class[_]]()

  /**
    * Registers an additional jar on this loader's classpath.
    *
    * @param jar a URL spec (e.g. an hdfs:// path); the scheme must have a
    *            stream handler installed or `new URL` throws MalformedURLException
    */
  def addJar(jar: String): Unit = {
    addURL(new URL(jar))
  }

  /**
    * Opens `url` and returns its input stream, remembering the backing JarFile
    * (for jar: URLs) or the stream itself (for file: URLs) so [[close]] can
    * release it. Returns null for a null URL or on any failure — callers treat
    * null as "resource not available" (best-effort contract).
    */
  def getResourceAsStream(url: URL): InputStream = {
    if (url == null) null
    else {
      try {
        val connection = url.openConnection
        val is = connection.getInputStream
        connection match {
          case juc: JarURLConnection =>
            val jar = juc.getJarFile
            closeables.synchronized {
              if (!closeables.containsKey(jar)) closeables.put(jar, null)
            }
          case _: FileURLConnection =>
            closeables.synchronized { closeables.put(is, null) }
          case _ =>
            // Other connection types manage their own lifecycle; nothing to track.
        }
        is
      } catch {
        case _: Exception => null // deliberate swallow: null signals "not found"
      }
    }
  }

  /**
    * Parses the resource at `url` as UTF-8 lines of "key=value" into a map.
    * Lines without '=' past position 0 are skipped; keys and values are
    * trimmed. On any error the (possibly partial) map parsed so far is
    * returned, and the reader is always closed.
    *
    * Fixed: the original passed a possibly-null stream straight into
    * InputStreamReader and relied on the resulting NPE being swallowed; the
    * null case is now handled explicitly (same empty-map result).
    */
  def getPropertiesFromUrl(url: URL): util.HashMap[String, String] = {
    val properties = new util.HashMap[String, String]()
    val stream = getResourceAsStream(url)
    if (stream != null) {
      var reader: BufferedReader = null
      try {
        reader = new BufferedReader(new InputStreamReader(stream, "UTF-8"))
        var line = reader.readLine()
        while (line != null) {
          val trimmed = line.trim
          // '=' must not be the first character: a line needs a non-empty key.
          if (trimmed.indexOf('=') > 0) {
            val key = StringUtils.substringBefore(trimmed, "=").trim
            val value = StringUtils.substringAfter(trimmed, "=").trim
            properties.put(key, value)
          }
          line = reader.readLine()
        }
      } catch {
        case _: Exception => // best effort: keep whatever was parsed before the error
      } finally {
        try {
          if (reader != null) reader.close()
        } catch {
          case _: Exception =>
        }
      }
    }
    properties
  }

  /**
    * Resolves `name` via property files published on the classpath under the
    * fixed resource name "cn.tongdun.angel.api.AngelCommand". On a cache miss
    * every advertised mapping is loaded and cached, so later lookups are pure
    * cache hits. Returns null when `name` is not advertised anywhere.
    *
    * NOTE(review): as in the original, a single unloadable advertised class
    * makes loadClass throw and aborts the whole lookup — confirm that is the
    * intended failure mode.
    */
  def getClassByName(name: String): Class[_] = {
    if (!cachedClass.containsKey(name)) {
      val resources = getResources("cn.tongdun.angel.api.AngelCommand")
      while (resources.hasMoreElements) {
        val properties = getPropertiesFromUrl(resources.nextElement())
        val keys = properties.keySet().iterator()
        while (keys.hasNext) {
          val key = keys.next()
          cachedClass.put(key, loadClass(properties.get(key)))
        }
      }
    }
    cachedClass.getOrDefault(name, null)
  }

  /**
    * Closes every tracked JarFile/stream (errors on individual handles are
    * ignored so the rest still get closed), clears both caches, then closes
    * the underlying URLClassLoader.
    */
  override def close(): Unit = {
    closeables.synchronized {
      val it = closeables.keySet().iterator()
      while (it.hasNext) {
        try {
          it.next().close()
        } catch {
          case _: Exception => // keep closing the remaining handles
        }
      }
      closeables.clear()
    }
    cachedClass.clear()
    super.close()
  }
}


/**
  * Companion holding the process-wide [[HDFSClassLoader]] singleton and a
  * small demo entry point.
  */
object HDFSClassLoader {

  // Singleton instance; guarded by this object's monitor in getHDFSClassLoader.
  private var classLoader: HDFSClassLoader = null

  /**
    * Returns the shared loader, building it on first use. Before constructing
    * it, reflectively checks URL's private static `factory` field and installs
    * Hadoop's [[FsUrlStreamHandlerFactory]] only if no factory is set yet —
    * `URL.setURLStreamHandlerFactory` may be called at most once per JVM and
    * throws Error on a second call.
    *
    * Fixed: the original check-then-act was unsynchronized, so two threads
    * could race past the null check, create two loaders, and attempt to set
    * the factory twice. The accessor is now serialized on this object.
    */
  def getHDFSClassLoader: HDFSClassLoader = this.synchronized {
    if (classLoader == null) {
      val field = classOf[URL].getDeclaredField("factory")
      field.setAccessible(true)
      val factory = field.get(null).asInstanceOf[URLStreamHandlerFactory]
      if (factory == null) URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory)

      classLoader = new HDFSClassLoader(new Array[URL](0), Thread.currentThread.getContextClassLoader)
    }
    classLoader
  }

  /** Demo: registers two HDFS jars, then prints every log4j.xml resource parsed as properties. */
  def main(args: Array[String]): Unit = {
    val loader = getHDFSClassLoader
    loader.addJar("hdfs://dp126:8020/user/datacompute/platformtool/resources/543/latest/jobserverdemo-1.0.jar")
    loader.addJar("hdfs://dp126:8020/user/tongdun/yarn_jars/jobserverdemo-2.0.jar")
    val iter = loader.getResources("log4j.xml")
    while (iter.hasMoreElements) {
      val url = iter.nextElement()
      println(loader.getPropertiesFromUrl(url))
    }
  }
}
