package mammuthus.web.service

import java.net.URLDecoder
import net.csdn.common.path.Url
import net.liftweb.{json => SJSon}
import mammuthus.web.bean._
import java.util.concurrent.atomic.AtomicInteger
import mammuthus.web.bean.TaskDesc
import mammuthus.web.bean.TaskInfo
import mammuthus.web.bean.ShellResult
import mammuthus.web.bean.Status

/**
 * 6/12/15 WilliamZhu(allwefantasy@gmail.com)
 */
trait BasicClusterServiceTrait {
  this: SlaveService =>

  /*
    Executes a shell command on the given slave nodes via HTTP GET.
    appName is optional and only used for logging; `timout` is forwarded
    to generateShellResult as the wait limit.
   */
  def haddoopC(appName: String, nodes: String, shellCommand: String, timout: Int = 10) = {
    val decodedCommand = URLDecoder.decode(shellCommand, "utf-8")
    logger.info(s"appName = $appName  nodes = $nodes shellCommand = $decodedCommand")
    val requestUrl = new Url(shellURL).query(s"shellCommand=${shellCommand}&slaves=${nodes}")
    val resp = httpClient.get(requestUrl)
    generateShellResult(resp, timout)
  }

  /*
    Executes a shell command on the given slave nodes via HTTP POST
    (use this instead of haddoopC for large payloads such as heredocs).
    appName is optional and only used for logging.
   */
  def haddoopCWithPost(appName: String, nodes: String, shellCommand: String, timout: Int = 10) = {
    // JavaConverters replaces the deprecated implicit JavaConversions:
    // the Scala Map -> java.util.Map conversion is now explicit via .asJava.
    import scala.collection.JavaConverters._
    logger.info(s"appName = $appName  nodes = $nodes shellCommand = ${URLDecoder.decode(shellCommand, "utf-8")}")
    val params: java.util.Map[String, String] =
      Map("shellCommand" -> shellCommand, "slaves" -> nodes).asJava
    val resp = httpClient.post(new Url(shellURL), params)
    generateShellResult(resp, timout)
  }

  /*
      Returns the install directory of the application,
      e.g. /data/1/usr/local/applications
   */
  def installPath(appName: String) = getAPPExtra(appName, "installPath")

  /*
      Fetches an application attribute as a single comma-joined string.
      Basic attributes: appName, app, installPath; cluster apps (e.g. hadoop)
      also expose nameNodes, dataNodes, qjm, etc.
      Note: mkString on an empty list already yields "", so no emptiness
      guard is needed.
   */
  def getAPPExtra(appName: String, name: String) = {
    getAPPExtraItems(appName, name)
      .map(_.extra)
      .filterNot(_.isEmpty)
      .distinct
      .mkString(",")
  }

  /*
      Fetches an application attribute from the master, one item per node.
      Basic attributes: appName, app, installPath; cluster apps (e.g. hadoop)
      also expose nameNodes, dataNodes, qjm, etc.
      Returning the per-node items lets callers detect nodes whose
      application metadata is out of sync.
   */
  def getAPPExtraItems(appName: String, name: String) = {
    implicit val formats = SJSon.DefaultFormats
    val resp = httpClient.get(
      new Url(s"${masterUrl}/app/get/extra").query(s"${authTokenStr}appName=${appName}&name=${name}"))
    SJSon.parse(resp.getContent).extract[List[GetAppExtra]]
  }

  /*
     Stores extra attributes for an application on the master.
     Empty values are dropped; each value is URL-encoded for transport
     (keys are passed through as-is).
   */
  def putAPPExtra(appName: String, params: Map[String, String]) = {
    val extraParamsUrl = s"${masterUrl}/app/put/extra"

    val queryFragment = params.collect {
      case (key, value) if !value.isEmpty => s"${key}=${encode(value)}"
    }.mkString("&")

    logger.info(s"${authTokenStr}${queryFragment}")
    val resp = httpClient.get(new Url(extraParamsUrl).query(s"${authTokenStr}appName=${appName}&${queryFragment}"))
    logger.info(resp.getContent)
    parseAppExtra(resp.getContent)
  }

  /*
     Parses the master's app-config response into (template, ShellResult)
     pairs — one TaskInfo per node execution result.
   */
  def parseAppConfig(pps: String) = {
    implicit val formats = SJSon.DefaultFormats
    val results = SJSon.parse(pps).extract[List[AppConfigResult]]
    results.map { result =>
      val taskInfos = result.executeResult.map { exec =>
        // Status(alive, pending, failed): a node failure is surfaced as !success
        TaskInfo(TaskDesc("-", exec.hostName), -1, exec.msg, Status(true, false, !exec.success))
      }
      (result.template, ShellResult(true, "", "", taskInfos))
    }
  }

  /*
     Parses the master's put-extra response into a single ShellResult,
     with one TaskInfo per node.
   */
  def parseAppExtra(rps: String) = {
    implicit val formats = SJSon.DefaultFormats
    val taskInfos = SJSon.parse(rps).extract[List[PutAppExtra]].map { item =>
      TaskInfo(TaskDesc("-", item.hostName), -1, item.msg, Status(true, false, !item.success))
    }
    ShellResult(true, "", "", taskInfos)
  }

  /*
     Writes a large text block to a file on remote nodes via a shell heredoc.
     Only paths inside the application directory are allowed: e.g. path
     "etc/hadoop/hdfs-site.xml" expands to
     /data/1/usr/local/applications/hadoop/etc/hadoop/hdfs-site.xml.
     NOTE(review): the heredoc delimiter is unquoted, so shell variable
     expansion applies to `content`, and content containing a bare "EOF"
     line would terminate early — confirm callers only pass trusted config.
   */
  def writeRemoteFile(appName: String, path: String, content: String, nodes: String, append: Boolean = false) = {
    require(!path.contains("..") && !path.startsWith("/"), "path should not contains '..' relative path symbol or starts with /")
    val targetFile = installPath(appName) + "/" + appName + "/" + path
    val redirect = if (append) ">>" else ">"
    val shellCommand = s"""cat <<EOF ${redirect} ${targetFile}\n${content}\nEOF"""
    haddoopCWithPost("", nodes, shellCommand)
  }

  /*
    Finds the names of installed applications of the given application type,
    returned as a comma-joined string ("" when none are installed).
   */
  def findAppNamesByApp(app: String) = {
    implicit val formats = SJSon.DefaultFormats
    val resp = httpClient.get(
      new Url(s"${masterUrl}/app/get/extra").query(s"${authTokenStr}app=${app}&name=appName"))
    val items = SJSon.parse(resp.getContent).extract[List[GetAppExtra]]
    items.map(_.extra).distinct.filterNot(_.isEmpty).mkString(",")
  }

  // Initial hosts sync: pushes every hadoop node's hostName to every hadoop node.
  def sycHosts(appName: String): ShellResult = {
    val allNodes = findHadoopNodes(appName)
    sycHosts(appName, allNodes)
  }

  // Adds all hadoop hostNames to the specified target servers.
  def sycHosts(appName: String, hostNames: String): ShellResult =
    sycHosts(appName, hostNames, findHadoopNodes(appName))

  // Appends the given hostNames (comma-separated, from newHostsToFile) as
  // "<ip> <hostName>" lines to /etc/hosts on the target servers (hostNames).
  // NOTE(review): slaves(f) throws NoSuchElementException if a hostname in
  // newHostsToFile is not a known slave — confirm callers guarantee this.
  def sycHosts(appName: String, hostNames: String, newHostsToFile: String): ShellResult = {
    // JavaConversions supplies the implicit conversion that lets us .map over
    // getSlave's (presumably Java) collection — kept as-is for that reason.
    import scala.collection.JavaConversions._
    val slaves = getSlave.map(f => (f.hostName, f)).toMap
    val hostsStr = newHostsToFile.split(",").map(f => s"${fetchIp(slaves(f))} ${f}").mkString("\n")
    // Command is URL-encoded here because haddoopC sends it as a GET query param.
    val shellCommand = encode(s"echo '${hostsStr}' >> /etc/hosts")
    haddoopC(appName, hostNames, shellCommand)
  }

  /*
     Triggers installation of the application's tarball on new slave nodes.
     The master serves the package at <masterUrl>/<app>@<version>.tar.gz and
     the install endpoint downloads, unpacks and registers it.
   */
  def installNewSlaves(appName: String, slaves: String) = {
    val version = getAPPExtra(appName, "version")
    val app = getAPPExtra(appName, "app")
    val downloadUrl = s"${masterUrl}/${app}@${version}.tar.gz"

    val installRequest = new Url(s"${masterUrl}/app/install?${authTokenStr}")
      .addParam("slaves", encode(slaves))
      .addParam("appVersion", encode(version))
      .addParam("url", encode(downloadUrl))
      .addParam("filePath", encode(s"${softwareDir}/${app}@${version}.tar.gz"))
      .addParam("installPath", encode(applicationDir))
      .addParam("appName", encode(appName))
      .addParam("app", encode(app))
      .addParam("tt", encode("1800000"))

    generateShellResult(httpClient.get(installRequest), 30)
  }

  /*
     Copies the given config files from the name node onto new slave nodes:
     phase 1 reads every file's content from nameNode, phase 2 writes each
     content to the slaves. The two phases are deliberately kept separate so
     all reads happen before any write (preserves remote side-effect order).
   */
  def configNewSlaves(appName: String, nameNode: String, slaves: String, configFiles: List[String], configFilePath: String) = {
    val fileContents = configFiles.map { fileName =>
      val readResult = haddoopC(
        appName,
        nameNode,
        encode(s"cat  ${installPath(appName)}/${appName}/${configFilePath}/${fileName}"))
      // NOTE(review): assumes at least one TaskInfo comes back for the read —
      // an empty taskInfos would throw here; confirm upstream guarantees this.
      (fileName, readResult.taskInfos(0).message)
    }
    fileContents.map { case (fileName, fileContent) =>
      val writeResult = writeRemoteFile(appName, s"${configFilePath}/${fileName}", fileContent, slaves)
      Map("name" -> s"write  config file ${fileName} in new slaves", "rs" -> writeResult)
    }
  }


  def waitUntilAllNewSlavesReady(appName: String, oldSize: Int, newSlaves: String) = {
    val targetSize = oldSize + newSlaves.split(",").length
    var nowSize = getAPPExtraItems(appName, "appName").size
    val counter = new AtomicInteger(0)
    while (nowSize < targetSize && counter.get() < 50) {
      logger.info(s"${nowSize} = ${targetSize}")
      Thread.sleep(200)
      nowSize = getAPPExtraItems(appName, "appName").size
      counter.addAndGet(1)
    }

  }
}
