package dli_and_obs.utils

import java.util.concurrent.Executors
import java.util.{List, Properties}

import com.huawei.dli.sdk.authentication.AuthenticationMode
import com.huawei.dli.sdk.common.{DLIInfo, SparkJobInfo}
import com.huawei.dli.sdk.exception.DLIException
import com.huawei.dli.sdk.{BatchJob, Cluster, DLIClient, PackageResource}

import scala.util.control.Breaks._
import scala.util.control.NonFatal
/**
  * @Description Utilities for uploading jar packages to Huawei DLI and
  *              submitting Spark batch jobs (serially or in parallel)
  * @Author liutaowei
  * @Date 2019-5-21 10:38
  */
object dli_utils {

  /**
    * Builds a DLI client authenticated via AK/SK.
    *
    * @param properties must contain "ak", "sk", "regionName", "projectId"
    * @return a ready-to-use [[DLIClient]]
    */
  def get_dli_client(properties: Properties) = {
    val dliInfo = new DLIInfo(
      properties.getProperty("regionName"),
      properties.getProperty("ak"),
      properties.getProperty("sk"),
      properties.getProperty("projectId"))
    new DLIClient(AuthenticationMode.AKSK, dliInfo)
  }

  /**
    * Uploads a jar from OBS into DLI as a package resource.
    *
    * @param obs_path   OBS path of the jar; when null/empty nothing is uploaded
    * @param properties credentials / region configuration (see [[get_dli_client]])
    * @return (uploaded resource list — null when the path is empty or the
    *         upload failed, the client used for the upload)
    */
  def jar_load_dli(obs_path: String, properties: Properties) = {
    println("jar包上传DLI")
    // reuse the shared client factory instead of duplicating AK/SK wiring
    val client = get_dli_client(properties)

    // stays null on empty path or upload failure; callers must check
    var packageResources: List[PackageResource] = null
    if (obs_path == null || obs_path.isEmpty) {
      println("类: jar_load_dli \n错误原因：输入obs地址为空")
    } else {
      try {
        // kind "jar" tells DLI how to register the uploaded resource
        packageResources = client.uploadResources("jar", Array(obs_path), "test for sdk")
        println("jar包上传成功")
      } catch {
        case NonFatal(_) => println("类: jar_load_dli \n错误原因：jar包上传dli错误")
      }
    }
    (packageResources, client)
  }

  /**
    * Dispatches one batch job per entry of the semicolon-separated
    * "class_names" property, in parallel when "submit_style" is
    * "asyncSubmit", otherwise serially.
    *
    * @param dli_resources (uploaded package resources, client) as returned by [[jar_load_dli]]
    * @param properties    job configuration ("cluster_name", "class_names", "submit_style")
    * @return "success" once all jobs have been dispatched
    * @throws Exception when no class names are configured
    */
  def batchJobs(dli_resources: (List[PackageResource], DLIClient), properties: Properties): String = {
    val resource_name = dli_resources._1.get(0).getResourceName
    val client = dli_resources._2
    val cluster = client.getCluster(properties.getProperty("cluster_name"))
    val class_names = properties.getProperty("class_names")
      .split(";").filter(n => n != null && n.nonEmpty)

    val submit_style = properties.getProperty("submit_style").trim

    // record dispatch time; used downstream when naming log files
    properties.setProperty("create_time", other_utils.current_date())

    if (class_names.isEmpty) {
      throw new Exception("未填在 class_names 属性填写类名")
    } else if (submit_style.equals("asyncSubmit")) {
      BatchJob_asyncSubmit_submit(resource_name, cluster, class_names, submit_style, properties)
    } else {
      BatchJob_serial_submit(resource_name, cluster, class_names, submit_style, properties)
    }

    println(class_names.mkString(","))
    "success"
  }

  /**
    * Runs the jobs one after another. Each class name is tagged with its
    * 1-based position as "&lt;serial&gt;#&lt;class&gt;" so logs can be ordered.
    */
  def BatchJob_serial_submit(resource_name: String, cluster: Cluster, class_names: Array[String], submit_style: String, properties: Properties) = {
    for ((class_name, idx) <- class_names.zipWithIndex) {
      runBatchJob(resource_name, cluster, (idx + 1) + "#" + class_name, submit_style, properties)
    }
  }

  /**
    * Runs the jobs in parallel, one worker thread per job (plus a few
    * spares), staggering submissions by one second.
    */
  def BatchJob_asyncSubmit_submit(resource_name: String, cluster: Cluster, class_names: Array[String], submit_style: String, properties: Properties) = {
    val executor = Executors.newFixedThreadPool(class_names.length + 5)
    try {
      var ser_id = 0
      for (class_name <- class_names) { // dispatch in the listed order
        ser_id += 1
        println("任务： " + class_name)
        executor.execute(new BatchJob_thread(resource_name, cluster, ser_id + "#" + class_name, submit_style, properties))
        // stagger submissions to avoid hammering the service
        Thread.sleep(1000)
      }
    } finally {
      // stop accepting work; already-queued jobs still run to completion
      executor.shutdown()
    }
  }

  /**
    * Runnable wrapper so a single job submission can run on its own thread.
    */
  class BatchJob_thread(resource_name: String, cluster: Cluster, class_name: String, submit_style: String, properties: Properties)
    extends Runnable {
    override def run(): Unit = {
      runBatchJob(resource_name, cluster, class_name, submit_style, properties)
    }
  }

  /**
    * Submits one Spark batch job to DLI and, whatever the outcome, tries to
    * write its log to OBS.
    *
    * Fixes two defects in the previous version: the async branch compared
    * submit_style against the literal "submit_style" (never matched, so
    * asyncSubmit() was unreachable), and the catch block called break()
    * outside any breakable{}, throwing a stray BreakControl out of the
    * worker thread and making the DLIException unreachable.
    *
    * @param class_name_1 serial-tagged class name, "&lt;serial&gt;#&lt;className&gt;"
    * @throws DLIException when the submission itself fails
    */
  private def runBatchJob(resource_name: String, cluster: Cluster, class_name_1: String, submit_style: String, properties: Properties): Unit = {
    val class_name = class_name_1.split("#")(1)

    val jobInfo = new SparkJobInfo
    jobInfo.setClassName(class_name)
    jobInfo.setSc_type("C")
    jobInfo.setName(class_name)
    jobInfo.setFile(resource_name)
    jobInfo.setCluster_name(cluster.getClusterName)

    val job = new BatchJob(cluster, jobInfo)
    println("开始dli任务\n任务包名:  " + resource_name + "\n任务类名:  " + class_name
      + "\n使用dli集群名称:  " + cluster.getClusterName
      + "\n提交方式: " + submit_style)

    try {
      if (submit_style.equals("asyncSubmit")) job.asyncSubmit()
      else job.submit()
    } catch {
      case NonFatal(e) =>
        throw new DLIException("任务出错： " + e.getMessage)
    } finally {
      // best effort: fetch the job log and persist it to OBS even on failure
      try {
        import scala.collection.JavaConverters._
        val log = job.getLog(0, 1000).asScala.mkString("\n")
        log_utils.log_writeto_obs(properties, log, class_name_1.replace("#", "_") + "_" + job.getStatus)
        println("日志 ： " + log)
      } catch {
        case NonFatal(_) => println("任务: " + class_name + " , 未获取到log,请至 https://console.huaweicloud.com/dli/?region=cn-north-1#/main/sparkJob 查询")
      }
    }

    println("\nJob:" + class_name + "\n完成  时间: " + System.currentTimeMillis())
  }

}
