package fun.lumia.compute

import java.text.SimpleDateFormat
import java.util.Properties

import fun.lumia.common.SparkTool
import fun.lumia.compute.ComputeConstant
import org.apache.spark.sql.SaveMode

object computeRate extends SparkTool {

  /**
   * Offset added to each raw timestamp before formatting: one 5-minute
   * aggregation window, in milliseconds. The raw `time_stamp` marks the
   * start of a window; the report shows its end.
   * NOTE(review): the 5-minute window size is assumed from this constant —
   * confirm against the upstream data's bucketing.
   */
  private val WindowOffsetMillis: Long = 5 * 60 * 1000L

  /**
   * UDF: formats an epoch-millis timestamp as "yyyy-MM-dd HH:mm:ss",
   * shifted forward by [[WindowOffsetMillis]]. Uses the JVM default
   * timezone (inherited from `SimpleDateFormat`'s default).
   *
   * A new `SimpleDateFormat` is created on every invocation because
   * `SimpleDateFormat` is not thread-safe and the UDF may be invoked
   * concurrently by executor task threads.
   */
  val formatTimeTemp: Long => String = (timeStampTemp: Long) => {
    val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    format.format(timeStampTemp + WindowOffsetMillis)
  }

  /**
   * Spark job entry point:
   *   1. read the unioned detail records (parquet) from HDFS,
   *   2. aggregate filesize/total per (time_stamp, server_ip, reqdomain, vendor),
   *   3. left-join the MySQL domain dimension table to attach the business name,
   *   4. overwrite the result into the MySQL DWS table.
   *
   * @param args command-line arguments (currently unused)
   */
  override def run(args: Array[String]): Unit = {
    sql.udf.register("formatTimeTemp", formatTimeTemp)

    // Raw detail records; switch to ComputeConstant.INPUT_LOCAL_PATH when
    // debugging locally.
    val unionDF = sql.read.parquet(ComputeConstant.INPUT_HDFS_PATH)
    unionDF.registerTempTable("unionTable")

    // Per-window aggregation. `formatTimeTemp(time_stamp)` is a deterministic
    // function of the grouped `time_stamp` column, so it is legal outside the
    // GROUP BY list.
    sql.sql(
      """
        |select time_stamp,
        |formatTimeTemp(time_stamp) as format_timestamp,
        |server_ip,
        |reqdomain,
        |vendor,
        |sum(filesize) as sum_filesize,
        |sum(total) as total
        |from unionTable group by time_stamp,server_ip,reqdomain,vendor
        |""".stripMargin).registerTempTable("groupTable")

    // Domain dimension table loaded from MySQL over JDBC.
    val domainDF = sql.read.format("jdbc")
      .options(Map(
        "url" -> ComputeConstant.MYSQL_URL,
        "driver" -> ComputeConstant.MYSQL_DRIVER,
        "dbtable" -> ComputeConstant.MYSQL_DBTABLE_DOMAIN,
        "user" -> ComputeConstant.MYSQL_USER,
        "password" -> ComputeConstant.MYSQL_PASSWORD
      )).load()
    domainDF.select("DOMAIN", "BUSI_NAME").registerTempTable("domainTable")

    // Join and final projection in a single query (the intermediate
    // "finalTable" view added nothing). Left join keeps aggregated rows whose
    // domain has no dimension entry; their busi_name comes back NULL.
    val finalDF = sql.sql(
      """
        |select time_stamp,
        |format_timestamp,
        |server_ip,
        |reqdomain,
        |vendor,
        |BUSI_NAME as busi_name,
        |sum_filesize,
        |total
        |from groupTable a left join domainTable b on a.reqdomain=b.DOMAIN
        |""".stripMargin)

    // JDBC connection properties for the output table.
    val prop = new Properties()
    prop.put("driver", ComputeConstant.MYSQL_DRIVER)
    prop.put("user", ComputeConstant.MYSQL_USER)
    prop.put("password", ComputeConstant.MYSQL_PASSWORD)

    // Overwrite: every run fully replaces the DWS table contents.
    finalDF.write.mode(SaveMode.Overwrite).jdbc(ComputeConstant.MYSQL_URL, ComputeConstant.MYSQL_DBTABLE_DWS, prop)
  }

  /**
   * Spark configuration hook (invoked by SparkTool before run).
   * The local-mode settings below are kept, commented out, for development:
   *   conf.setMaster("local[4]")
   *   conf.set("spark.sql.shuffle.partitions", "45")
   */
  override def init(): Unit = {
    //    conf.setMaster("local[4]")
    //    conf.set("spark.sql.shuffle.partitions", "45")
  }
}
