package com.sunzm.spark.sql.hive.report.dws

import java.util

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 *
 * DWD层数据入DWS层示例
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-07-22 20:06
 */
object DWDToDWSDemo {
  def main(args: Array[String]): Unit = {
    // Must be set BEFORE the SparkSession is created so HDFS operations
    // are performed as the "hdfs" user.
    System.setProperty("HADOOP_USER_NAME", "hdfs")

    val warehouseLocation = "/user/hive/warehouse"
    //val warehouseLocation = new File("spark-warehouse").getAbsolutePath

    // SECURITY: the metastore DB credentials were hard-coded in source.
    // Read them from the environment instead, falling back to the previous
    // values so existing local runs keep working. The exposed password
    // should be rotated and the fallbacks removed once the environment is
    // configured.
    val metastoreUrl = sys.env.getOrElse(
      "HIVE_METASTORE_JDBC_URL",
      "jdbc:mysql://82.156.210.70:3306/hive?useSSL=false")
    val metastoreUser = sys.env.getOrElse("HIVE_METASTORE_USER", "root")
    val metastorePassword = sys.env.getOrElse("HIVE_METASTORE_PASSWORD", "ABC123abc.123")

    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      .master("local[*]")
      .config("spark.default.parallelism", 8)
      .config("spark.sql.shuffle.partitions", 8)
      // None of the settings below are needed when the Hive configuration
      // files are already on the cluster classpath.
      //.config("fs.defaultFS", "hdfs://192.168.1.158:8020")
      .config("fs.defaultFS", "file:///")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      // Enable dynamic partition support.
      .config("hive.exec.dynamic.partition", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // If Spark should manage the metadata itself, the metastore URI is not
      // needed, but MySQL must be configured — otherwise the embedded Derby
      // database is used.
      /*.config("hive.metastore.uris", "thrift://hive01.prd.bj.sobot.com:9083")*/
      .config("javax.jdo.option.ConnectionDriverName", "com.mysql.jdbc.Driver")
      .config("javax.jdo.option.ConnectionURL", metastoreUrl)
      .config("javax.jdo.option.ConnectionUserName", metastoreUser)
      .config("javax.jdo.option.ConnectionPassword", metastorePassword)
      .enableHiveSupport()
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    //createDBAndTable(spark)

    dwdTodws(spark)

    spark.stop()
  }

  /**
   * Reads a sample of the DWD-layer detail table and prints it to stdout.
   * Placeholder for the actual DWD -> DWS aggregation step.
   *
   * @param spark Hive-enabled session used to run the queries
   */
  def dwdTodws(spark: SparkSession) = {
    spark.sql("USE applog_dwd")

    val tables = spark.sql("SHOW TABLES")
    tables.show(10, false)

    val sample = spark.sql("SELECT * FROM app_event_detail limit 10")
    sample.show(10, false)
  }

  /**
   * One-off setup helper: (optionally, currently commented out) creates the
   * `applog_dwd` database and the partitioned external table
   * `app_event_detail`, then parses the sample JSON event file and inserts
   * it into the partition for a fixed date.
   *
   * @param spark Hive-enabled session used for the DDL and the insert
   */
  def createDBAndTable(spark: SparkSession) = {
    // Create the database.
   /* spark.sql("CREATE DATABASE IF NOT EXISTS applog_dwd")
    spark.sql("SHOW DATABASES").show(10, false)

    // Create a table in the applog_dwd database.
    spark.sql("USE applog_dwd")

    spark.sql(
      """
        | CREATE EXTERNAL TABLE IF NOT EXISTS `app_event_detail`(
        |	`account` string,
        |	`province` string,
        |	`city` string,
        |	`sessionId` string,
        |	`eventId` string,
        |	`timeStamp` bigint,
        |	`properties` map<String,String>
        |)
        | PARTITIONED BY (`dt` string)
        | STORED AS parquet
        | LOCATION '/data/hive/applog_dwd/'
        | TBLPROPERTIES("parquet.compress"="snappy")
        |
        |""".stripMargin)

    spark.sql("SHOW TABLES").show(10, false)*/

    //spark.sql("DESC app_event_detail").show(10, false)

    //spark.sql("SHOW CREATE TABLE app_event_detail").show(10, false)

    // Load data into the table. The partition date is hard-coded; this is a
    // one-off demo loader.
    val dt = "2021-07-23"
    //val ds: Dataset[String] = spark.read.textFile("/data/spark/sql/hive/app_event_detail.txt")
    val ds: Dataset[String] = spark.read.textFile("data/spark/sql/hive/app_event_detail.txt")

    import spark.implicits._

    ds.map(line => {
      // Each input line is one JSON event object.
      val jSONObject = JSON.parseObject(line)

      val account = jSONObject.getString("account")
      val province = jSONObject.getString("province")
      val city = jSONObject.getString("city")
      val sessionId = jSONObject.getString("sessionId")
      val eventId = jSONObject.getString("eventId")
      val timeStamp = jSONObject.getLongValue("timeStamp")

      // The implicit JavaConversions are deprecated (and removed in
      // Scala 2.13); use the explicit JavaConverters `.asScala` instead.
      import scala.collection.JavaConverters._
      val properties: collection.Map[String, String] =
        jSONObject.getJSONObject("properties").getInnerMap.asScala.mapValues(_.toString)

      AppEventDetail(account, province, city, sessionId, eventId, timeStamp, properties.toMap)

    }).createOrReplaceTempView("v_app_event_detail")

    spark.sql(
      s"""
        |  INSERT INTO TABLE app_event_detail
        |     partition (dt='${dt}')
        |  SELECT
        |     *
        |  FROM v_app_event_detail
        |""".stripMargin)

    spark.sql("SELECT * FROM app_event_detail limit 10")
      .show(10, false)

  }

  /**
   * One parsed app event record; its fields mirror the columns of the Hive
   * table `app_event_detail` (the `dt` partition column is supplied
   * separately at insert time, not carried here).
   *
   * @param account    user account identifier
   * @param province   province parsed from the event
   * @param city       city parsed from the event
   * @param sessionId  client session identifier
   * @param eventId    event type identifier
   * @param timeStamp  event time as epoch millis — presumably; TODO confirm against the source data
   * @param properties free-form event attributes (JSON `properties` object)
   */
  private case class AppEventDetail(account: String,
                                    province: String,
                                    city: String,
                                    sessionId: String,
                                    eventId: String,
                                    timeStamp: Long,
                                    properties: Map[String, String])
}
