package chapter06

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo of Spark SQL built-in aggregate functions (`count`, `countDistinct`):
 * computes per-day PV (page views) and UV (unique visitors) from an in-memory
 * access log, then writes the UV result to MySQL over JDBC.
 *
 * JDBC connection settings are read from the Typesafe Config classpath
 * configuration (keys under `db.default.*`).
 */
object BuildinFunctionApp {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      // Was "HiveSourceApp" — copy-paste leftover; keep the UI app name
      // consistent with this object.
      .appName("BuildinFunctionApp")
      .master("local")
      .getOrCreate()

    // JDBC sink settings from application.conf; getString throws
    // ConfigException.Missing if a key is absent (fail fast at startup).
    val config: Config = ConfigFactory.load()
    val driver: String = config.getString("db.default.driver")
    val url: String = config.getString("db.default.url")
    val user: String = config.getString("db.default.user")
    val password: String = config.getString("db.default.password")
    val sinkTable: String = config.getString("db.default.sink.table")

    import spark.implicits._
    // Spark SQL built-in functions (count, countDistinct, ...)
    import org.apache.spark.sql.functions._

    // Sample access log, one record per line: "day,userId".
    // NOTE: keep this literal data as-is; do not auto-format it.
    val userAccessLog: Array[String] = Array(
      "2016-10-01,1122",
      "2016-10-01,1122",
      "2016-10-01,1122",
      "2016-10-01,1123",
      "2016-10-01,1124",
      "2016-10-01,1124",
      "2016-10-02,1121",
      "2016-10-02,1121",
      "2016-10-02,1123",
      "2016-10-02,1123"
    )

    val userAccessRDD: RDD[String] = spark
      .sparkContext
      .parallelize(userAccessLog)

    // Parse each "day,userId" line into a typed Log row and lift to a DataFrame.
    val userAccessDF: DataFrame = userAccessRDD.map(x => {
      val splits: Array[String] = x.split(",")
      Log(splits(0), splits(1).toInt)
    }).toDF()

    //userAccessDF.show()
    // PV: total accesses per day (count includes duplicates).
    val userPvCountDF: DataFrame = userAccessDF.groupBy("day")
      .agg(count("userId").as("pv"))
    // userPvCountDF.show()

    // UV: distinct visitors per day.
    val userUvCountDF: DataFrame = userAccessDF.groupBy("day")
      .agg(countDistinct("userId").as("uv"))

    // Write UV counts to MySQL over JDBC.
    // Fixes: target table now comes from config (db.default.sink.table)
    // instead of the hard-coded "myhive.uv", and the configured JDBC
    // driver class is passed explicitly so the writer can locate it.
    // Default save mode is ErrorIfExists: the job fails if the table
    // already exists (unchanged from the original behavior).
    userUvCountDF.write
      .format("jdbc")
      .option("driver", driver)
      .option("url", url)
      .option("dbtable", sinkTable)
      .option("user", user)
      .option("password", password)
      .save()

    spark.stop()
  }

  /** One access-log record: the day of access and the visiting user's id. */
  case class Log(day: String, userId: Int)

}
