package pk

import org.apache.spark.sql.SparkSession

/**
 * Demo: per-day UV/PV analysis over a JD access-log CSV using Spark SQL,
 * with the PV result written to MySQL via JDBC and a UDF example.
 *
 * Expected CSV columns: ip, user_id, url, access_time
 * (access_time is assumed to start with a 10-char date, e.g. "2023-01-01 ..." —
 *  TODO confirm against the actual log format.)
 */
object demo1 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("SparkJdLogSqlApp")
      .master("local[2]")
      // Small local demo: 2 shuffle partitions avoids the 200-partition default overhead.
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()
    import spark.implicits._
    spark.sparkContext.setLogLevel("WARN")

    // Load the access log. NOTE(review): path is empty — this will fail at
    // runtime until a real input path is supplied. TODO: set the CSV path.
    val read = spark.read
      .option("sep", ",")
      .schema("ip string,user_id string,url string,access_time string")
      .csv("")
    read.printSchema()
    read.show(10, truncate = false)

    // createOrReplaceTempView is idempotent, so re-running in the same
    // session (e.g. from a REPL) does not throw "view already exists".
    read.createOrReplaceTempView("tmp_jd_log")

    // r1: daily UV (distinct users per day).
    val r1 = spark.sql(
      """
        |SELECT
        |substring(access_time,1,10) as day_str
        |,count(distinct user_id) as uv
        |FROM tmp_jd_log
        |GROUP BY substring(access_time,1,10)
        |""".stripMargin)
    r1.show(false)

    // r2: daily PV (page views per day).
    val r2 = spark.sql(
      """
        |SELECT
        |substring(access_time,1,10) AS day_str
        |,count(url) as pv
        |FROM tmp_jd_log
        |GROUP BY substring(access_time,1,10)
        |""".stripMargin)
    r2.show(false)

    // r3: UV per URL per day.
    // Fix: original GROUP BY used substring(access_time) without arguments,
    // which does not match the selected expression and fails analysis.
    val r3 = spark.sql(
      """
        |SELECT
        |url,
        |substring(access_time,1,10) as day_str,
        |count(distinct user_id) as uv
        |FROM tmp_jd_log
        |group by url, substring(access_time,1,10)
        |""".stripMargin)
    r3.show(false)

    // r4: daily PV ordered by PV descending (written to MySQL below).
    // Fix: original grouped by url (not selected) and called substring with no
    // args; grouping by day only matches the selected columns and the intent
    // of the per-day result_pv table.
    val r4 = spark.sql(
      """
        |SELECT
        |substring(access_time,1,10) as day_str,
        |count(url) as pv
        |FROM tmp_jd_log
        |GROUP BY substring(access_time,1,10)
        |order by pv desc
        |""".stripMargin)
    r4.show(false)

    // Persist daily PV to MySQL. NOTE(review): trailing '?' in the JDBC URL
    // carries no parameters — confirm whether options like useSSL/serverTimezone
    // were intended; credentials should come from config, not source.
    r4.write
      .format("jdbc")
      .option("url", "jdbc:mysql://node101:3306/db_test?")
      .option("dbtable", "db_test.result_pv")
      .option("user", "root")
      .option("password", "1223456")
      .save()

    // UDF example: extract the leading 10-char date from access_time.
    spark.udf.register(
      "extract_date",
      (accessTime: String) => accessTime.substring(0, 10)
    )

    // r5: built-in substring vs. the UDF — both should yield the same value.
    val r5 = spark.sql(
      """
        |SELECT
        |access_time,
        | substring(access_time,1,10) AS day_str,
        | extract_date(access_time) as date_str
        | FROM tmp_jd_log
        |""".stripMargin)
    r5.show(false)

    // r6: fix — original referenced undefined "extract_dat" (typo, no args);
    // the registered UDF is extract_date(access_time).
    val r6 = spark.sql(
      """
        |SELECT
        |access_time,
        |substring(access_time,1,10) as day_str
        |,extract_date(access_time) as date_str
        |FROM tmp_jd_log
        |""".stripMargin)
    r6.show(false)

    // Fix: original final query was invalid SQL (dangling commas before FROM)
    // and its result was discarded; make it valid and show it.
    val r7 = spark.sql(
      """
        |SELECT
        |access_time
        |FROM tmp_jd_log
        |""".stripMargin)
    r7.show(false)

    // Release cluster resources (was missing in the original).
    spark.stop()
  }
}
