package Practice

import org.apache.spark.sql.{DataFrame, SparkSession}

object a1 {

  /**
   * Spark SQL practice job: loads a JD behavior log (ip, user_id, url,
   * access_time), registers it as a temp view and runs five SQL analyses
   * (daily UV, daily PV, per-URL UV/PV, UDF-based date extraction),
   * printing each result to stdout.
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the SparkSession entry point (local mode, 2 threads).
    val spark: SparkSession = SparkSession.builder()
      .appName("SparkSession")
      .master("local[2]")
      // FIX: the key was misspelled "spark.sql.shuffle,partitions" (comma
      // instead of dot), so the setting was silently ignored and the default
      // of 200 shuffle partitions was used.
      .config("spark.sql.shuffle.partitions", "2")
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    // 2. Load the behavior log as CSV with an explicit schema
    //    (no header inference needed, all columns read as STRING).
    val logDF: DataFrame = spark.read
      .option("sep", ",")
      .schema("ip STRING,user_id STRING,url STRING,access_time STRING")
      .csv("zg6-spark-lecture/src/main/resources/jd-behavior.log")
    logDF.printSchema()
    logDF.show(10, truncate = false)

    // 3. Register a temporary view for SQL queries.
    //    createOrReplaceTempView is idempotent; plain createTempView throws
    //    if the view name already exists in this session.
    logDF.createOrReplaceTempView("tmp_jd")
    spark.sql("SELECT * FROM tmp_jd").show(false)

    // 4. Analysis 1: daily UV (distinct visitors per day), descending.
    //    FIX: the required ORDER BY uv DESC was missing.
    val df4 = spark.sql(
      """
        |SELECT
        |  substring(access_time, 1, 10) AS day_str
        |  ,count(DISTINCT user_id)      AS uv
        |FROM tmp_jd
        |GROUP BY substring(access_time, 1, 10)
        |ORDER BY uv DESC
        |""".stripMargin)
    df4.show(false)

    // 5. Analysis 2: daily PV (page views per day), descending.
    //    FIX: the required ORDER BY pv DESC was missing.
    val df5 = spark.sql(
      """
        |SELECT
        |  substring(access_time, 1, 10) AS day_str
        |  ,count(url)                   AS pv
        |FROM tmp_jd
        |GROUP BY substring(access_time, 1, 10)
        |ORDER BY pv DESC
        |""".stripMargin)
    df5.show(false)

    // 6. Analysis 3: daily UV per URL, descending.
    //    FIX: the original computed count(DISTINCT url) while grouping by url,
    //    which is always 1 (and mislabeled it pv). Visitor count requires
    //    count(DISTINCT user_id) AS uv; the ORDER BY was also missing.
    val df6 = spark.sql(
      """
        |SELECT
        |  url
        |  ,substring(access_time, 1, 10) AS day_str
        |  ,count(DISTINCT user_id)       AS uv
        |FROM tmp_jd
        |GROUP BY url, substring(access_time, 1, 10)
        |ORDER BY uv DESC
        |""".stripMargin)
    df6.show(false)

    // 7. Analysis 4: daily PV per URL, descending.
    //    TODO(review): the original comment also asks for this result to be
    //    saved to a MySQL table ("retult_pre"), but no JDBC write exists here.
    //    Add df7.write.jdbc(...) once connection properties are available.
    val df7 = spark.sql(
      """
        |SELECT
        |  url
        |  ,substring(access_time, 1, 10) AS day_str
        |  ,count(url)                    AS pv
        |FROM tmp_jd
        |GROUP BY url, substring(access_time, 1, 10)
        |ORDER BY pv DESC
        |""".stripMargin)
    df7.show(false)

    // 8. Register a UDF that extracts the date prefix (first 10 chars,
    //    presumably yyyy-MM-dd — matches the substring(…,1,10) used above).
    spark.udf.register(
      "extract_date",
      (accessTime: String) => accessTime.substring(0, 10)
    )

    // 9. Analysis 5: show the built-in substring and the UDF side by side.
    val df9: DataFrame = spark.sql(
      """
        |SELECT
        |  access_time
        |  ,substring(access_time, 1, 10) AS day_str
        |  ,extract_date(access_time)     AS date_str
        |FROM tmp_jd
        |""".stripMargin)
    df9.show(false)

    // Release the Spark resources.
    spark.stop()
  }

}
