package com.king.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql._


/**
 * Requirement: for each area, rank products by click count and keep the top 3.
 *
 * Reads the Hive tables `user_visit_action`, `product_info`, and `city_info`,
 * joins them on product/city ids, counts clicks per (area, product), ranks
 * within each area with `rank() over (...)`, and shows rows with rank <= 3.
 *
 * NOTE(review): assumes the three Hive tables already exist in the current
 * database and that a Hive metastore is reachable — confirm before running.
 */
object SparkSQL10_Req_2 {
  def main(args: Array[String]): Unit = {

    // Impersonate this HDFS user so Hive/HDFS writes are permitted.
    System.setProperty("HADOOP_USER_NAME", "atguigu")

    // Unified entry point for Spark SQL: SparkSession.
    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    val spark: SparkSession =
      SparkSession.builder()
        .config(sparkConf)
        .enableHiveSupport() // enable Hive support (metastore-backed tables)
        .getOrCreate()

    // Ensure the session is stopped even if the query fails,
    // so we do not leak the underlying SparkContext.
    try {
      // Innermost t1: join clicks with product/city dims, dropping non-click rows
      // (click_product_id == -1 marks "no click"). t2: count clicks per
      // (area, product). t3: rank products within each area; outer query keeps
      // the top 3 per area.
      spark.sql(
        """
          |select
          |  t3.area,
          |  t3.product_name,
          |  t3.p_click_count,
          |  t3.rk
          |from
          |  (
          |select
          |  t2.area,
          |  t2.product_name,
          |  t2.p_click_count,
          |  rank() over( partition by t2.area order by t2.p_click_count desc ) rk
          |from
          |  (
          |select
          |  t1.area,
          |  t1.product_name,
          |  count(t1.click_product_id) p_click_count
          |
          |from
          |  (
          |select
          |    u.click_product_id ,
          |    p.product_name,
          |    c.city_name,
          |    c.area
          |from
          |   user_visit_action  u
          |join
          |   product_info p
          |on
          |   u.click_product_id = p.product_id
          |join
          |   city_info c
          |on
          |   u.city_id  = c.city_id
          |where
          |   u.click_product_id != -1
          |  )t1
          |group by t1.area , t1.product_name
          |  )t2
          |  )t3
          |where t3.rk <=3
        """.stripMargin).show()
    } finally {
      spark.stop()
    }
  }
}
