package com.atguigu.sql.pj

import com.atguigu.sql.util.MySparkSessionUtil
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * description ：各区域热门商品 Top3
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/1/13 11:18
 * modified By ：
 * version:    : 1.0
 */
object AreaCityProductTop3 {

  /**
   * Entry point: computes the Top-3 hot products per area, together with a
   * per-city click-share remark (e.g. "Beijing 21.2%, Tianjin 13.2%, other 65.6%"),
   * and persists the result to the Hive table spark_sql_pj.area_city_product_top3.
   */
  def main(args: Array[String]): Unit = {

    // Enable Hive catalog support and set the HDFS user so writes are permitted.
    val param: Array[String] = Array("spark.sql.catalogImplementation::hive")
    System.setProperty("HADOOP_USER_NAME", "atguigu")

    // Obtain the SparkSession from the project utility.
    val spark: SparkSession = MySparkSessionUtil.get(param)

    // Lower the shuffle partition count (default is 200) for this small job.
    spark.conf.set("spark.sql.shuffle.partitions", "3")

    spark.sql("use spark_sql_pj")

    // 1. Join the user action table with product and city info to enrich rows -> t1
    spark.sql(
      """
        |select
        |  ci.area,
        |  ci.city_name,
        |  pi.product_name,
        |  pi.product_id
        |from spark_sql_pj.user_visit_action uva
        |join spark_sql_pj.product_info pi on uva.click_product_id=pi.product_id
        |join spark_sql_pj.city_info ci on uva.city_id=ci.city_id
        |""".stripMargin)
      .createOrReplaceTempView("t1")

    // Register the custom UDAF that aggregates city names into a share remark
    // such as "Beijing 21.2%, Tianjin 13.2%, other 65.6%".
    spark.udf.register("city_remark", CityRemarkUDAF)

    // 2. Group t1 by area, product_name, product_id; count clicks and build
    //    the per-city remark -> t2.
    //    FIX: removed the duplicated `t1.area` from the GROUP BY clause.
    spark.sql(
      """
        |select
        |  t1.area,
        |  t1.product_name,
        |  count(1) cn,
        |  city_remark(t1.city_name) city_remark
        |from t1
        |group by t1.area, t1.product_name, t1.product_id
        |""".stripMargin)
      .createOrReplaceTempView("t2")

    // 3. Rank products within each area by click count (descending) -> t3
    spark.sql(
      """
        |select
        |  t2.*,
        |  rank() over(partition by t2.area order by t2.cn desc) rk
        |from t2
        |""".stripMargin)
      .createOrReplaceTempView("t3")

    // 4. Keep the Top 3 per area and persist as a Hive table.
    spark.sql(
      """
        |select
        |  t3.area,
        |  t3.product_name,
        |  t3.cn,
        |  t3.city_remark
        |from t3 where t3.rk<=3
        |""".stripMargin)
      .repartition(4)
      .write
      .mode(SaveMode.Overwrite)
      .saveAsTable("spark_sql_pj.area_city_product_top3")

    // NOTE: without coalesce/repartition the default 200 shuffle partitions would
    // produce 200 output files — both with an INSERT via a temp view and with
    // saveAsTable.
//    spark.sql(
//      """
//        |select
//        |  t3.area,
//        |  t3.product_name,
//        |  t3.cn,
//        |  t3.city_remark
//        |from t3 where t3.rk<=3
//        |""".stripMargin)
//      .createOrReplaceTempView("t4")
//
//
//    spark.sql("insert overwrite table area_city_product_top3 select * from t4")

    // Show the saved result for a quick sanity check.
    spark.sql("select * from spark_sql_pj.area_city_product_top3").show(1000, truncate = false)

    // Release the SparkSession resources.
    MySparkSessionUtil.close(spark)
  }

}


/*
结果
地区	商品名称		点击次数	城市备注
华北	商品A		100000	北京21.2%，天津13.2%，其他65.6%
华北	商品P		80200	北京63.0%，太原10%，其他27.0%
华北	商品M		40000	北京63.0%，太原10%，其他27.0%
东北	商品J		92000	大连28%，辽宁17.0%，其他 55.0%

database: spark_sql_pj

1、用户行为表，商品表，地区表连接，补全信息 t1
select
  ci.area,
  ci.city_name,
  pi.product_name,
  pi.product_id
from spark_sql_pj.user_visit_action uva
join spark_sql_pj.product_info pi on uva.click_product_id=pi.product_id
join spark_sql_pj.city_info ci on uva.city_id=ci.city_id

2、根据t1表，对 area, city_name, product_name, product_id 分组，聚合点击次数 t2
select
  t1.area,
  t1.city_name,
  t1.product_name,
  t1.product_id,
  count(1) cn
from t1
group by t1.area, t1.city_name, t1.product_name, t1.product_id

3、对t2表，按 area 分区、按 cn 降序排名 t3
select
  t2.*,
  rank() over(partition by t2.area order by t2.cn desc) rk
from t2

4、对t3，取top 3
select
  t3.area,
  t3.product_name,
  t3.cn
from t3 where t3.rk<=3



表结构：
CREATE TABLE `user_visit_action`(
  `date` string,
  `user_id` bigint,
  `session_id` string,
  `page_id` bigint,
  `action_time` string,
  `search_keyword` string,
  `click_category_id` bigint,
  `click_product_id` bigint,
  `order_category_ids` string,
  `order_product_ids` string,
  `pay_category_ids` string,
  `pay_product_ids` string,
  `city_id` bigint)
row format delimited fields terminated by '\t';
load data local inpath '/opt/module/datas/user_visit_action.txt' into table sparkpractice.user_visit_action;

CREATE TABLE `product_info`(
  `product_id` bigint,
  `product_name` string,
  `extend_info` string)
row format delimited fields terminated by '\t';
load data local inpath '/opt/module/datas/product_info.txt' into table sparkpractice.product_info;

CREATE TABLE `city_info`(
  `city_id` bigint,
  `city_name` string,
  `area` string)
row format delimited fields terminated by '\t';
load data local inpath '/opt/module/datas/city_info.txt' into table sparkpractice.city_info;
 */