package com.at.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 *
 * @author cdhuangchao3
 * @date 2023/5/27 8:05 PM
 */
object Spark06_SparkSql_Test1 {

  /**
   * Entry point: runs a Hive-backed Spark SQL job that prints, for each area,
   * the top 3 products by click count.
   *
   * Pipeline (all inside one SQL statement):
   *   1. Join `user_visit_action` with `product_info` and `city_info`,
   *      keeping only real click events (`click_product_id > -1`).
   *   2. Count clicks per (area, product_name).
   *   3. Rank products within each area by click count (descending).
   *   4. Keep ranks 1-3 and print the result to stdout.
   *
   * Requires a reachable Hive metastore and the warehouse on
   * hdfs://node01:9000 (see the `spark.sql.warehouse.dir` config below).
   */
  def main(args: Array[String]): Unit = {
    //      System.setProperty("HADOOP_USER_NAME", "hdfs")
    //    System.setProperty("SPARK_LOCAL_IP", "192.168.0.109")
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("hive")
    val spark = SparkSession.builder()
      .enableHiveSupport()
      .config(sparkConf)
      .config("spark.sql.warehouse.dir", "hdfs://node01:9000/user/hive/warehouse")
      .getOrCreate()

    // Close the session even if a query fails; otherwise a thrown exception
    // leaks the underlying SparkContext.
    try {
      spark.sql("use hdm")
      spark.sql(
        """
          |SELECT
          |	*
          |from (
          |	SELECT
          |		*,
          |		rank() over(partition by area order by clickCnt desc) as rank
          |	from (
          |		SELECT
          |			area,
          |			product_name,
          |			count(*) as clickCnt
          |		from (
          |			SELECT
          |				a.*,
          |       p.product_name,
          |       c.area,
          |       c.city_name
          |			from hdm.user_visit_action a
          |			join hdm.product_info p on a.click_product_id = p.product_id
          |			join hdm.city_info c on a.city_id = c.city_id
          |			where a.click_product_id > -1
          |		) t1 group by area, product_name
          |	) t2
          |) t3 where rank <= 3
          |""".stripMargin
      ).show() // side-effecting call: keep explicit parentheses
    } finally {
      spark.close()
    }
  }

}
