package org.huangrui.spark.scala.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Area/product click-count report: joins user click actions with product and
 * city dimension tables in Hive and prints the grouped counts.
 *
 * @author hr
 * @since 2024-10-21
 */
object SparkSQL09_Source_Req_1 {

  /**
   * Entry point. Counts clicks per (area, product) by joining the Hive tables
   * `user_visit_action`, `product_info` and `city_info` in database `db_spark`,
   * then prints the first 10 grouped rows.
   *
   * NOTE(review): the query has no ORDER BY, so which 10 rows LIMIT returns is
   * nondeterministic — confirm whether a ranking was intended.
   */
  def main(args: Array[String]): Unit = {
    // Hive operations run as this user regardless of the local OS account.
    System.setProperty("HADOOP_USER_NAME", "huangrui")
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL")
    val spark: SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate()

    // try/finally guarantees the session is released even if a query fails.
    try {
      spark.sql("use db_spark")

//    spark.sql("drop table user_visit_action")
//    spark.sql("drop table product_info")
//    spark.sql("drop table city_info")
      spark.sql(
        """
          |SELECT	c.area,	p.product_name,	count(*) AS click_count
          |FROM
          |	( SELECT click_product_id, city_id FROM user_visit_action WHERE click_product_id != - 1 ) AS a
          |	JOIN product_info p ON a.click_product_id = p.product_id
          |	JOIN ( SELECT city_id, city_name, area FROM city_info ) c ON a.city_id = c.city_id
          |GROUP BY	area,	product_id,	product_name
          |LIMIT 10
          |""".stripMargin).show()
    } finally {
      spark.stop()
    }
  }
}
