package com.atguigu.demo;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @author yhm
 * @create 2022-12-26 15:03
 */
/**
 * Computes the top-3 most-clicked products per area by joining user click
 * events with city and product dimension tables in Hive.
 *
 * <p>Pipeline (each stage registered as a temp view consumed by the next):
 * t1 = click events only → t2 = enriched with area/city/product names →
 * t3 = click counts per (area, product) → t4 = ranked within each area →
 * final query keeps rank &lt;= 3 and prints the result.
 */
public class Test01_top3 {
    public static void main(String[] args) {

        // Impersonate the "atguigu" user so Spark has permission to read
        // the Hive warehouse directories on HDFS.
        System.setProperty("HADOOP_USER_NAME","atguigu");

        // 1. Build the Spark configuration: local mode, all cores.
        SparkConf conf = new SparkConf().setAppName("sql").setMaster("local[*]");

        // 2. Create the SparkSession with Hive support enabled so the
        //    queries below can resolve Hive tables (user_visit_action,
        //    city_info, product_info).
        //    try-with-resources guarantees the session is closed even if
        //    one of the queries throws (SparkSession implements Closeable).
        try (SparkSession spark = SparkSession.builder()
                .enableHiveSupport()
                .config(conf).getOrCreate()) {

            // 3. Run the query pipeline.
            // Stage 1: keep only product-click events (click_product_id == -1
            // marks non-click actions such as searches and orders).
            Dataset<Row> dataset = spark.sql("select \n" +
                    "  click_product_id,\n" +
                    "  city_id\n" +
                    "from user_visit_action uva\n" +
                    "where click_product_id != -1");

            dataset.createOrReplaceTempView("t1");

            // Stage 2: join the dimension tables to attach the area,
            // city name, and product name to each click.
            spark.sql("select \n" +
                    "    area,\n" +
                    "    city_name,\n" +
                    "    product_name\n" +
                    "from t1\n" +
                    "join city_info ci\n" +
                    "on  t1.city_id = ci.city_id\n" +
                    "join product_info pi\n" +
                    "on  click_product_id=product_id").createOrReplaceTempView("t2");

            // Stage 3: count clicks per (area, product).
            spark.sql("select \n" +
                    "    area,\n" +
                    "    product_name,\n" +
                    "    count(*) click_nums\n" +
                    "from t2\n" +
                    "group by area,product_name").createOrReplaceTempView("t3");

            // Stage 4: rank products within each area by click count,
            // descending. rank() leaves gaps on ties, so tied products
            // share the same rank.
            spark.sql("select \n" +
                    "    area,\n" +
                    "    product_name,\n" +
                    "    click_nums,\n" +
                    "    rank()over(partition by area order by click_nums desc) rk\n" +
                    "from t3").createOrReplaceTempView("t4");

            // Stage 5: keep the top 3 per area and print to stdout.
            spark.sql("select \n" +
                    "    area,\n" +
                    "    product_name,\n" +
                    "    click_nums\n" +
                    "from t4\n" +
                    "where rk <= 3").show();

            // 4. SparkSession is closed automatically by try-with-resources.
        }
    }
}
