package org.huangrui.spark.java.sql;

import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;

/**
 * Requirement 3: top-3 most-clicked products per area, annotated with a city-distribution remark.
 *
 * <p>Reads user click actions from the Hive database {@code db_spark}, joins the product and
 * city dimension tables, counts clicks per (area, product) while summarizing the contributing
 * cities via the custom {@code cityRemark} UDAF, ranks products within each area, and prints
 * the top 3 per area.
 *
 * @Author hr
 * @Create 2024-10-21 10:59
 */
public class SparkSQL09_Source_Req_3 {
    public static void main(String[] args) {
        // Hive/HDFS access runs as this user (local OS user may differ from the warehouse owner).
        System.setProperty("HADOOP_USER_NAME", "huangrui");

        // Enable Hive support so spark.sql() can resolve the warehouse tables.
        // appName fixed to match this class — was "SparkSQL08_Source_Hive", a copy-paste
        // leftover from the previous exercise that mislabeled the job in the Spark UI.
        SparkSession spark = SparkSession.builder()
                .enableHiveSupport()
                .appName("SparkSQL09_Source_Req_3")
                .master("local[2]")
                .getOrCreate();

        // Ensure the session is always stopped, even if a query fails.
        try {
            spark.sql("use db_spark");

            // Register the aggregate function used below to summarize the city distribution
            // of each (area, product) group. MyCityRemarkUDAF is defined elsewhere in this project.
            spark.udf().register("cityRemark", functions.udaf(new MyCityRemarkUDAF(), Encoders.STRING()));

            // Base data: click actions joined with product and city dimensions.
            // click_product_id > -1 filters out rows that are not click actions.
            spark.sql("select\n"
                    + "    a.*,\n"
                    + "    p.product_name,\n"
                    + "    c.area,\n"
                    + "    c.city_name\n"
                    + " from user_visit_action a\n"
                    + " join product_info p on a.click_product_id = p.product_id\n"
                    + " join city_info c on a.city_id = c.city_id\n"
                    + " where a.click_product_id > -1").createOrReplaceTempView("t1");

            // Aggregate click counts per (area, product) and attach the city remark.
            spark.sql("select\n"
                    + "    area,\n"
                    + "    product_name,\n"
                    + "    count(*) as clickCnt,\n"
                    + "    cityRemark(city_name) as city_remark\n"
                    + " from t1 group by area, product_name").createOrReplaceTempView("t2");

            // Rank products by click count within each area.
            // NOTE(review): the alias "rank" is a reserved keyword under ANSI SQL mode;
            // works in Spark's default (non-ANSI) parser — confirm if ANSI mode is enabled.
            spark.sql("select\n"
                    + "     *,\n"
                    + "     rank() over( partition by area order by clickCnt desc ) as rank\n"
                    + " from t2").createOrReplaceTempView("t3");

            // Take the top 3 per area; show(false) prints full column values untruncated.
            spark.sql("select * from t3 where rank <= 3").show(false);
        } finally {
            spark.stop();
        }
    }
}
