package org.huangrui.spark.java.sql;

import org.apache.spark.sql.SparkSession;

/**
 * Spark SQL requirement demo: joins Hive click-action, product and city tables
 * to count clicks per (area, product).
 *
 * @author hr
 * @since 2024-10-21
 */
public class SparkSQL09_Source_Req_1 {
    public static void main(String[] args) {
        // Set the Hadoop access user BEFORE touching HDFS/Hive — Hadoop reads
        // this system property on first client initialization.
        System.setProperty("HADOOP_USER_NAME", "huangrui");

        // Build a local SparkSession with Hive support enabled.
        // Fixed: appName previously said "SparkSQL08_Source_Hive", a stale
        // copy-paste from a different example class.
        SparkSession spark = SparkSession.builder()
                .enableHiveSupport()
                .appName("SparkSQL09_Source_Req_1")
                .master("local[2]")
                .getOrCreate();

        try {
            spark.sql("use db_spark");

            // Count clicks per (area, product): click actions (click_product_id
            // != -1 filters out non-click rows) joined with the product and city
            // dimension tables, grouped by area and product.
            // NOTE(review): grouping additionally by product_id (not selected)
            // is legal in Spark SQL and disambiguates duplicate product names.
            spark.sql("SELECT\tc.area,\tp.product_name,\tcount(*) AS click_count\n"
                    + "FROM\n"
                    + "\t( SELECT click_product_id, city_id FROM user_visit_action WHERE click_product_id != - 1 ) AS a\n"
                    + "\tJOIN product_info p ON a.click_product_id = p.product_id\n"
                    + "\tJOIN ( SELECT city_id, city_name, area FROM city_info ) c ON a.city_id = c.city_id \n"
                    + "GROUP BY\tarea,\tproduct_id,\tproduct_name \n"
                    + "LIMIT 10").show();
        } finally {
            // Always release the session, even if a query fails.
            spark.stop();
        }
    }
}
