package com.huahua.bigdata.sparksql;

import org.apache.spark.sql.SparkSession;

public class SparkSQL09_Source_Req {
    /**
     * Creates the Hive tables for the "Top-3 hot products per area" requirement
     * and loads the raw tab-delimited data files into them.
     *
     * <p>Fixes over the previous revision:
     * <ul>
     *   <li>The first two columns of {@code user_visit_action} had the string
     *       concatenation operator accidentally placed INSIDE the literal
     *       ({@code "...,\n +"}), injecting stray {@code +} tokens into the SQL
     *       and making the CREATE TABLE statement unparseable.</li>
     *   <li>Trailing {@code ;} removed from every statement — Spark's
     *       {@code SparkSession.sql()} accepts a single statement and rejects a
     *       trailing semicolon with a ParseException.</li>
     *   <li>Typo {@code pay_parduct_ids} corrected to {@code pay_product_ids};
     *       the data files are positional (tab-delimited), so the rename is safe.</li>
     *   <li>{@code if not exists} added so re-running the job does not fail on DDL.</li>
     * </ul>
     */
    public static void main(String[] args) {

        // Set the Hadoop access user before touching HDFS/Hive; must happen
        // before the SparkSession (and its Hive client) is created.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        final SparkSession sparkSession = SparkSession
                .builder()
                .enableHiveSupport()  // enable Hive support (metastore-backed tables)
                .master("local[*]")
                .appName("SparkSQL")
                .getOrCreate();

        try {
            sparkSession.sql("show tables").show();

            // Raw click-stream fact table. SQL '--' comments are part of the
            // statement text and rely on the embedded '\n' line breaks.
            sparkSession.sql("create table if not exists user_visit_action(\n" +
                    "  date string,\n" +
                    "  user_id bigint,\n" +
                    "  session_id string,\n" +
                    "  page_id bigint,\n" +
                    "  action_time string,\n" +
                    "  search_keyword string,\n" +
                    "  click_category_id bigint,\n" +
                    "  click_product_id bigint,\n" +
                    "  order_category_ids string,\n" +
                    "  order_product_ids string,\n" +
                    "  pay_category_ids string,\n" +
                    "  pay_product_ids string,\n" +
                    "  city_id bigint --城市id\n" +
                    ")\n" +
                    "row format delimited fields terminated by '\\t'");
            sparkSession.sql("load data local inpath 'data/user_visit_action.txt' into table user_visit_action");

            // City dimension table.
            sparkSession.sql("create table if not exists city_info (\n" +
                    "  city_id bigint, --城市Id\n" +
                    "  city_name string, --城市名称 \n" +
                    "  area string --区域名称\n" +
                    ")\n" +
                    "row format delimited fields terminated by '\\t'");

            // Product dimension table.
            sparkSession.sql("create table if not exists product_info (\n" +
                    "  product_id bigint, --商品id\n" +
                    "  product_name string, --商品名称\n" +
                    "  extend_info string\n" +
                    ")\n" +
                    "row format delimited fields terminated by '\\t'");
            sparkSession.sql("load data local inpath 'data/product_info.txt' into table product_info");
            sparkSession.sql("load data local inpath 'data/city_info.txt' into table city_info");

            // TODO requirement: Top-3 hot products per area.
        } finally {
            // Always release the session, even if a statement above fails.
            sparkSession.close();
        }
    }
}
