package com.wfg.bigdata.spark.sparksql;

import org.apache.spark.sql.SparkSession;


/**
 * Spark SQL + Hive demo: (re)creates three Hive tables in database {@code atguigu}
 * ({@code user_visit_action}, {@code product_info}, {@code city_info}), loads them
 * from local text files, and prints the contents of {@code user_visit_action}.
 *
 * <p>Requires a reachable Hive metastore; the session runs locally ({@code local[*]}).
 */
public class SparkSQL07_Source_Hive_Case {
    public static void main(String[] args) {

        // Set the Hadoop access user before touching HDFS/Hive, so the load
        // statements run as "root" rather than the local OS user.
        System.setProperty("HADOOP_USER_NAME","root");

        // Build the SparkSession via the builder pattern with Hive support enabled.
        // try-with-resources guarantees the session is closed even if a query throws,
        // which the original bare spark.close() at the end did not.
        try (final SparkSession spark = SparkSession
                .builder()
                .enableHiveSupport() // enable Hive metastore access and HiveQL syntax
                .master("local[*]")
                .appName("SparkSQL")
                .getOrCreate()) {

            // NOTE(review): trailing ';' removed — Spark's sql() parser rejects an
            // embedded statement terminator in several versions (ParseException).
            spark.sql("use atguigu");

            dropTables(spark);
            createTables(spark);
            loadData(spark);

            spark.sql("select * from user_visit_action").show();
        }
    }

    /** Drops the three demo tables if present so the script can be re-run cleanly. */
    private static void dropTables(final SparkSession spark) {
        spark.sql(" drop table if exists city_info");
        spark.sql(" drop table if exists product_info");
        spark.sql(" drop table if exists user_visit_action");
    }

    /** Creates the three tab-delimited Hive tables used by the demo. */
    private static void createTables(final SparkSession spark) {
        spark.sql("CREATE TABLE `user_visit_action`(\n" +
                "                    `date` string,\n" +
                "                    `user_id` bigint,\n" +
                "                    `session_id` string,\n" +
                "                    `page_id` bigint,\n" +
                "                    `action_time` string,\n" +
                "                    `search_keyword` string,\n" +
                "                    `click_category_id` bigint,\n" +
                "                    `click_product_id` bigint,\n" +
                "                    `order_category_ids` string,\n" +
                "                    `order_product_ids` string,\n" +
                "                    `pay_category_ids` string,\n" +
                "                    `pay_product_ids` string,\n" +
                "                    `city_id` bigint)\n" +
                "                  row format delimited fields terminated by '\\t'");

        spark.sql(
                " CREATE TABLE `product_info`( " +
                     " `product_id` bigint, " +
                " `product_name` string, " +
                "   `extend_info` string) " +
                " row format delimited fields terminated by '\t' "
                );

        spark.sql(
                 " CREATE TABLE `city_info`( " +
                "   `city_id` bigint, " +
                "   `city_name` string, " +
                "   `area` string) " +
                " row format delimited fields terminated by '\t' " );
    }

    /** Loads the demo data files from the local filesystem into the three tables. */
    private static void loadData(final SparkSession spark) {
        // NOTE(review): this path is under datas/sql/ while the other two loads read
        // directly from datas/ — looks inconsistent; confirm against the data layout.
        spark.sql("load data local inpath 'datas/sql/user_visit_action.txt' into table atguigu.user_visit_action");

        spark.sql(
                " load data local inpath 'datas/product_info.txt' into table atguigu.product_info");

        spark.sql(
                "load data local inpath 'datas/city_info.txt' into table atguigu.city_info");
    }
}
