package com.yujiahao.bigdata.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

object SparkSQL01_Hive_CreateTable {

  /**
   * Entry point: creates the Hive tables `user_visit_action`, `product_info`
   * and `city_info` in database `atguigu0819`, loads each one from a local
   * tab-separated file under `data/`, and prints `city_info` as a smoke check.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {
    // Run as the Hadoop user that owns the warehouse directory; otherwise
    // writing the tables fails with a permission error.
    System.setProperty("HADOOP_USER_NAME", "yujiahao")

    // Hive-enabled local SparkSession.
    val spark: SparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .master("local[*]")
      .appName("sql")
      .getOrCreate()

    try {
      // Select the working database.
      spark.sql("use atguigu0819")

      // DDL fixes vs. the original:
      //  - `IF NOT EXISTS` makes re-runs survive the CREATE step (the plain
      //    CREATE TABLE failed on any second run);
      //  - trailing ';' removed — spark.sql() takes exactly one statement and
      //    some Spark versions reject a trailing semicolon with ParseException.
      // NOTE(review): LOAD DATA appends, so re-running still duplicates rows —
      // acceptable for a one-shot setup script, but be aware when re-running.
      spark.sql(
        """
          |CREATE TABLE IF NOT EXISTS `user_visit_action`(
          |  `date` string,
          |  `user_id` bigint,
          |  `session_id` string,
          |  `page_id` bigint,
          |  `action_time` string,
          |  `search_keyword` string,
          |  `click_category_id` bigint,
          |  `click_product_id` bigint,
          |  `order_category_ids` string,
          |  `order_product_ids` string,
          |  `pay_category_ids` string,
          |  `pay_product_ids` string,
          |  `city_id` bigint)
          |row format delimited fields terminated by '\t'
          |""".stripMargin)
      spark.sql(
        """
          |load data local inpath 'data/user_visit_action.txt' into table user_visit_action
          |""".stripMargin)

      spark.sql(
        """
          |CREATE TABLE IF NOT EXISTS `product_info`(
          |  `product_id` bigint,
          |  `product_name` string,
          |  `extend_info` string)
          |row format delimited fields terminated by '\t'
          |""".stripMargin)
      spark.sql(
        """
          |load data local inpath 'data/product_info.txt' into table product_info
          |""".stripMargin)

      spark.sql(
        """
          |CREATE TABLE IF NOT EXISTS `city_info`(
          |  `city_id` bigint,
          |  `city_name` string,
          |  `area` string)
          |row format delimited fields terminated by '\t'
          |""".stripMargin)
      spark.sql(
        """
          |load data local inpath 'data/city_info.txt' into table city_info
          |""".stripMargin)

      // Smoke check: rows coming back confirms the load succeeded.
      spark.sql("select * from city_info").show()
    } finally {
      // Always release the SparkSession, even if a statement above throws.
      spark.stop()
    }
  }

}
