package com.king.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql._


/**
 * Demo requirement, step 1: stage the source data in Hive.
 *
 * Creates three tab-delimited Hive tables (`user_visit_action`, `product_info`,
 * `city_info`) and loads them from local files under `input/`. Each table is
 * dropped first so the script is idempotent: without the DROP, a second run
 * would fail on CREATE TABLE, and LOAD DATA ... INTO would append duplicate rows.
 */
object SparkSQL10_Req_1{
  def main(args: Array[String]): Unit = {

    // Run Hadoop/Hive operations as this user in local mode
    // (avoids permission errors on the Hive warehouse directory).
    System.setProperty("HADOOP_USER_NAME", "atguigu")

    // New entry point: SparkSession (unifies SQLContext/HiveContext).
    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    val spark: SparkSession =
          SparkSession.builder()
            //.appName("SparkSQL")
            //.master("local[*]")
            .config(sparkConf)
            .enableHiveSupport()  // enable Hive support (required for CREATE TABLE / LOAD DATA)
            .getOrCreate()

    // Ensure the session is released even if one of the SQL statements fails.
    try {
      // Prepare data: (re)create the tables, then load the local TSV files.
      spark.sql("DROP TABLE IF EXISTS `user_visit_action`")
      spark.sql(
        """
          |CREATE TABLE `user_visit_action`(
          |  `date` string,
          |  `user_id` bigint,
          |  `session_id` string,
          |  `page_id` bigint,
          |  `action_time` string,
          |  `search_keyword` string,
          |  `click_category_id` bigint,
          |  `click_product_id` bigint,
          |  `order_category_ids` string,
          |  `order_product_ids` string,
          |  `pay_category_ids` string,
          |  `pay_product_ids` string,
          |  `city_id` bigint)
          |row format delimited fields terminated by '\t'
          |
        """.stripMargin)

      spark.sql(
        """
          |load data local inpath 'input/user_visit_action.txt' into table user_visit_action
        """.stripMargin)

      spark.sql("DROP TABLE IF EXISTS `product_info`")
      spark.sql(
        """
          |CREATE TABLE `product_info`(
          |  `product_id` bigint,
          |  `product_name` string,
          |  `extend_info` string)
          |row format delimited fields terminated by '\t'
          |
        """.stripMargin)

      spark.sql(
        """
          |load data local inpath 'input/product_info.txt' into table product_info
        """.stripMargin)

      spark.sql("DROP TABLE IF EXISTS `city_info`")
      spark.sql(
        """
          |CREATE TABLE `city_info`(
          |  `city_id` bigint,
          |  `city_name` string,
          |  `area` string)
          |row format delimited fields terminated by '\t'
          |
        """.stripMargin)

      spark.sql("load data local inpath 'input/city_info.txt' into table city_info")
    } finally {
      // Stops the underlying SparkContext as well.
      spark.stop()
    }
  }
}
