package com.at.bigdata.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 *
 * @author cdhuangchao3
 * @date 2023/5/27 8:05 PM
 */
/**
 * One-shot setup script: connects to Hive via Spark SQL, creates three test
 * tables (`user_visit_action`, `product_info`, `city_info`) in database `hdm`
 * if they do not exist, and loads them from local tab-separated input files.
 *
 * Numbered progress markers are printed after each step so a failing SQL
 * statement can be located quickly in the console output.
 */
object Spark06_SparkSql_Test {
  def main(args: Array[String]): Unit = {
    //      System.setProperty("HADOOP_USER_NAME", "hdfs")
    //    System.setProperty("SPARK_LOCAL_IP", "192.168.0.109")
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("hive")
    val spark = SparkSession.builder()
      .enableHiveSupport()
      .config(sparkConf)
      // Point the warehouse at HDFS so created tables land in the shared Hive warehouse.
      .config("spark.sql.warehouse.dir", "hdfs://node01:9000/user/hive/warehouse")
      .getOrCreate()

    // Sanity check: confirm the Hive metastore connection works.
    spark.sql("show databases").show()
    println(1111)
    spark.sql("use hdm")
    println(2222)

    // NOTE(review): `pay_categoryids` lacks an underscore, unlike the other
    // *_ids columns — kept as-is because it is the persisted table schema.
    spark.sql(
      """
        |CREATE TABLE if not exists hdm.user_visit_action(
        | `date` string,
        | user_id  bigint,
        | session_id  string,
        | page_id  bigint,
        | action_time string,
        | search_keyword string,
        | click_category_id bigint,
        | click_product_id bigint,
        | order_category_ids  string,
        | order_product_ids  string,
        | pay_categoryids string,
        | pay_product_ids string,
        | city_id bigint)
        | row format delimited fields terminated by  '\t'
        |""".stripMargin
    )
    println(3333)
    spark.sql(
      """
        |load data local inpath 'input/user_visit_action.txt' into table hdm.user_visit_action
        |""".stripMargin
    )
    // Was a duplicated `println(3333)` — renumbered so each step marker is unique.
    println(4444)
    spark.sql(
      """
        |CREATE TABLE if not exists hdm.product_info(
        | product_id bigint,
        | product_name string,
        | extend_info string)
        |row format delimited fields terminated by '\t'
        |""".stripMargin
    )
    println(5555)
    spark.sql(
      """
        |load data local inpath 'input/product_info.txt' into table hdm.product_info
        |""".stripMargin)
    println(6666)
    spark.sql(
      """
        |CREATE TABLE if not exists hdm.city_info(
        | city_id bigint,
        | city_name string,
        | area string)
        |row format delimited fields terminated by '\t'
        |""".stripMargin)
    println(7777)
    spark.sql(
      """
        |load data local inpath 'input/city_info.txt' into table hdm.city_info
        |""".stripMargin)
    println(8888)
    // Verify the final load succeeded before shutting down.
    spark.sql("select * from hdm.city_info").show()

    spark.close()
  }

}
