package com.offcn.bigdata.spark.sql.p2

import org.apache.spark.sql.SparkSession

/**
 * Spark + Hive demo job: creates two teacher tables in Hive, loads data from
 * the paths given on the command line, joins them, and saves the result as a
 * new Hive table.
 *
 * @author BigData-LGW
 * @since 2020/12/9
 * @version 1.0
 */
object SparkHive {

    /**
     * Entry point. Expects exactly two arguments:
     *
     * @param args Array(basic_path, info_path) — HDFS/local paths to the
     *             teacher_basic and teacher_info CSV files (comma-delimited).
     *             Exits with status -1 and prints usage on bad arguments.
     */
    def main(args: Array[String]): Unit = {
        if (args == null || args.length != 2) {
            println(
                """
                  |usage! <basic_path> <info_path>
                  |""".stripMargin)
            System.exit(-1)
        }
        val Array(basic_path, info_path) = args

        // NOTE(review): hard-coding master here overrides any --master passed
        // to spark-submit; fine for local testing, but consider removing it
        // and supplying the master on the submit command line for cluster use.
        val spark = SparkSession.builder()
            .master("local[*]")
            .appName("SparkHive")
            .enableHiveSupport()
            .getOrCreate()

        try {
            // Step 1: create the database and the two source tables in Hive,
            // then load the input files into them.
            println("step 1 create database.")
            spark.sql(
                """
                  |create database if not exists sparkHive0817
                  |""".stripMargin)

            println("step 2 create table teacher_basic.")
            spark.sql(
                """
                  |create table if not exists `sparkHive0817`.`teacher_basic` (
                  |name string,
                  |age int,
                  |married boolean,
                  |course int
                  |) row format delimited
                  |fields terminated by ','
                  |""".stripMargin)

            println("step 3 create table teacher_info.")
            spark.sql(
                """
                  |create table if not exists `sparkHive0817`.`teacher_info` (
                  |name string,
                  |height double
                  |) row format delimited fields terminated by ','
                  |""".stripMargin)

            println("step 4 load data into teacher_basic.")
            spark.sql(
                s"""
                  |load data inpath '${basic_path}' into table `sparkHive0817`.`teacher_basic`
                  |""".stripMargin)

            println("step 5 load data into teacher_info.")
            spark.sql(
                s"""
                  |load data inpath '${info_path}' into table `sparkHive0817`.`teacher_info`
                  |""".stripMargin)

            // Step 2 (of the exercise): join the two tables on teacher name.
            println("step 6 join")
            val joined = spark.sql(
                """
                  |select
                  |b.name,
                  |b.age,
                  |b.married,
                  |b.course,
                  |i.height
                  |from `sparkHive0817`.`teacher_basic` b
                  |inner join
                  |`sparkHive0817`.`teacher_info` i on b.name = i.name
                  |""".stripMargin)

            // Step 3 (of the exercise): persist the join result as a Hive table.
            // NOTE(review): default SaveMode is ErrorIfExists, so re-running the
            // job fails if `teacher` already exists — use .mode("overwrite") if
            // the job should be idempotent.
            println("step 7 save data into hive table")
            joined.write.saveAsTable("`sparkHive0817`.`teacher`")
            println("OK!")
        } finally {
            // Always release the SparkSession, even if a SQL statement fails.
            spark.stop()
        }
    }
}
