package com.offcn.bigdata.sql.p1

import org.apache.spark.sql.SparkSession

/**
  * Integration of Spark SQL with Hive.
  * basic
  *     zhangsan,23,false,0
  * info
  *     zhangsan,175
  * Requirement:
  *     In Hive, load the data of the two tables above, run a join query
  *     across them, and write the result back into Hive.
  * Integration notes:
  *     1. hive-site.xml must be added to the project's classpath.
  *     2. Preferably also add hdfs-site.xml and core-site.xml to the classpath.
  *     3. The MySQL driver (for the Hive metastore) must also be on the classpath.
  */
object _05SparkSQLIntegrationOps {

    /**
      * Entry point. Expects exactly two arguments:
      * the HDFS paths of the `basic` and `info` input files.
      * Creates the database/tables if absent, loads the files, joins them,
      * and overwrites the result table `test_0828`.`teacher` in Hive.
      */
    def main(args: Array[String]): Unit = {

        if (args == null || args.length != 2) {
            println("Usage: <basic_path> <info_path>")
            System.exit(-1)
        }
        val Array(basicPath, infoPath) = args

        // enableHiveSupport() connects the session to the Hive metastore;
        // hive-site.xml must be on the classpath (see file header notes).
        val spark = SparkSession.builder()
            .appName("_05SparkSQLIntegrationOps")
//            .master("local[*]")
            .enableHiveSupport() // integrate with Hive
            .getOrCreate()

        // Each DDL/DML statement is held in its own immutable val instead of
        // one mutable `var sql` that gets reassigned between spark.sql calls.
        println("step 1. create database ")
        val createDb =
            """
              | create database if not exists test_0828
            """.stripMargin
        spark.sql(createDb)

        println("step 2.1 create table of basic")
        val createBasic =
            """
              | create table if not exists `test_0828`.`teacher_basic` (
              |     name string,
              |     age int,
              |     married boolean,
              |     children int
              | ) row format delimited
              | fields terminated by ','
            """.stripMargin
        spark.sql(createBasic)

        println("step 2.2 create table of info")
        val createInfo =
            """
              | create table if not exists `test_0828`.`teacher_info` (
              |     name string,
              |     height double
              | ) row format delimited
              | fields terminated by ','
            """.stripMargin
        spark.sql(createInfo)

        // `overwrite` makes the loads idempotent across reruns.
        println("step 3 load data into tables")
        val loadBasic =
            s"""
              | load data inpath '${basicPath}' overwrite into table `test_0828`.`teacher_basic`
            """.stripMargin
        spark.sql(loadBasic)
        val loadInfo =
            s"""
              | load data inpath '${infoPath}' overwrite into table `test_0828`.`teacher_info`
            """.stripMargin
        spark.sql(loadInfo)

        println("step 4 join selection")
        val joinQuery =
            s"""
               | select
               |    b.name,
               |    b.age,
               |    b.married,
               |    b.children,
               |    i.height
               | from `test_0828`.`teacher_basic` b
               | inner join `test_0828`.`teacher_info` i on b.name = i.name
            """.stripMargin
        val ret = spark.sql(joinQuery)

        println("step 5 save into hive table")

        // BUG FIX: the original used the default SaveMode.ErrorIfExists, so
        // every run after the first failed with TableAlreadyExistsException.
        // "overwrite" keeps the job idempotent, consistent with the
        // `if not exists` DDL and `load data ... overwrite` above.
        ret.write.mode("overwrite").saveAsTable("`test_0828`.`teacher`")

        spark.stop()
    }
}
